Merge branch 'main' into table-subheaders
Cargo.lock (generated)
@@ -217,6 +217,20 @@ name = "bytemuck"
 version = "1.21.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ef657dfab802224e671f5818e9a4935f9b1957ed18e58292690cc39e7a4092a3"
+dependencies = [
+ "bytemuck_derive",
+]
+
+[[package]]
+name = "bytemuck_derive"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3fa76293b4f7bb636ab88fd78228235b5248b4d05cc589aed610f954af5d7c7a"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]

 [[package]]
 name = "byteorder"

@@ -735,11 +749,12 @@ dependencies = [

 [[package]]
 name = "flate2"
-version = "1.0.35"
+version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c"
+checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc"
 dependencies = [
  "crc32fast",
+ "libz-rs-sys",
  "miniz_oxide",
 ]

@@ -749,6 +764,15 @@ version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4"

+[[package]]
+name = "float-cmp"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b09cf3155332e944990140d967ff5eceb70df778b34f77d8075db46e4704e6d8"
+dependencies = [
+ "num-traits",
+]
+
 [[package]]
 name = "fnv"
 version = "1.0.7"

@@ -761,6 +785,15 @@ version = "0.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f"

+[[package]]
+name = "font-types"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1fa6a5e5a77b5f3f7f9e32879f484aa5b3632ddfbe568a16266c904a6f32cdaf"
+dependencies = [
+ "bytemuck",
+]
+
 [[package]]
 name = "fontconfig-parser"
 version = "0.5.7"

@@ -829,6 +862,15 @@ version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"

+[[package]]
+name = "fxhash"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
+dependencies = [
+ "byteorder",
+]
+
 [[package]]
 name = "getopts"
 version = "0.2.21"

@@ -871,6 +913,12 @@ dependencies = [
  "weezl",
 ]

+[[package]]
+name = "glidesort"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2e102e6eb644d3e0b186fc161e4460417880a0a0b87d235f2e5b8fb30f2e9e0"
+
 [[package]]
 name = "half"
 version = "2.4.1"

@@ -966,7 +1014,7 @@ checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526"
 dependencies = [
  "displaydoc",
  "serde",
- "yoke",
+ "yoke 0.7.5",
  "zerofrom",
  "zerovec",
 ]

@@ -1064,7 +1112,7 @@ dependencies = [
  "stable_deref_trait",
  "tinystr",
  "writeable",
- "yoke",
+ "yoke 0.7.5",
  "zerofrom",
  "zerovec",
 ]

@@ -1310,6 +1358,50 @@ dependencies = [
  "libc",
 ]

+[[package]]
+name = "krilla"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69ee6128ebf52d7ce684613b6431ead2959f2be9ff8cf776eeaaad0427c953e9"
+dependencies = [
+ "base64",
+ "bumpalo",
+ "comemo",
+ "flate2",
+ "float-cmp 0.10.0",
+ "fxhash",
+ "gif",
+ "image-webp",
+ "imagesize",
+ "once_cell",
+ "pdf-writer",
+ "png",
+ "rayon",
+ "rustybuzz",
+ "siphasher",
+ "skrifa",
+ "subsetter",
+ "tiny-skia-path",
+ "xmp-writer",
+ "yoke 0.8.0",
+ "zune-jpeg",
+]
+
+[[package]]
+name = "krilla-svg"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3462989578155cf620ef8035f8921533cc95c28e2a0c75de172f7219e6aba84e"
+dependencies = [
+ "flate2",
+ "fontdb",
+ "krilla",
+ "png",
+ "resvg",
+ "tiny-skia",
+ "usvg",
+]
+
 [[package]]
 name = "kurbo"
 version = "0.11.1"

@@ -1371,6 +1463,15 @@ dependencies = [
  "redox_syscall",
 ]

+[[package]]
+name = "libz-rs-sys"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "902bc563b5d65ad9bba616b490842ef0651066a1a1dc3ce1087113ffcb873c8d"
+dependencies = [
+ "zlib-rs",
+]
+
 [[package]]
 name = "linked-hash-map"
 version = "0.5.6"

@@ -1458,9 +1559,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"

 [[package]]
 name = "miniz_oxide"
-version = "0.8.3"
+version = "0.8.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b8402cab7aefae129c6977bb0ff1b8fd9a04eb5b51efc50a70bea51cda0c7924"
+checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5"
 dependencies = [
  "adler2",
  "simd-adler32",

@@ -1601,9 +1702,9 @@ dependencies = [

 [[package]]
 name = "openssl"
-version = "0.10.70"
+version = "0.10.72"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "61cfb4e166a8bb8c9b55c500bc2308550148ece889be90f609377e58140f42c6"
+checksum = "fedfea7d58a1f73118430a55da6a286e7b044961736ce96a16a17068ea25e5da"
 dependencies = [
  "bitflags 2.8.0",
  "cfg-if",

@@ -1642,9 +1743,9 @@ dependencies = [

 [[package]]
 name = "openssl-sys"
-version = "0.9.105"
+version = "0.9.107"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b22d5b84be05a8d6947c7cb71f7c849aa0f112acd4bf51c2a7c1c988ac0a9dc"
+checksum = "8288979acd84749c744a9014b4382d42b8f7b2592847b5afb2ed29e5d16ede07"
 dependencies = [
  "cc",
  "libc",

@@ -1738,9 +1839,9 @@ checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3"

 [[package]]
 name = "pdf-writer"
-version = "0.12.1"
+version = "0.13.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5df03c7d216de06f93f398ef06f1385a60f2c597bb96f8195c8d98e08a26b1d5"
+checksum = "3ea27c5015ab81753fc61e49f8cde74999346605ee148bb20008ef3d3150e0dc"
 dependencies = [
  "bitflags 2.8.0",
  "itoa",

@@ -1997,6 +2098,16 @@ dependencies = [
  "crossbeam-utils",
 ]

+[[package]]
+name = "read-fonts"
+version = "0.28.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "600e807b48ac55bad68a8cb75cc3c7739f139b9248f7e003e01e080f589b5288"
+dependencies = [
+ "bytemuck",
+ "font-types",
+]
+
 [[package]]
 name = "redox_syscall"
 version = "0.5.8"

@@ -2315,6 +2426,16 @@ version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d"

+[[package]]
+name = "skrifa"
+version = "0.30.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6fa1e5622e4f7b98877e8a19890efddcac1230cec6198bd9de91ec0e00010dc8"
+dependencies = [
+ "bytemuck",
+ "read-fonts",
+]
+
 [[package]]
 name = "slotmap"
 version = "1.0.7"

@@ -2361,7 +2482,7 @@ version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6637bab7722d379c8b41ba849228d680cc12d0a45ba1fa2b48f2a30577a06731"
 dependencies = [
- "float-cmp",
+ "float-cmp 0.9.0",
 ]

@@ -2404,28 +2525,11 @@ dependencies = [

 [[package]]
 name = "subsetter"
-version = "0.2.0"
+version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "74f98178f34057d4d4de93d68104007c6dea4dfac930204a69ab4622daefa648"
-
-[[package]]
-name = "svg2pdf"
-version = "0.13.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e50dc062439cc1a396181059c80932a6e6bd731b130e674c597c0c8874b6df22"
+checksum = "35539e8de3dcce8dd0c01f3575f85db1e5ac1aea1b996d2d09d89f148bc91497"
 dependencies = [
- "fontdb",
- "image",
- "log",
- "miniz_oxide",
- "once_cell",
- "pdf-writer",
- "resvg",
- "siphasher",
- "subsetter",
- "tiny-skia",
- "ttf-parser",
- "usvg",
+ "fxhash",
 ]

 [[package]]

@@ -2803,7 +2907,7 @@ dependencies = [
 [[package]]
 name = "typst-dev-assets"
 version = "0.13.1"
-source = "git+https://github.com/typst/typst-dev-assets?rev=9879589#9879589f4b3247b12c5e694d0d7fa86d4d8a198e"
+source = "git+https://github.com/typst/typst-dev-assets?rev=fddbf8b#fddbf8b99506bc370ac0edcd4959add603a7fc92"

 [[package]]
 name = "typst-docs"

@@ -2958,6 +3062,7 @@ dependencies = [
  "ecow",
  "flate2",
  "fontdb",
+ "glidesort",
  "hayagriva",
  "icu_properties",
  "icu_provider",

@@ -3018,26 +3123,19 @@ dependencies = [
 name = "typst-pdf"
 version = "0.13.1"
 dependencies = [
- "arrayvec",
- "base64",
  "bytemuck",
  "comemo",
  "ecow",
  "image",
- "indexmap 2.7.1",
- "miniz_oxide",
- "pdf-writer",
+ "krilla",
+ "krilla-svg",
  "serde",
- "subsetter",
- "svg2pdf",
- "ttf-parser",
  "typst-assets",
  "typst-library",
  "typst-macros",
  "typst-syntax",
  "typst-timing",
  "typst-utils",
- "xmp-writer",
 ]

 [[package]]

@@ -3661,9 +3759,9 @@ checksum = "ec7a2a501ed189703dba8b08142f057e887dfc4b2cc4db2d343ac6376ba3e0b9"

 [[package]]
 name = "xmp-writer"
-version = "0.3.1"
+version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7eb5954c9ca6dcc869e98d3e42760ed9dab08f3e70212b31d7ab8ae7f3b7a487"
+checksum = "ce9e2f4a404d9ebffc0a9832cf4f50907220ba3d7fffa9099261a5cab52f2dd7"

 [[package]]
 name = "xz2"

@@ -3701,7 +3799,19 @@ checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40"
 dependencies = [
  "serde",
  "stable_deref_trait",
- "yoke-derive",
+ "yoke-derive 0.7.5",
  "zerofrom",
 ]

+[[package]]
+name = "yoke"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc"
+dependencies = [
+ "serde",
+ "stable_deref_trait",
+ "yoke-derive 0.8.0",
+ "zerofrom",
+]
+
@@ -3717,6 +3827,18 @@ dependencies = [
  "synstructure",
 ]

+[[package]]
+name = "yoke-derive"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
 [[package]]
 name = "zerocopy"
 version = "0.7.35"

@@ -3778,7 +3900,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079"
 dependencies = [
  "serde",
- "yoke",
+ "yoke 0.7.5",
  "zerofrom",
  "zerovec-derive",
 ]

@@ -3809,6 +3931,12 @@ dependencies = [
  "zopfli",
 ]

+[[package]]
+name = "zlib-rs"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b20717f0917c908dc63de2e44e97f1e6b126ca58d0e391cee86d504eb8fbd05"
+
 [[package]]
 name = "zopfli"
 version = "0.8.1"
Cargo.toml
@@ -33,7 +33,7 @@ typst-syntax = { path = "crates/typst-syntax", version = "0.13.1" }
 typst-timing = { path = "crates/typst-timing", version = "0.13.1" }
 typst-utils = { path = "crates/typst-utils", version = "0.13.1" }
 typst-assets = { git = "https://github.com/typst/typst-assets", rev = "ab1295f" }
-typst-dev-assets = { git = "https://github.com/typst/typst-dev-assets", rev = "9879589" }
+typst-dev-assets = { git = "https://github.com/typst/typst-dev-assets", rev = "fddbf8b" }
 arrayvec = "0.7.4"
 az = "1.2"
 base64 = "0.22"

@@ -59,6 +59,7 @@ fastrand = "2.3"
 flate2 = "1"
 fontdb = { version = "0.23", default-features = false }
 fs_extra = "1.3"
+glidesort = "0.1.2"
 hayagriva = "0.8.1"
 heck = "0.5"
 hypher = "0.1.4"

@@ -71,21 +72,21 @@ if_chain = "1"
 image = { version = "0.25.5", default-features = false, features = ["png", "jpeg", "gif"] }
 indexmap = { version = "2", features = ["serde"] }
 kamadak-exif = "0.6"
+krilla = { version = "0.4.0", default-features = false, features = ["raster-images", "comemo", "rayon"] }
+krilla-svg = "0.1.0"
 kurbo = "0.11"
 libfuzzer-sys = "0.4"
 lipsum = "0.9"
 memchr = "2"
-miniz_oxide = "0.8"
 native-tls = "0.2"
 notify = "8"
 once_cell = "1"
 open = "5.0.1"
-openssl = "0.10"
+openssl = "0.10.72"
 oxipng = { version = "9.0", default-features = false, features = ["filetime", "parallel", "zopfli"] }
 palette = { version = "0.7.3", default-features = false, features = ["approx", "libm"] }
 parking_lot = "0.12.1"
 pathdiff = "0.2"
-pdf-writer = "0.12.1"
 phf = { version = "0.11", features = ["macros"] }
 pixglyph = "0.6"
 png = "0.17"

@@ -112,8 +113,6 @@ sigpipe = "0.1"
 siphasher = "1"
 smallvec = { version = "1.11.1", features = ["union", "const_generics", "const_new"] }
 stacker = "0.1.15"
-subsetter = "0.2"
-svg2pdf = "0.13"
 syn = { version = "2", features = ["full", "extra-traits"] }
 syntect = { version = "5", default-features = false, features = ["parsing", "regex-fancy", "plist-load", "yaml-load"] }
 tar = "0.4"

@@ -140,7 +139,6 @@ wasmi = "0.40.0"
 web-sys = "0.3"
 xmlparser = "0.13.5"
 xmlwriter = "0.1.0"
-xmp-writer = "0.3.1"
 xz2 = { version = "0.1", features = ["static"] }
 yaml-front-matter = "0.1"
 zip = { version = "2.5", default-features = false, features = ["deflate"] }
README.md
@@ -177,22 +177,22 @@ If you prefer an integrated IDE-like experience with autocompletion and instant
 preview, you can also check out [Typst's free web app][app].

 ## Community
-The main place where the community gathers is our [Discord server][discord].
-Feel free to join there to ask questions, help out others, share cool things
-you created with Typst, or just to chat.
+The main places where the community gathers are our [Forum][forum] and our
+[Discord server][discord]. The Forum is a great place to ask questions, help
+others, and share cool things you created with Typst. The Discord server is more
+suitable for quicker questions, discussions about contributing, or just to chat.
+We'd be happy to see you there!

-Aside from that there are a few places where you can find things built by
-the community:
-
-- The official [package list](https://typst.app/docs/packages)
-- The [Awesome Typst](https://github.com/qjcg/awesome-typst) repository
+[Typst Universe][universe] is where the community shares templates and packages.
+If you want to share your own creations, you can submit them to our
+[package repository][packages].

 If you had a bad experience in our community, please [reach out to us][contact].

 ## Contributing
-We would love to see contributions from the community. If you experience bugs,
-feel free to open an issue. If you would like to implement a new feature or bug
-fix, please follow the steps outlined in the [contribution guide][contributing].
+We love to see contributions from the community. If you experience bugs, feel
+free to open an issue. If you would like to implement a new feature or bug fix,
+please follow the steps outlined in the [contribution guide][contributing].

 To build Typst yourself, first ensure that you have the
 [latest stable Rust][rust] installed. Then, clone this repository and build the

@@ -243,6 +243,8 @@ instant preview. To achieve these goals, we follow three core design principles:
 [docs]: https://typst.app/docs/
 [app]: https://typst.app/
 [discord]: https://discord.gg/2uDybryKPe
+[forum]: https://forum.typst.app/
+[universe]: https://typst.app/universe/
 [tutorial]: https://typst.app/docs/tutorial/
 [show]: https://typst.app/docs/reference/styling/#show-rules
 [math]: https://typst.app/docs/reference/math/
@@ -361,7 +361,7 @@ pub struct FontArgs {

     /// Ensures system fonts won't be searched, unless explicitly included via
     /// `--font-path`.
-    #[arg(long)]
+    #[arg(long, env = "TYPST_IGNORE_SYSTEM_FONTS")]
     pub ignore_system_fonts: bool,
 }

@@ -467,15 +467,45 @@ display_possible_values!(Feature);
 #[derive(Debug, Copy, Clone, Eq, PartialEq, ValueEnum)]
 #[allow(non_camel_case_types)]
 pub enum PdfStandard {
+    /// PDF 1.4.
+    #[value(name = "1.4")]
+    V_1_4,
+    /// PDF 1.5.
+    #[value(name = "1.5")]
+    V_1_5,
+    /// PDF 1.6.
+    #[value(name = "1.6")]
+    V_1_6,
     /// PDF 1.7.
     #[value(name = "1.7")]
     V_1_7,
+    /// PDF 2.0.
+    #[value(name = "2.0")]
+    V_2_0,
+    /// PDF/A-1b.
+    #[value(name = "a-1b")]
+    A_1b,
     /// PDF/A-2b.
     #[value(name = "a-2b")]
     A_2b,
-    /// PDF/A-3b.
+    /// PDF/A-2u.
+    #[value(name = "a-2u")]
+    A_2u,
+    /// PDF/A-3b.
     #[value(name = "a-3b")]
     A_3b,
+    /// PDF/A-3u.
+    #[value(name = "a-3u")]
+    A_3u,
+    /// PDF/A-4.
+    #[value(name = "a-4")]
+    A_4,
+    /// PDF/A-4f.
+    #[value(name = "a-4f")]
+    A_4f,
+    /// PDF/A-4e.
+    #[value(name = "a-4e")]
+    A_4e,
 }

 display_possible_values!(PdfStandard);
@@ -63,8 +63,7 @@ pub struct CompileConfig {
     /// Opens the output file with the default viewer or a specific program after
     /// compilation.
     pub open: Option<Option<String>>,
-    /// One (or multiple comma-separated) PDF standards that Typst will enforce
-    /// conformance with.
+    /// A list of standards the PDF should conform to.
     pub pdf_standards: PdfStandards,
     /// A path to write a Makefile rule describing the current compilation.
     pub make_deps: Option<PathBuf>,

@@ -130,18 +129,9 @@ impl CompileConfig {
             PageRanges::new(export_ranges.iter().map(|r| r.0.clone()).collect())
         });

-        let pdf_standards = {
-            let list = args
-                .pdf_standard
-                .iter()
-                .map(|standard| match standard {
-                    PdfStandard::V_1_7 => typst_pdf::PdfStandard::V_1_7,
-                    PdfStandard::A_2b => typst_pdf::PdfStandard::A_2b,
-                    PdfStandard::A_3b => typst_pdf::PdfStandard::A_3b,
-                })
-                .collect::<Vec<_>>();
-            PdfStandards::new(&list)?
-        };
+        let pdf_standards = PdfStandards::new(
+            &args.pdf_standard.iter().copied().map(Into::into).collect::<Vec<_>>(),
+        )?;

         #[cfg(feature = "http-server")]
         let server = match watch {

@@ -295,6 +285,7 @@ fn export_pdf(document: &PagedDocument, config: &CompileConfig) -> SourceResult<
             })
         }
     };

     let options = PdfOptions {
         ident: Smart::Auto,
+        timestamp,

@@ -765,3 +756,23 @@ impl<'a> codespan_reporting::files::Files<'a> for SystemWorld {
         })
     }
 }
+
+impl From<PdfStandard> for typst_pdf::PdfStandard {
+    fn from(standard: PdfStandard) -> Self {
+        match standard {
+            PdfStandard::V_1_4 => typst_pdf::PdfStandard::V_1_4,
+            PdfStandard::V_1_5 => typst_pdf::PdfStandard::V_1_5,
+            PdfStandard::V_1_6 => typst_pdf::PdfStandard::V_1_6,
+            PdfStandard::V_1_7 => typst_pdf::PdfStandard::V_1_7,
+            PdfStandard::V_2_0 => typst_pdf::PdfStandard::V_2_0,
+            PdfStandard::A_1b => typst_pdf::PdfStandard::A_1b,
+            PdfStandard::A_2b => typst_pdf::PdfStandard::A_2b,
+            PdfStandard::A_2u => typst_pdf::PdfStandard::A_2u,
+            PdfStandard::A_3b => typst_pdf::PdfStandard::A_3b,
+            PdfStandard::A_3u => typst_pdf::PdfStandard::A_3u,
+            PdfStandard::A_4 => typst_pdf::PdfStandard::A_4,
+            PdfStandard::A_4f => typst_pdf::PdfStandard::A_4f,
+            PdfStandard::A_4e => typst_pdf::PdfStandard::A_4e,
+        }
+    }
+}
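The `From` impl above is what lets the config code shrink to `.map(Into::into)`: `Into` is blanket-implemented for any `From`, so the per-variant match lives in exactly one place. A minimal self-contained sketch of the pattern, using hypothetical `CliStandard`/`LibStandard` types rather than typst's real ones:

```rust
#[allow(non_camel_case_types)]
#[derive(Copy, Clone)]
enum CliStandard { V_1_7, A_2b }

#[allow(non_camel_case_types)]
enum LibStandard { V_1_7, A_2b }

impl From<CliStandard> for LibStandard {
    fn from(s: CliStandard) -> Self {
        match s {
            CliStandard::V_1_7 => LibStandard::V_1_7,
            CliStandard::A_2b => LibStandard::A_2b,
        }
    }
}

fn main() {
    let args = [CliStandard::V_1_7, CliStandard::A_2b];
    // Same shape as the new call site: iterate, copy, convert.
    let list: Vec<LibStandard> = args.iter().copied().map(Into::into).collect();
    assert!(matches!(list[0], LibStandard::V_1_7));
}
```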
@@ -2,6 +2,7 @@ use comemo::Track;
 use ecow::{eco_format, EcoString};
 use serde::Serialize;
 use typst::diag::{bail, HintedStrResult, StrResult, Warned};
+use typst::engine::Sink;
 use typst::foundations::{Content, IntoValue, LocatableSelector, Scope};
 use typst::layout::PagedDocument;
 use typst::syntax::Span;

@@ -58,6 +59,8 @@ fn retrieve(
     let selector = eval_string(
         &typst::ROUTINES,
         world.track(),
+        // TODO: propagate warnings
+        Sink::new().track_mut(),
         &command.selector,
         Span::detached(),
         EvalMode::Code,
@@ -210,7 +210,9 @@ impl World for SystemWorld {
     }

     fn font(&self, index: usize) -> Option<Font> {
-        self.fonts[index].get()
+        // comemo's validation may invoke this function with an invalid index. This is
+        // impossible in typst-cli but possible if a custom tool mutates the fonts.
+        self.fonts.get(index)?.get()
     }

     fn today(&self, offset: Option<i64>) -> Option<Datetime> {
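The `font` change swaps a panicking index for `slice::get`. A tiny sketch of the difference, with a hypothetical string-based font-slot setup: `slots[index]` panics on an out-of-range index, while `slots.get(index)?` turns it into a clean `None`, which is what the `Option`-returning method wants.

```rust
fn font_name(slots: &[String], index: usize) -> Option<String> {
    // `get` returns Option<&String>; `?` propagates None for invalid indices.
    Some(slots.get(index)?.clone())
}

fn main() {
    let slots = vec!["Libertinus Serif".to_string()];
    assert_eq!(font_name(&slots, 0).as_deref(), Some("Libertinus Serif"));
    assert_eq!(font_name(&slots, 5), None); // would have panicked with slots[5]
}
```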
@@ -101,6 +101,7 @@ pub fn eval(
 pub fn eval_string(
     routines: &Routines,
     world: Tracked<dyn World + '_>,
+    sink: TrackedMut<Sink>,
     string: &str,
     span: Span,
     mode: EvalMode,

@@ -121,7 +122,6 @@ pub fn eval_string(
     }

     // Prepare the engine.
-    let mut sink = Sink::new();
     let introspector = Introspector::default();
     let traced = Traced::default();
     let engine = Engine {

@@ -129,7 +129,7 @@ pub fn eval_string(
         world,
         introspector: introspector.track(),
         traced: traced.track(),
-        sink: sink.track_mut(),
+        sink,
         route: Route::default(),
     };

@@ -263,13 +263,13 @@ fn handle(
 /// Wrap the nodes in `<html>` and `<body>` if they are not yet rooted,
 /// supplying a suitable `<head>`.
 fn root_element(output: Vec<HtmlNode>, info: &DocumentInfo) -> SourceResult<HtmlElement> {
+    let head = head_element(info);
     let body = match classify_output(output)? {
         OutputKind::Html(element) => return Ok(element),
         OutputKind::Body(body) => body,
         OutputKind::Leafs(leafs) => HtmlElement::new(tag::body).with_children(leafs),
     };
-    Ok(HtmlElement::new(tag::html)
-        .with_children(vec![head_element(info).into(), body.into()]))
+    Ok(HtmlElement::new(tag::html).with_children(vec![head.into(), body.into()]))
 }

 /// Generate a `<head>` element.

@@ -302,6 +302,24 @@ fn head_element(info: &DocumentInfo) -> HtmlElement {
         );
     }

+    if !info.author.is_empty() {
+        children.push(
+            HtmlElement::new(tag::meta)
+                .with_attr(attr::name, "authors")
+                .with_attr(attr::content, info.author.join(", "))
+                .into(),
+        )
+    }
+
+    if !info.keywords.is_empty() {
+        children.push(
+            HtmlElement::new(tag::meta)
+                .with_attr(attr::name, "keywords")
+                .with_attr(attr::content, info.keywords.join(", "))
+                .into(),
+        )
+    }
+
     HtmlElement::new(tag::head).with_children(children)
 }

@@ -3,7 +3,7 @@ use std::num::NonZeroUsize;
 use typst::layout::{Frame, FrameItem, PagedDocument, Point, Position, Size};
 use typst::model::{Destination, Url};
 use typst::syntax::{FileId, LinkedNode, Side, Source, Span, SyntaxKind};
-use typst::visualize::Geometry;
+use typst::visualize::{Curve, CurveItem, FillRule, Geometry};
 use typst::WorldExt;

 use crate::IdeWorld;

@@ -53,10 +53,20 @@ pub fn jump_from_click(
     for (mut pos, item) in frame.items().rev() {
         match item {
             FrameItem::Group(group) => {
-                // TODO: Handle transformation.
-                if let Some(span) =
-                    jump_from_click(world, document, &group.frame, click - pos)
-                {
+                let pos = click - pos;
+                if let Some(clip) = &group.clip {
+                    if !clip.contains(FillRule::NonZero, pos) {
+                        continue;
+                    }
+                }
+                // Realistic transforms should always be invertible.
+                // An example of one that isn't is a scale of 0, which would
+                // not be clickable anyway.
+                let Some(inv_transform) = group.transform.invert() else {
+                    continue;
+                };
+                let pos = pos.transform_inf(inv_transform);
+                if let Some(span) = jump_from_click(world, document, &group.frame, pos) {
                     return Some(span);
                 }
             }

@@ -94,12 +104,35 @@ pub fn jump_from_click(
             }

             FrameItem::Shape(shape, span) => {
-                let Geometry::Rect(size) = shape.geometry else { continue };
-                if is_in_rect(pos, size, click) {
+                if shape.fill.is_some() {
+                    let within = match &shape.geometry {
+                        Geometry::Line(..) => false,
+                        Geometry::Rect(size) => is_in_rect(pos, *size, click),
+                        Geometry::Curve(curve) => {
+                            curve.contains(shape.fill_rule, click - pos)
+                        }
+                    };
+                    if within {
                         return Jump::from_span(world, *span);
+                    }
+                }
+
+                if let Some(stroke) = &shape.stroke {
+                    let within = !stroke.thickness.approx_empty() && {
+                        // This curve is rooted at (0, 0), not `pos`.
+                        let base_curve = match &shape.geometry {
+                            Geometry::Line(to) => &Curve(vec![CurveItem::Line(*to)]),
+                            Geometry::Rect(size) => &Curve::rect(*size),
+                            Geometry::Curve(curve) => curve,
+                        };
+                        base_curve.stroke_contains(stroke, click - pos)
+                    };
+                    if within {
+                        return Jump::from_span(world, *span);
+                    }
                 }
             }

             FrameItem::Image(_, size, span) if is_in_rect(pos, *size, click) => {
                 return Jump::from_span(world, *span);
             }

@@ -146,9 +179,8 @@ pub fn jump_from_cursor(
 fn find_in_frame(frame: &Frame, span: Span) -> Option<Point> {
     for (mut pos, item) in frame.items() {
         if let FrameItem::Group(group) = item {
-            // TODO: Handle transformation.
             if let Some(point) = find_in_frame(&group.frame, span) {
-                return Some(point + pos);
+                return Some(pos + point.transform(group.transform));
             }
         }

@@ -269,6 +301,97 @@ mod tests {
         test_click("$a + b$", point(28.0, 14.0), cursor(5));
     }

+    #[test]
+    fn test_jump_from_click_transform_clip() {
+        let margin = point(10.0, 10.0);
+        test_click(
+            "#rect(width: 20pt, height: 20pt, fill: black)",
+            point(10.0, 10.0) + margin,
+            cursor(1),
+        );
+        test_click(
+            "#rect(width: 60pt, height: 10pt, fill: black)",
+            point(5.0, 30.0) + margin,
+            None,
+        );
+        test_click(
+            "#rotate(90deg, origin: bottom + left, rect(width: 60pt, height: 10pt, fill: black))",
+            point(5.0, 30.0) + margin,
+            cursor(38),
+        );
+        test_click(
+            "#scale(x: 300%, y: 300%, origin: top + left, rect(width: 10pt, height: 10pt, fill: black))",
+            point(20.0, 20.0) + margin,
+            cursor(45),
+        );
+        test_click(
+            "#box(width: 10pt, height: 10pt, clip: true, scale(x: 300%, y: 300%, \
+             origin: top + left, rect(width: 10pt, height: 10pt, fill: black)))",
+            point(20.0, 20.0) + margin,
+            None,
+        );
+        test_click(
+            "#box(width: 10pt, height: 10pt, clip: false, rect(width: 30pt, height: 30pt, fill: black))",
+            point(20.0, 20.0) + margin,
+            cursor(45),
+        );
+        test_click(
+            "#box(width: 10pt, height: 10pt, clip: true, rect(width: 30pt, height: 30pt, fill: black))",
+            point(20.0, 20.0) + margin,
+            None,
+        );
+        test_click(
+            "#rotate(90deg, origin: bottom + left)[hello world]",
+            point(5.0, 15.0) + margin,
+            cursor(40),
+        );
+    }
+
+    #[test]
+    fn test_jump_from_click_shapes() {
+        let margin = point(10.0, 10.0);
+
+        test_click(
+            "#rect(width: 30pt, height: 30pt, fill: black)",
+            point(15.0, 15.0) + margin,
+            cursor(1),
+        );
+
+        let circle = "#circle(width: 30pt, height: 30pt, fill: black)";
+        test_click(circle, point(15.0, 15.0) + margin, cursor(1));
+        test_click(circle, point(1.0, 1.0) + margin, None);
+
+        let bowtie =
+            "#polygon(fill: black, (0pt, 0pt), (20pt, 20pt), (20pt, 0pt), (0pt, 20pt))";
+        test_click(bowtie, point(1.0, 2.0) + margin, cursor(1));
+        test_click(bowtie, point(2.0, 1.0) + margin, None);
+        test_click(bowtie, point(19.0, 10.0) + margin, cursor(1));
+
+        let evenodd = r#"#polygon(fill: black, fill-rule: "even-odd",
+            (0pt, 10pt), (30pt, 10pt), (30pt, 20pt), (20pt, 20pt),
+            (20pt, 0pt), (10pt, 0pt), (10pt, 30pt), (20pt, 30pt),
+            (20pt, 20pt), (0pt, 20pt))"#;
+        test_click(evenodd, point(15.0, 15.0) + margin, None);
+        test_click(evenodd, point(5.0, 15.0) + margin, cursor(1));
+        test_click(evenodd, point(15.0, 5.0) + margin, cursor(1));
+    }
+
+    #[test]
+    fn test_jump_from_click_shapes_stroke() {
+        let margin = point(10.0, 10.0);
+
+        let rect =
+            "#place(dx: 10pt, dy: 10pt, rect(width: 10pt, height: 10pt, stroke: 5pt))";
+        test_click(rect, point(15.0, 15.0) + margin, None);
+        test_click(rect, point(10.0, 15.0) + margin, cursor(27));
+
+        test_click(
+            "#line(angle: 45deg, length: 10pt, stroke: 2pt)",
+            point(2.0, 2.0) + margin,
+            cursor(1),
+        );
+    }
+
     #[test]
     fn test_jump_from_cursor() {
         let s = "*Hello* #box[ABC] World";

@@ -281,6 +404,15 @@ mod tests {
         test_cursor("$a + b$", -3, pos(1, 27.51, 16.83));
     }

+    #[test]
+    fn test_jump_from_cursor_transform() {
+        test_cursor(
+            r#"#rotate(90deg, origin: bottom + left, [hello world])"#,
+            -5,
+            pos(1, 10.0, 16.58),
+        );
+    }
+
     #[test]
     fn test_backlink() {
         let s = "#footnote[Hi]";
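A simplified sketch of the hit-testing idea in the `FrameItem::Group` arm above: instead of transforming every child shape, map the click into the group's local space with the inverted transform. This sketch assumes a scale-plus-translate transform only; typst's real `Transform` also carries shear components, and its `transform_inf` additionally handles infinite coordinates.

```rust
#[derive(Copy, Clone, Debug, PartialEq)]
struct Xform { sx: f64, sy: f64, tx: f64, ty: f64 }

impl Xform {
    fn apply(self, (x, y): (f64, f64)) -> (f64, f64) {
        (x * self.sx + self.tx, y * self.sy + self.ty)
    }

    // A scale of zero is not invertible; such a group is not clickable anyway.
    fn invert(self) -> Option<Xform> {
        if self.sx == 0.0 || self.sy == 0.0 {
            return None;
        }
        Some(Xform {
            sx: 1.0 / self.sx,
            sy: 1.0 / self.sy,
            tx: -self.tx / self.sx,
            ty: -self.ty / self.sy,
        })
    }
}

fn main() {
    // 400% scale about the origin (a power of two, so the assert is exact).
    let group = Xform { sx: 4.0, sy: 4.0, tx: 0.0, ty: 0.0 };
    let click = (20.0, 20.0);
    // Map the click into local space; a 10pt child rect now contains it.
    let local = group.invert().map(|inv| inv.apply(click));
    assert_eq!(local, Some((5.0, 5.0)));
}
```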
@@ -97,7 +97,7 @@ impl World for TestWorld {
     }

     fn font(&self, index: usize) -> Option<Font> {
-        Some(self.base.fonts[index].clone())
+        self.base.fonts.get(index).cloned()
     }

     fn today(&self, _: Option<i64>) -> Option<Datetime> {
@@ -13,7 +13,7 @@ use typst_library::layout::{
 use typst_library::text::TextElem;
 use typst_library::visualize::Geometry;
 use typst_syntax::Span;
-use typst_utils::{MaybeReverseIter, Numeric};
+use typst_utils::Numeric;

 use super::{
     generate_line_segments, hline_stroke_at_column, layout_cell, vline_stroke_at_row,

@@ -755,7 +755,7 @@ impl<'a> GridLayouter<'a> {

         // Reverse with RTL so that later columns start first.
         let mut dx = Abs::zero();
-        for (x, &col) in self.rcols.iter().enumerate().rev_if(self.is_rtl) {
+        for (x, &col) in self.rcols.iter().enumerate() {
             let mut dy = Abs::zero();
             for row in rows {
                 // We want to only draw the fill starting at the parent

@@ -824,18 +824,13 @@ impl<'a> GridLayouter<'a> {
                             .sum()
                     };
                     let width = self.cell_spanned_width(cell, x);
-                    // In the grid, cell colspans expand to the right,
-                    // so we're at the leftmost (lowest 'x') column
-                    // spanned by the cell. However, in RTL, cells
-                    // expand to the left. Therefore, without the
-                    // offset below, cell fills would start at the
-                    // rightmost visual position of a cell and extend
-                    // over to unrelated columns to the right in RTL.
-                    // We avoid this by ensuring the fill starts at the
-                    // very left of the cell, even with colspan > 1.
-                    let offset =
-                        if self.is_rtl { -width + col } else { Abs::zero() };
-                    let pos = Point::new(dx + offset, dy);
+                    let mut pos = Point::new(dx, dy);
+                    if self.is_rtl {
+                        // In RTL cells expand to the left, thus the
+                        // position must additionally be offset by the
+                        // cell's width.
+                        pos.x = self.width - (dx + width);
+                    }
                     let size = Size::new(width, height);
                     let rect = Geometry::Rect(size).filled(fill);
                     fills.push((pos, FrameItem::Shape(rect, self.span)));

@@ -1415,10 +1410,9 @@ impl<'a> GridLayouter<'a> {
         }

         let mut output = Frame::soft(Size::new(self.width, height));
-        let mut pos = Point::zero();
+        let mut offset = Point::zero();

-        // Reverse the column order when using RTL.
-        for (x, &rcol) in self.rcols.iter().enumerate().rev_if(self.is_rtl) {
+        for (x, &rcol) in self.rcols.iter().enumerate() {
             if let Some(cell) = self.grid.cell(x, y) {
                 // Rowspans have a separate layout step
                 if cell.rowspan.get() == 1 {

@@ -1436,25 +1430,17 @@ impl<'a> GridLayouter<'a> {
                     let frame =
                         layout_cell(cell, engine, disambiguator, self.styles, pod)?
                             .into_frame();
-                    let mut pos = pos;
+                    let mut pos = offset;
                     if self.is_rtl {
-                        // In the grid, cell colspans expand to the right,
-                        // so we're at the leftmost (lowest 'x') column
-                        // spanned by the cell. However, in RTL, cells
-                        // expand to the left. Therefore, without the
-                        // offset below, the cell's contents would be laid out
-                        // starting at its rightmost visual position and extend
-                        // over to unrelated cells to its right in RTL.
-                        // We avoid this by ensuring the rendered cell starts at
-                        // the very left of the cell, even with colspan > 1.
-                        let offset = -width + rcol;
-                        pos.x += offset;
+                        // In RTL cells expand to the left, thus the position
+                        // must additionally be offset by the cell's width.
+                        pos.x = self.width - (pos.x + width);
                     }
                     output.push_frame(pos, frame);
                 }
             }

-            pos.x += rcol;
+            offset.x += rcol;
         }

         Ok(output)

@@ -1481,8 +1467,8 @@ impl<'a> GridLayouter<'a> {
         pod.backlog = &heights[1..];

         // Layout the row.
-        let mut pos = Point::zero();
-        for (x, &rcol) in self.rcols.iter().enumerate().rev_if(self.is_rtl) {
+        let mut offset = Point::zero();
+        for (x, &rcol) in self.rcols.iter().enumerate() {
             if let Some(cell) = self.grid.cell(x, y) {
                 // Rowspans have a separate layout step
                 if cell.rowspan.get() == 1 {

@@ -1493,17 +1479,19 @@ impl<'a> GridLayouter<'a> {
                 let fragment =
                     layout_cell(cell, engine, disambiguator, self.styles, pod)?;
                 for (output, frame) in outputs.iter_mut().zip(fragment) {
-                    let mut pos = pos;
+                    let mut pos = offset;
                     if self.is_rtl {
-                        let offset = -width + rcol;
-                        pos.x += offset;
+                        // In RTL cells expand to the left, thus the
+                        // position must additionally be offset by the
+                        // cell's width.
+                        pos.x = self.width - (offset.x + width);
                     }
                     output.push_frame(pos, frame);
                 }
            }
        }

-        pos.x += rcol;
+        offset.x += rcol;
     }

     Ok(Fragment::frames(outputs))
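The RTL change above replaces per-cell offset arithmetic with one mirroring formula: with total grid width `w`, a cell whose LTR position starts at `dx` and spans `width` starts at `w - (dx + width)` in the mirrored grid. A sketch with concrete numbers:

```rust
fn mirrored_x(grid_width: f64, dx: f64, cell_width: f64) -> f64 {
    grid_width - (dx + cell_width)
}

fn main() {
    // Three 10pt columns. A cell in the first column (dx = 0, width = 10)
    // is drawn flush right in RTL, starting at 20pt.
    assert_eq!(mirrored_x(30.0, 0.0, 10.0), 20.0);
    // A colspan-2 cell starting at dx = 0 (width = 20) starts at 10pt,
    // so the fill no longer bleeds into unrelated columns.
    assert_eq!(mirrored_x(30.0, 0.0, 20.0), 10.0);
}
```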
@@ -3,7 +3,6 @@ use typst_library::engine::Engine;
 use typst_library::foundations::Resolve;
 use typst_library::layout::grid::resolve::Repeatable;
 use typst_library::layout::{Abs, Axes, Frame, Point, Region, Regions, Size, Sizing};
-use typst_utils::MaybeReverseIter;

 use super::layouter::{may_progress_with_offset, points, Row};
 use super::{layout_cell, Cell, GridLayouter};

@@ -23,6 +22,10 @@ pub struct Rowspan {
     /// specified for the parent cell's `breakable` field.
     pub is_effectively_unbreakable: bool,
     /// The horizontal offset of this rowspan in all regions.
+    ///
+    /// This is the offset from the text direction start, meaning that, on RTL
+    /// grids, this is the offset from the right of the grid, whereas, on LTR
+    /// grids, it is the offset from the left.
     pub dx: Abs,
     /// The vertical offset of this rowspan in the first region.
     pub dy: Abs,

@@ -118,10 +121,11 @@ impl GridLayouter<'_> {
             // Nothing to layout.
             return Ok(());
         };
-        let first_column = self.rcols[x];
         let cell = self.grid.cell(x, y).unwrap();
         let width = self.cell_spanned_width(cell, x);
-        let dx = if self.is_rtl { dx - width + first_column } else { dx };
+        // In RTL cells expand to the left, thus the position
+        // must additionally be offset by the cell's width.
+        let dx = if self.is_rtl { self.width - (dx + width) } else { dx };

         // Prepare regions.
         let size = Size::new(width, *first_height);

@@ -192,10 +196,8 @@ impl GridLayouter<'_> {
     /// Checks if a row contains the beginning of one or more rowspan cells.
     /// If so, adds them to the rowspans vector.
     pub fn check_for_rowspans(&mut self, disambiguator: usize, y: usize) {
-        // We will compute the horizontal offset of each rowspan in advance.
-        // For that reason, we must reverse the column order when using RTL.
-        let offsets = points(self.rcols.iter().copied().rev_if(self.is_rtl));
-        for (x, dx) in (0..self.rcols.len()).rev_if(self.is_rtl).zip(offsets) {
+        let offsets = points(self.rcols.iter().copied());
+        for (x, dx) in (0..self.rcols.len()).zip(offsets) {
             let Some(cell) = self.grid.cell(x, y) else {
                 continue;
             };
@@ -824,12 +824,42 @@ fn shape_segment<'a>(

         // Add the glyph to the shaped output.
         if info.glyph_id != 0 && is_covered(cluster) {
-            // Determine the text range of the glyph.
+            // Assume we have the following sequence of (glyph_id, cluster):
+            // [(120, 0), (80, 0), (3, 3), (755, 4), (69, 4), (424, 13),
+            //  (63, 13), (193, 25), (80, 25), (3, 31)]
+            //
+            // We then want the sequence of (glyph_id, text_range) to look as follows:
+            // [(120, 0..3), (80, 0..3), (3, 3..4), (755, 4..13), (69, 4..13),
+            //  (424, 13..25), (63, 13..25), (193, 25..31), (80, 25..31), (3, 31..x)]
+            //
+            // Each glyph in the same cluster should be assigned the full text
+            // range. This is necessary because only this way krilla can
+            // properly assign `ActualText` attributes in complex shaping
+            // scenarios.
+
+            // The start of the glyph's text range.
             let start = base + cluster;
-            let end = base
-                + if ltr { i.checked_add(1) } else { i.checked_sub(1) }
-                    .and_then(|last| infos.get(last))
-                    .map_or(text.len(), |info| info.cluster as usize);

+            // Determine the end of the glyph's text range.
+            let mut k = i;
+            let step: isize = if ltr { 1 } else { -1 };
+            let end = loop {
+                // If we've reached the end of the glyphs, the `end` of the
+                // range should be the end of the full text.
+                let Some((next, next_info)) = k
+                    .checked_add_signed(step)
+                    .and_then(|n| infos.get(n).map(|info| (n, info)))
+                else {
+                    break base + text.len();
+                };
+
+                // If the cluster doesn't match anymore, we've reached the end.
+                if next_info.cluster != info.cluster {
+                    break base + next_info.cluster as usize;
+                }
+
+                k = next;
+            };

             let c = text[cluster..].chars().next().unwrap();
             let script = c.script();
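A standalone sketch of the cluster-to-text-range rule described in the comment above, for the LTR case only (the real loop also walks backwards for RTL runs): every glyph in a cluster gets the cluster's full text range.

```rust
use std::ops::Range;

fn text_ranges(infos: &[(u32, usize)], text_len: usize) -> Vec<(u32, Range<usize>)> {
    infos
        .iter()
        .enumerate()
        .map(|(i, &(id, cluster))| {
            // Walk forward to the first glyph with a different cluster; its
            // cluster value is the end of this glyph's range (or the text's
            // end if there is no such glyph).
            let end = infos[i..]
                .iter()
                .find(|&&(_, c)| c != cluster)
                .map_or(text_len, |&(_, c)| c);
            (id, cluster..end)
        })
        .collect()
}

fn main() {
    // Prefix of the sequence from the comment above.
    let infos = [(120, 0), (80, 0), (3, 3), (755, 4), (69, 4)];
    let ranges = text_ranges(&infos, 13);
    assert_eq!(ranges[0], (120, 0..3)); // both cluster-0 glyphs get 0..3
    assert_eq!(ranges[1], (80, 0..3));
    assert_eq!(ranges[3], (755, 4..13));
}
```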
@@ -19,9 +19,11 @@ pub fn layout_accent(
     let mut base = ctx.layout_into_fragment(&elem.base, styles.chain(&cramped))?;

     // Try to replace a glyph with its dotless variant.
+    if elem.dotless(styles) {
         if let MathFragment::Glyph(glyph) = &mut base {
             glyph.make_dotless_form(ctx);
         }
+    }

     // Preserve class to preserve automatic spacing.
     let base_class = base.class();
@@ -1,4 +1,4 @@
-use typst_library::diag::{bail, SourceResult};
+use typst_library::diag::{bail, warning, SourceResult};
 use typst_library::foundations::{Content, Packed, Resolve, StyleChain};
 use typst_library::layout::{
     Abs, Axes, Em, FixedAlignment, Frame, FrameItem, Point, Ratio, Rel, Size,

@@ -9,7 +9,7 @@ use typst_library::visualize::{FillRule, FixedStroke, Geometry, LineCap, Shape};
 use typst_syntax::Span;

 use super::{
-    alignments, delimiter_alignment, stack, style_for_denominator, AlignmentResult,
+    alignments, delimiter_alignment, style_for_denominator, AlignmentResult,
     FrameFragment, GlyphFragment, LeftRightAlternator, MathContext, DELIM_SHORT_FALL,
 };

@@ -23,67 +23,23 @@ pub fn layout_vec(
     ctx: &mut MathContext,
     styles: StyleChain,
 ) -> SourceResult<()> {
     let delim = elem.delim(styles);
-    let frame = layout_vec_body(
+    let span = elem.span();
+
+    let column: Vec<&Content> = elem.children.iter().collect();
+    let frame = layout_body(
         ctx,
         styles,
-        &elem.children,
+        &[column],
         elem.align(styles),
-        elem.gap(styles),
         LeftRightAlternator::Right,
+        None,
+        Axes::with_y(elem.gap(styles)),
+        span,
+        "elements",
     )?;

-    layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), elem.span())
-}
-
-/// Lays out a [`MatElem`].
-#[typst_macros::time(name = "math.mat", span = elem.span())]
-pub fn layout_mat(
-    elem: &Packed<MatElem>,
-    ctx: &mut MathContext,
-    styles: StyleChain,
-) -> SourceResult<()> {
-    let augment = elem.augment(styles);
-    let rows = &elem.rows;
-
-    if let Some(aug) = &augment {
-        for &offset in &aug.hline.0 {
-            if offset == 0 || offset.unsigned_abs() >= rows.len() {
-                bail!(
-                    elem.span(),
-                    "cannot draw a horizontal line after row {} of a matrix with {} rows",
-                    if offset < 0 { rows.len() as isize + offset } else { offset },
-                    rows.len()
-                );
-            }
-        }
-
-        let ncols = rows.first().map_or(0, |row| row.len());
-
-        for &offset in &aug.vline.0 {
-            if offset == 0 || offset.unsigned_abs() >= ncols {
-                bail!(
-                    elem.span(),
-                    "cannot draw a vertical line after column {} of a matrix with {} columns",
-                    if offset < 0 { ncols as isize + offset } else { offset },
-                    ncols
-                );
-            }
-        }
-    }
-
-    let delim = elem.delim(styles);
-    let frame = layout_mat_body(
-        ctx,
-        styles,
-        rows,
-        elem.align(styles),
-        augment,
-        Axes::new(elem.column_gap(styles), elem.row_gap(styles)),
-        elem.span(),
-    )?;
-
-    layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), elem.span())
+    layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), span)
 }

 /// Lays out a [`CasesElem`].

@@ -93,60 +49,100 @@ pub fn layout_cases(
     ctx: &mut MathContext,
     styles: StyleChain,
 ) -> SourceResult<()> {
-    let delim = elem.delim(styles);
-    let frame = layout_vec_body(
+    let span = elem.span();
+
+    let column: Vec<&Content> = elem.children.iter().collect();
+    let frame = layout_body(
         ctx,
         styles,
-        &elem.children,
+        &[column],
         FixedAlignment::Start,
-        elem.gap(styles),
         LeftRightAlternator::None,
+        None,
+        Axes::with_y(elem.gap(styles)),
+        span,
+        "branches",
     )?;

+    let delim = elem.delim(styles);
     let (open, close) =
         if elem.reverse(styles) { (None, delim.close()) } else { (delim.open(), None) };

-    layout_delimiters(ctx, styles, frame, open, close, elem.span())
+    layout_delimiters(ctx, styles, frame, open, close, span)
 }

-/// Layout the inner contents of a vector.
-fn layout_vec_body(
+/// Lays out a [`MatElem`].
+#[typst_macros::time(name = "math.mat", span = elem.span())]
+pub fn layout_mat(
+    elem: &Packed<MatElem>,
     ctx: &mut MathContext,
     styles: StyleChain,
-    column: &[Content],
+) -> SourceResult<()> {
+    let span = elem.span();
+    let rows = &elem.rows;
+    let ncols = rows.first().map_or(0, |row| row.len());
+
+    let augment = elem.augment(styles);
+    if let Some(aug) = &augment {
+        for &offset in &aug.hline.0 {
+            if offset == 0 || offset.unsigned_abs() >= rows.len() {
+                bail!(
+                    span,
+                    "cannot draw a horizontal line after row {} of a matrix with {} rows",
+                    if offset < 0 { rows.len() as isize + offset } else { offset },
+                    rows.len()
+                );
+            }
+        }
+
+        for &offset in &aug.vline.0 {
+            if offset == 0 || offset.unsigned_abs() >= ncols {
+                bail!(
+                    span,
+                    "cannot draw a vertical line after column {} of a matrix with {} columns",
+                    if offset < 0 { ncols as isize + offset } else { offset },
+                    ncols
+                );
+            }
+        }
+    }
+
+    // Transpose rows of the matrix into columns.
+    let mut row_iters: Vec<_> = rows.iter().map(|i| i.iter()).collect();
+    let columns: Vec<Vec<_>> = (0..ncols)
+        .map(|_| row_iters.iter_mut().map(|i| i.next().unwrap()).collect())
+        .collect();
+
+    let frame = layout_body(
+        ctx,
+        styles,
+        &columns,
+        elem.align(styles),
+        LeftRightAlternator::Right,
+        augment,
+        Axes::new(elem.column_gap(styles), elem.row_gap(styles)),
+        span,
+        "cells",
+    )?;
+
+    let delim = elem.delim(styles);
+    layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), span)
+}
+
+/// Layout the inner contents of a matrix, vector, or cases.
+#[allow(clippy::too_many_arguments)]
+fn layout_body(
+    ctx: &mut MathContext,
+    styles: StyleChain,
+    columns: &[Vec<&Content>],
     align: FixedAlignment,
-    row_gap: Rel<Abs>,
     alternator: LeftRightAlternator,
-) -> SourceResult<Frame> {
-    let gap = row_gap.relative_to(ctx.region.size.y);
-
-    let denom_style = style_for_denominator(styles);
-    let mut flat = vec![];
-    for child in column {
-        // We allow linebreaks in cases and vectors, which are functionally
-        // identical to commas.
-        flat.extend(ctx.layout_into_run(child, styles.chain(&denom_style))?.rows());
-    }
-    // We pad ascent and descent with the ascent and descent of the paren
-    // to ensure that normal vectors are aligned with others unless they are
-    // way too big.
-    let paren =
-        GlyphFragment::new(ctx, styles.chain(&denom_style), '(', Span::detached());
-    Ok(stack(flat, align, gap, 0, alternator, Some((paren.ascent, paren.descent))))
-}
-
-/// Layout the inner contents of a matrix.
-fn layout_mat_body(
-    ctx: &mut MathContext,
-    styles: StyleChain,
-    rows: &[Vec<Content>],
-    align: FixedAlignment,
     augment: Option<Augment<Abs>>,
     gap: Axes<Rel<Abs>>,
     span: Span,
+    children: &str,
 ) -> SourceResult<Frame> {
-    let ncols = rows.first().map_or(0, |row| row.len());
-    let nrows = rows.len();
+    let nrows = columns.first().map_or(0, |col| col.len());
+    let ncols = columns.len();
     if ncols == 0 || nrows == 0 {
         return Ok(Frame::soft(Size::zero()));
     }

@@ -178,16 +174,11 @@ fn layout_body(
     // Before the full matrix body can be laid out, the
     // individual cells must first be independently laid out
     // so we can ensure alignment across rows and columns.
+    let mut cols = vec![vec![]; ncols];

     // This variable stores the maximum ascent and descent for each row.
     let mut heights = vec![(Abs::zero(), Abs::zero()); nrows];

-    // We want to transpose our data layout to columns
-    // before final layout. For efficiency, the columns
-    // variable is set up here and newly generated
-    // individual cells are then added to it.
-    let mut cols = vec![vec![]; ncols];
-
     let denom_style = style_for_denominator(styles);
     // We pad ascent and descent with the ascent and descent of the paren
     // to ensure that normal matrices are aligned with others unless they are

@@ -195,10 +186,22 @@ fn layout_body(
     let paren =
         GlyphFragment::new(ctx, styles.chain(&denom_style), '(', Span::detached());

-    for (row, (ascent, descent)) in rows.iter().zip(&mut heights) {
-        for (cell, col) in row.iter().zip(&mut cols) {
+    for (column, col) in columns.iter().zip(&mut cols) {
+        for (cell, (ascent, descent)) in column.iter().zip(&mut heights) {
+            let cell_span = cell.span();
             let cell = ctx.layout_into_run(cell, styles.chain(&denom_style))?;
+
+            // We ignore linebreaks in the cells as we can't differentiate
+            // alignment points for the whole body from ones for a specific
+            // cell, and multiline cells don't quite make sense at the moment.
+            if cell.is_multiline() {
+                ctx.engine.sink.warn(warning!(
+                    cell_span,
+                    "linebreaks are ignored in {}", children;
+                    hint: "use commas instead to separate each line"
+                ));
+            }
+
             ascent.set_max(cell.ascent().max(paren.ascent));
             descent.set_max(cell.descent().max(paren.descent));

@@ -222,7 +225,7 @@ fn layout_body(
     let mut y = Abs::zero();

     for (cell, &(ascent, descent)) in col.into_iter().zip(&heights) {
-        let cell = cell.into_line_frame(&points, LeftRightAlternator::Right);
+        let cell = cell.into_line_frame(&points, alternator);
         let pos = Point::new(
             if points.is_empty() {
                 x + align.position(rcol - cell.width())
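The transpose trick used by the new `layout_mat` above, in isolation: one iterator per row, each advanced once per output column. It assumes all rows have equal length (which the matrix element guarantees in typst; this sketch would panic otherwise).

```rust
fn transpose<T: Copy>(rows: &[Vec<T>]) -> Vec<Vec<T>> {
    let ncols = rows.first().map_or(0, |row| row.len());
    // One iterator per row; column j is built from the j-th item of each.
    let mut row_iters: Vec<_> = rows.iter().map(|row| row.iter()).collect();
    (0..ncols)
        .map(|_| row_iters.iter_mut().map(|it| *it.next().unwrap()).collect())
        .collect()
}

fn main() {
    let rows = vec![vec![1, 2, 3], vec![4, 5, 6]];
    assert_eq!(transpose(&rows), vec![vec![1, 4], vec![2, 5], vec![3, 6]]);
}
```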
@@ -117,7 +117,6 @@ pub fn stack(
     gap: Abs,
     baseline: usize,
     alternator: LeftRightAlternator,
-    minimum_ascent_descent: Option<(Abs, Abs)>,
 ) -> Frame {
     let AlignmentResult { points, width } = alignments(&rows);
     let rows: Vec<_> = rows

@@ -125,13 +124,9 @@ pub fn stack(
         .map(|row| row.into_line_frame(&points, alternator))
         .collect();

-    let padded_height = |height: Abs| {
-        height.max(minimum_ascent_descent.map_or(Abs::zero(), |(a, d)| a + d))
-    };
-
     let mut frame = Frame::soft(Size::new(
         width,
-        rows.iter().map(|row| padded_height(row.height())).sum::<Abs>()
+        rows.iter().map(|row| row.height()).sum::<Abs>()
             + rows.len().saturating_sub(1) as f64 * gap,
     ));

@@ -142,14 +137,11 @@ pub fn stack(
         } else {
             Abs::zero()
         };
-        let ascent_padded_part = minimum_ascent_descent
-            .map_or(Abs::zero(), |(a, _)| (a - row.ascent()))
-            .max(Abs::zero());
-        let pos = Point::new(x, y + ascent_padded_part);
+        let pos = Point::new(x, y);
         if i == baseline {
-            frame.set_baseline(y + row.baseline() + ascent_padded_part);
+            frame.set_baseline(y + row.baseline());
         }
-        y += padded_height(row.height()) + gap;
+        y += row.height() + gap;
         frame.push_frame(pos, row);
     }

@@ -312,14 +312,8 @@ fn layout_underoverspreader(
         }
     };

-    let frame = stack(
-        rows,
-        FixedAlignment::Center,
-        gap,
-        baseline,
-        LeftRightAlternator::Right,
-        None,
-    );
+    let frame =
+        stack(rows, FixedAlignment::Center, gap, baseline, LeftRightAlternator::Right);
     ctx.push(FrameFragment::new(styles, frame).with_class(body_class));

     Ok(())
@@ -29,6 +29,7 @@ csv = { workspace = true }
 ecow = { workspace = true }
 flate2 = { workspace = true }
 fontdb = { workspace = true }
+glidesort = { workspace = true }
 hayagriva = { workspace = true }
 icu_properties = { workspace = true }
 icu_provider = { workspace = true }
@ -172,17 +172,29 @@ impl Array {
|
||||
}
|
||||
|
||||
/// Returns the first item in the array. May be used on the left-hand side
|
||||
/// of an assignment. Fails with an error if the array is empty.
|
||||
/// an assignment. Returns the default value if the array is empty
|
||||
/// or fails with an error is no default value was specified.
|
||||
#[func]
|
||||
pub fn first(&self) -> StrResult<Value> {
|
||||
self.0.first().cloned().ok_or_else(array_is_empty)
|
||||
pub fn first(
|
||||
&self,
|
||||
/// A default value to return if the array is empty.
|
||||
#[named]
|
||||
default: Option<Value>,
|
||||
) -> StrResult<Value> {
|
||||
self.0.first().cloned().or(default).ok_or_else(array_is_empty)
|
||||
}
|
||||
|
||||
/// Returns the last item in the array. May be used on the left-hand side of
|
||||
/// an assignment. Fails with an error if the array is empty.
|
||||
/// an assignment. Returns the default value if the array is empty
|
||||
/// or fails with an error is no default value was specified.
|
||||
#[func]
|
||||
pub fn last(&self) -> StrResult<Value> {
|
||||
self.0.last().cloned().ok_or_else(array_is_empty)
|
||||
pub fn last(
|
||||
&self,
|
||||
/// A default value to return if the array is empty.
|
||||
#[named]
|
||||
default: Option<Value>,
|
||||
) -> StrResult<Value> {
|
||||
self.0.last().cloned().or(default).ok_or_else(array_is_empty)
|
||||
}
|
||||
|
||||
/// Returns the item at the specified index in the array. May be used on the

@ -796,7 +808,7 @@ impl Array {
/// function. The sorting algorithm used is stable.
///
/// Returns an error if two values could not be compared or if the key
/// function (if given) yields an error.
/// or comparison function (if given) yields an error.
///
/// To sort according to multiple criteria at once, e.g. in case of equality
/// between some criteria, the key function can return an array. The results

@ -820,17 +832,116 @@ impl Array {
/// determine the keys to sort by.
#[named]
key: Option<Func>,
/// If given, uses this function to compare elements in the array.
///
/// This function should return a boolean: `{true}` indicates that the
/// elements are in order, while `{false}` indicates that they should be
/// swapped. To keep the sort stable, if the two elements are equal, the
/// function should return `{true}`.
///
/// If this function does not order the elements properly (e.g., by
/// returning `{false}` for both `{(x, y)}` and `{(y, x)}`, or for
/// `{(x, x)}`), the resulting array will be in unspecified order.
///
/// When used together with `key`, `by` will be passed the keys instead
/// of the elements.
///
/// ```example
/// #(
/// "sorted",
/// "by",
/// "decreasing",
/// "length",
/// ).sorted(
/// key: s => s.len(),
/// by: (l, r) => l >= r,
/// )
/// ```
#[named]
by: Option<Func>,
) -> SourceResult<Array> {
match by {
Some(by) => {
let mut are_in_order = |mut x, mut y| {
if let Some(f) = &key {
// We rely on `comemo`'s memoization of function
// evaluation to not excessively reevaluate the key.
x = f.call(engine, context, [x])?;
y = f.call(engine, context, [y])?;
}
match by.call(engine, context, [x, y])? {
Value::Bool(b) => Ok(b),
x => {
bail!(
span,
"expected boolean from `by` function, got {}",
x.ty(),
)
}
}
};
// If a comparison function is provided, we use `glidesort`
// instead of the standard library sorting algorithm to prevent
// panics in case the comparison function does not define a
// valid order (see https://github.com/typst/typst/pull/5627).
let mut result = Ok(());
let mut vec = self.0;
let mut vec = self.0.into_iter().enumerate().collect::<Vec<_>>();
glidesort::sort_by(&mut vec, |(i, x), (j, y)| {
// Because we use booleans for the comparison function, in
// order to keep the sort stable, we need to compare in the
// right order.
if i < j {
// If `x` and `y` appear in this order in the original
// array, then we should change their order (i.e.,
// return `Ordering::Greater`) iff `y` is strictly less
// than `x` (i.e., `compare(x, y)` returns `false`).
// Otherwise, we should keep them in the same order
// (i.e., return `Ordering::Less`).
match are_in_order(x.clone(), y.clone()) {
Ok(false) => Ordering::Greater,
Ok(true) => Ordering::Less,
Err(err) => {
if result.is_ok() {
result = Err(err);
}
Ordering::Equal
}
}
} else {
// If `x` and `y` appear in the opposite order in the
// original array, then we should change their order
// (i.e., return `Ordering::Less`) iff `x` is strictly
// less than `y` (i.e., `compare(y, x)` returns
// `false`). Otherwise, we should keep them in the same
// order (i.e., return `Ordering::Greater`).
match are_in_order(y.clone(), x.clone()) {
Ok(false) => Ordering::Less,
Ok(true) => Ordering::Greater,
Err(err) => {
if result.is_ok() {
result = Err(err);
}
Ordering::Equal
}
}
}
});
result.map(|()| vec.into_iter().map(|(_, x)| x).collect())
}

None => {
let mut key_of = |x: Value| match &key {
// NOTE: We are relying on `comemo`'s memoization of function
// evaluation to not excessively reevaluate the `key`.
// We rely on `comemo`'s memoization of function evaluation
// to not excessively reevaluate the key.
Some(f) => f.call(engine, context, [x]),
None => Ok(x),
};
// If no comparison function is provided, we know the order is
// valid, so we can use the standard library sort and prevent an
// extra allocation.
let mut result = Ok(());
let mut vec = self.0;
vec.make_mut().sort_by(|a, b| {
// Until we get `try` blocks :)
match (key_of(a.clone()), key_of(b.clone())) {
(Ok(a), Ok(b)) => ops::compare(&a, &b).unwrap_or_else(|err| {
if result.is_ok() {

@ -846,7 +957,9 @@ impl Array {
}
}
});
result.map(|_| vec.into())
result.map(|()| vec.into())
}
}
}
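The index-pairing trick above deserves a second look: because `by` only yields a boolean, the comparator has to remember which element originally came first to stay stable. A stripped-down sketch of the same technique, assuming only the `glidesort` crate that the diff already pulls in:

```rust
use std::cmp::Ordering;

/// Stable sort from a boolean `in_order(a, b)` predicate, via index pairing.
fn sort_by_bool<T>(
    items: Vec<T>,
    mut in_order: impl FnMut(&T, &T) -> bool,
) -> Vec<T> {
    // Remember each element's original index so equal elements keep their
    // relative order even though the predicate only yields a boolean.
    let mut vec: Vec<(usize, T)> = items.into_iter().enumerate().collect();
    glidesort::sort_by(&mut vec, |(i, x), (j, y)| {
        // Always query the predicate in original-array order, then translate
        // the boolean into an `Ordering`.
        if i < j {
            if in_order(x, y) { Ordering::Less } else { Ordering::Greater }
        } else if in_order(y, x) {
            Ordering::Greater
        } else {
            Ordering::Less
        }
    });
    vec.into_iter().map(|(_, x)| x).collect()
}

fn main() {
    let sorted = sort_by_bool(vec![3, 1, 2], |a, b| a <= b);
    assert_eq!(sorted, vec![1, 2, 3]);
}
```

Even if the predicate is inconsistent, glidesort merely produces an unspecified order instead of panicking, which is exactly why the diff switches away from the standard library sort in this branch.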
/// Deduplicates all items in the array.

@ -77,6 +77,7 @@ pub use {
indexmap::IndexMap,
};

use comemo::TrackedMut;
use ecow::EcoString;
use typst_syntax::Spanned;

@ -297,5 +298,14 @@ pub fn eval(
for (key, value) in dict {
scope.bind(key.into(), Binding::new(value, span));
}
(engine.routines.eval_string)(engine.routines, engine.world, &text, span, mode, scope)

(engine.routines.eval_string)(
engine.routines,
engine.world,
TrackedMut::reborrow_mut(&mut engine.sink),
&text,
span,
mode,
scope,
)
}

@ -7,7 +7,7 @@ use typst_syntax::FileId;
use crate::diag::{bail, DeprecationSink, StrResult};
use crate::foundations::{repr, ty, Content, Scope, Value};

/// An module of definitions.
/// A module of definitions.
///
/// A module
/// - be built-in

@ -39,11 +39,25 @@ use crate::foundations::{
/// #type(image("glacier.jpg")).
/// ```
///
/// The type of `10` is `int`. Now, what is the type of `int` or even `type`?
/// The type of `{10}` is `int`. Now, what is the type of `int` or even `type`?
/// ```example
/// #type(int) \
/// #type(type)
/// ```
///
/// Unlike other types like `int`, [none] and [auto] do not have a name
/// representing them. To test if a value is one of these, compare your value to
/// them directly, e.g.:
/// ```example
/// #let val = none
/// #if val == none [
/// Yep, it's none.
/// ]
/// ```
///
/// Note that `type` will return [`content`] for all document elements. To
/// programmatically determine which kind of content you are dealing with, see
/// [`content.func`].
#[ty(scope, cast)]
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct Type(Static<NativeTypeData>);

@ -22,7 +22,8 @@ use crate::layout::{BlockElem, Size};
/// #let text = lorem(30)
/// #layout(size => [
/// #let (height,) = measure(
/// block(width: size.width, text),
/// width: size.width,
/// text,
/// )
/// This text is #height high with
/// the current page width: \

@ -8,15 +8,35 @@ use crate::foundations::{repr, ty, Repr};

/// A ratio of a whole.
///
/// Written as a number, followed by a percent sign.
/// A ratio is written as a number, followed by a percent sign. Ratios most
/// often appear as part of a [relative length]($relative), to specify the size
/// of some layout element relative to the page or some container.
///
/// # Example
/// ```example
/// #set align(center)
/// #scale(x: 150%)[
/// Scaled apart.
/// ]
/// #rect(width: 25%)
/// ```
///
/// However, they can also describe any other property that is relative to some
/// base, e.g. an amount of [horizontal scaling]($scale.x) or the
/// [height of parentheses]($math.lr.size) relative to the height of the content
/// they enclose.
///
/// # Scripting
/// Within your own code, you can use ratios as you like. You can multiply them
/// with various other types as shown below:
///
/// | Multiply by | Example | Result |
/// |-----------------|-------------------------|-----------------|
/// | [`ratio`] | `{27% * 10%}` | `{2.7%}` |
/// | [`length`] | `{27% * 100pt}` | `{27pt}` |
/// | [`relative`] | `{27% * (10% + 100pt)}` | `{2.7% + 27pt}` |
/// | [`angle`] | `{27% * 100deg}` | `{27deg}` |
/// | [`int`] | `{27% * 2}` | `{54%}` |
/// | [`float`] | `{27% * 0.37037}` | `{10%}` |
/// | [`fraction`] | `{27% * 3fr}` | `{0.81fr}` |
///
/// When ratios are displayed in the document, they are rounded to two
/// significant digits for readability.
#[ty(cast)]
#[derive(Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct Ratio(Scalar);
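The multiplication table above is ordinary scalar arithmetic once a ratio is read as a factor. A tiny illustrative sketch with made-up stand-in types (not Typst's real `Ratio`/`Length`):

```rust
/// Illustrative stand-ins for Typst's `ratio` and `length` types.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Ratio(f64); // 25% == Ratio(0.25)

#[derive(Clone, Copy, Debug, PartialEq)]
struct Pt(f64);

impl std::ops::Mul<Pt> for Ratio {
    type Output = Pt;
    // A ratio times a length scales the length, as in the table above.
    fn mul(self, rhs: Pt) -> Pt {
        Pt(self.0 * rhs.0)
    }
}

fn main() {
    assert_eq!(Ratio(0.25) * Pt(100.0), Pt(25.0)); // 25% * 100pt == 25pt
}
```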
@ -14,17 +14,58 @@ use crate::layout::{Abs, Em, Length, Ratio};
/// addition and subtraction of a length and a ratio. Wherever a relative length
/// is expected, you can also use a bare length or ratio.
///
/// # Example
/// ```example
/// #rect(width: 100% - 50pt)
/// # Relative to the page
/// A common use case is setting the width or height of a layout element (e.g.,
/// [block], [rect], etc.) as a certain percentage of the width of the page.
/// Here, the rectangle's width is set to `{25%}`, so it takes up one fourth of
/// the page's _inner_ width (the width minus margins).
///
/// #(100% - 50pt).length \
/// #(100% - 50pt).ratio
/// ```example
/// #rect(width: 25%)
/// ```
///
/// Bare lengths or ratios are always valid where relative lengths are expected,
/// but the two can also be freely mixed:
/// ```example
/// #rect(width: 25% + 1cm)
/// ```
///
/// If you're trying to size an element so that it takes up the page's _full_
/// width, you have a few options (this highly depends on your exact use case):
///
/// 1. Set page margins to `{0pt}` (`[#set page(margin: 0pt)]`)
/// 2. Multiply the ratio by the known full page width (`{21cm * 69%}`)
/// 3. Use padding which will negate the margins (`[#pad(x: -2.5cm, ...)]`)
/// 4. Use the page [background](page.background) or
/// [foreground](page.foreground) field as those don't take margins into
/// account (note that it will render the content outside of the document
/// flow, see [place] to control the content position)
///
/// # Relative to a container
/// When a layout element (e.g. a [rect]) is nested in another layout container
/// (e.g. a [block]) instead of being a direct descendant of the page, relative
/// widths become relative to the container:
///
/// ```example
/// #block(
/// width: 100pt,
/// fill: aqua,
/// rect(width: 50%),
/// )
/// ```
///
/// # Scripting
/// You can multiply relative lengths by [ratios]($ratio), [integers]($int), and
/// [floats]($float).
///
/// A relative length has the following fields:
/// - `length`: Its length component.
/// - `ratio`: Its ratio component.
///
/// ```example
/// #(100% - 50pt).length \
/// #(100% - 50pt).ratio
/// ```
#[ty(cast, name = "relative", title = "Relative Length")]
#[derive(Default, Copy, Clone, Eq, PartialEq, Hash)]
pub struct Rel<T: Numeric = Length> {
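A relative length only becomes an absolute value once a base size is known. A dependency-free sketch of that resolution step (illustrative type, not Typst's `Rel`):

```rust
/// A ratio part plus an absolute part, resolved against a container size.
#[derive(Clone, Copy)]
struct RelLen {
    ratio: f64,  // 100% == 1.0
    abs_pt: f64, // absolute component in points
}

impl RelLen {
    fn resolve(self, base_pt: f64) -> f64 {
        self.ratio * base_pt + self.abs_pt
    }
}

fn main() {
    // 100% - 50pt on a 595pt-wide container.
    let rel = RelLen { ratio: 1.0, abs_pt: -50.0 };
    assert_eq!(rel.resolve(595.0), 545.0);
}
```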
@ -307,6 +307,20 @@ impl Transform {
Self { sx, sy, ..Self::identity() }
}

/// A scale transform at a specific position.
pub fn scale_at(sx: Ratio, sy: Ratio, px: Abs, py: Abs) -> Self {
Self::translate(px, py)
.pre_concat(Self::scale(sx, sy))
.pre_concat(Self::translate(-px, -py))
}

/// A rotate transform at a specific position.
pub fn rotate_at(angle: Angle, px: Abs, py: Abs) -> Self {
Self::translate(px, py)
.pre_concat(Self::rotate(angle))
.pre_concat(Self::translate(-px, -py))
}

/// A rotate transform.
pub fn rotate(angle: Angle) -> Self {
let cos = Ratio::new(angle.cos());
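Both new constructors are the classic conjugation translate(p) ∘ op ∘ translate(-p): move the pivot to the origin, apply the operation, move back, so the pivot is a fixed point. A self-contained f64 sketch verifying this (hypothetical `Affine` helper, not Typst's `Transform`):

```rust
/// 2D affine matrix [a c e; b d f] (hypothetical type for this sketch).
#[derive(Clone, Copy)]
struct Affine { a: f64, b: f64, c: f64, d: f64, e: f64, f: f64 }

impl Affine {
    fn translate(tx: f64, ty: f64) -> Self {
        Affine { a: 1.0, b: 0.0, c: 0.0, d: 1.0, e: tx, f: ty }
    }
    fn scale(sx: f64, sy: f64) -> Self {
        Affine { a: sx, b: 0.0, c: 0.0, d: sy, e: 0.0, f: 0.0 }
    }
    /// `self` composed with `rhs`, applying `rhs` first (like `pre_concat`).
    fn pre_concat(self, rhs: Affine) -> Affine {
        Affine {
            a: self.a * rhs.a + self.c * rhs.b,
            b: self.b * rhs.a + self.d * rhs.b,
            c: self.a * rhs.c + self.c * rhs.d,
            d: self.b * rhs.c + self.d * rhs.d,
            e: self.a * rhs.e + self.c * rhs.f + self.e,
            f: self.b * rhs.e + self.d * rhs.f + self.f,
        }
    }
    fn apply(self, x: f64, y: f64) -> (f64, f64) {
        (self.a * x + self.c * y + self.e, self.b * x + self.d * y + self.f)
    }
}

fn main() {
    let (px, py) = (10.0, 20.0);
    let m = Affine::translate(px, py)
        .pre_concat(Affine::scale(2.0, 3.0))
        .pre_concat(Affine::translate(-px, -py));
    // The pivot is a fixed point of the composed transform.
    assert_eq!(m.apply(px, py), (px, py));
}
```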
@ -13,8 +13,8 @@ use crate::math::Mathy;
/// ```
#[elem(Mathy)]
pub struct AccentElem {
/// The base to which the accent is applied.
/// May consist of multiple letters.
/// The base to which the accent is applied. May consist of multiple
/// letters.
///
/// ```example
/// $arrow(A B C)$

@ -51,9 +51,24 @@ pub struct AccentElem {
pub accent: Accent,

/// The size of the accent, relative to the width of the base.
///
/// ```example
/// $dash(A, size: #150%)$
/// ```
#[resolve]
#[default(Rel::one())]
pub size: Rel<Length>,

/// Whether to remove the dot on top of lowercase i and j when adding a top
/// accent.
///
/// This enables the `dtls` OpenType feature.
///
/// ```example
/// $hat(dotless: #false, i)$
/// ```
#[default(true)]
pub dotless: bool,
}

/// An accent character.

@ -103,11 +118,18 @@ macro_rules! accents {
/// The size of the accent, relative to the width of the base.
#[named]
size: Option<Rel<Length>>,
/// Whether to remove the dot on top of lowercase i and j when
/// adding a top accent.
#[named]
dotless: Option<bool>,
) -> Content {
let mut accent = AccentElem::new(base, Accent::new($primary));
if let Some(size) = size {
accent = accent.with_size(size);
}
if let Some(dotless) = dotless {
accent = accent.with_dotless(dotless);
}
accent.pack()
}
)+

@ -15,7 +15,7 @@ use crate::math::Mathy;
/// # Syntax
/// This function also has dedicated syntax: Use a slash to turn neighbouring
/// expressions into a fraction. Multiple atoms can be grouped into a single
/// expression using round grouping parenthesis. Such parentheses are removed
/// expression using round grouping parentheses. Such parentheses are removed
/// from the output, but you can nest multiple to force them.
#[elem(title = "Fraction", Mathy)]
pub struct FracElem {

@ -6,7 +6,7 @@ use std::num::NonZeroUsize;
use std::path::Path;
use std::sync::{Arc, LazyLock};

use comemo::Tracked;
use comemo::{Track, Tracked};
use ecow::{eco_format, EcoString, EcoVec};
use hayagriva::archive::ArchivedStyle;
use hayagriva::io::BibLaTeXError;

@ -20,7 +20,7 @@ use typst_syntax::{Span, Spanned};
use typst_utils::{Get, ManuallyHash, NonZeroExt, PicoStr};

use crate::diag::{bail, error, At, FileError, HintedStrResult, SourceResult, StrResult};
use crate::engine::Engine;
use crate::engine::{Engine, Sink};
use crate::foundations::{
elem, Bytes, CastInfo, Content, Derived, FromValue, IntoValue, Label, NativeElement,
OneOrMultiple, Packed, Reflect, Scope, Show, ShowSet, Smart, StyleChain, Styles,

@ -999,6 +999,8 @@ impl ElemRenderer<'_> {
(self.routines.eval_string)(
self.routines,
self.world,
// TODO: propagate warnings
Sink::new().track_mut(),
math,
self.span,
EvalMode::Math,

@ -259,10 +259,11 @@ impl Show for Packed<EnumElem> {
.spanned(self.span());

if tight {
let leading = ParElem::leading_in(styles);
let spacing =
VElem::new(leading.into()).with_weak(true).with_attach(true).pack();
realized = spacing + realized;
let spacing = self
.spacing(styles)
.unwrap_or_else(|| ParElem::leading_in(styles).into());
let v = VElem::new(spacing.into()).with_weak(true).with_attach(true).pack();
realized = v + realized;
}

Ok(realized)

@ -457,7 +457,7 @@ impl Outlinable for Packed<FigureElem> {
/// customize the appearance of captions for all figures or figures of a
/// specific kind.
///
/// In addition to its `pos` and `body`, the `caption` also provides the
/// In addition to its `position` and `body`, the `caption` also provides the
/// figure's `kind`, `supplement`, `counter`, and `numbering` as fields. These
/// parts can be used in [`where`]($function.where) selectors and show rules to
/// build a completely custom caption.

@ -166,10 +166,11 @@ impl Show for Packed<ListElem> {
.spanned(self.span());

if tight {
let leading = ParElem::leading_in(styles);
let spacing =
VElem::new(leading.into()).with_weak(true).with_attach(true).pack();
realized = spacing + realized;
let spacing = self
.spacing(styles)
.unwrap_or_else(|| ParElem::leading_in(styles).into());
let v = VElem::new(spacing.into()).with_weak(true).with_attach(true).pack();
realized = v + realized;
}

Ok(realized)

@ -189,13 +189,15 @@ impl Show for Packed<TermsElem> {
.styled(TermsElem::set_within(true));

if tight {
let leading = ParElem::leading_in(styles);
let spacing = VElem::new(leading.into())
let spacing = self
.spacing(styles)
.unwrap_or_else(|| ParElem::leading_in(styles).into());
let v = VElem::new(spacing.into())
.with_weak(true)
.with_attach(true)
.pack()
.spanned(span);
realized = spacing + realized;
realized = v + realized;
}

Ok(realized)

@ -55,6 +55,7 @@ routines! {
fn eval_string(
routines: &Routines,
world: Tracked<dyn World + '_>,
sink: TrackedMut<Sink>,
string: &str,
span: Span,
mode: EvalMode,
@ -14,7 +14,7 @@ macro_rules! translation {
};
}

const TRANSLATIONS: [(&str, &str); 38] = [
const TRANSLATIONS: [(&str, &str); 39] = [
translation!("ar"),
translation!("bg"),
translation!("ca"),

@ -31,6 +31,7 @@ const TRANSLATIONS: [(&str, &str); 38] = [
translation!("el"),
translation!("he"),
translation!("hu"),
translation!("id"),
translation!("is"),
translation!("it"),
translation!("ja"),

@ -82,6 +83,7 @@ impl Lang {
pub const HEBREW: Self = Self(*b"he ", 2);
pub const HUNGARIAN: Self = Self(*b"hu ", 2);
pub const ICELANDIC: Self = Self(*b"is ", 2);
pub const INDONESIAN: Self = Self(*b"id ", 2);
pub const ITALIAN: Self = Self(*b"it ", 2);
pub const JAPANESE: Self = Self(*b"ja ", 2);
pub const LATIN: Self = Self(*b"la ", 2);

@ -42,7 +42,7 @@ use ttf_parser::Tag;
use typst_syntax::Spanned;
use typst_utils::singleton;

use crate::diag::{bail, warning, HintedStrResult, SourceResult};
use crate::diag::{bail, warning, HintedStrResult, SourceResult, StrResult};
use crate::engine::Engine;
use crate::foundations::{
cast, dict, elem, Args, Array, Cast, Construct, Content, Dict, Fold, IntoValue,

@ -891,9 +891,21 @@ cast! {
}

/// Font family fallback list.
///
/// Must contain at least one font.
#[derive(Debug, Default, Clone, PartialEq, Hash)]
pub struct FontList(pub Vec<FontFamily>);

impl FontList {
pub fn new(fonts: Vec<FontFamily>) -> StrResult<Self> {
if fonts.is_empty() {
bail!("font fallback list must not be empty")
} else {
Ok(Self(fonts))
}
}
}

impl<'a> IntoIterator for &'a FontList {
type IntoIter = std::slice::Iter<'a, FontFamily>;
type Item = &'a FontFamily;

@ -911,7 +923,7 @@ cast! {
self.0.into_value()
},
family: FontFamily => Self(vec![family]),
values: Array => Self(values.into_iter().map(|v| v.cast()).collect::<HintedStrResult<_>>()?),
values: Array => Self::new(values.into_iter().map(|v| v.cast()).collect::<HintedStrResult<_>>()?)?,
}

/// Resolve a prioritized iterator over the font families.
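`FontList::new` turns a would-be silent misconfiguration (an empty fallback list) into a catchable error at construction time, and the `cast!` arm now routes through it. The same guard pattern in a dependency-free sketch (illustrative names):

```rust
#[derive(Debug)]
struct NonEmptyList<T>(Vec<T>);

impl<T> NonEmptyList<T> {
    /// Validating constructor: an empty fallback list is rejected up front.
    fn new(items: Vec<T>) -> Result<Self, &'static str> {
        if items.is_empty() {
            Err("font fallback list must not be empty")
        } else {
            Ok(Self(items))
        }
    }
}

fn main() {
    assert!(NonEmptyList::new(vec!["Libertinus Serif"]).is_ok());
    assert!(NonEmptyList::<&str>::new(vec![]).is_err());
}
```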
@ -238,7 +238,7 @@ impl<'s> SmartQuotes<'s> {
"cs" | "de" | "et" | "is" | "lt" | "lv" | "sk" | "sl" => low_high,
"da" => ("‘", "’", "“", "”"),
"fr" | "ru" if alternative => default,
"fr" => ("‹\u{00A0}", "\u{00A0}›", "«\u{00A0}", "\u{00A0}»"),
"fr" => ("“", "”", "«\u{202F}", "\u{202F}»"),
"fi" | "sv" if alternative => ("’", "’", "»", "»"),
"bs" | "fi" | "sv" => ("’", "’", "”", "”"),
"it" if alternative => default,

@ -148,11 +148,11 @@ static TO_SRGB: LazyLock<qcms::Transform> = LazyLock::new(|| {
/// | `magma` | A black to purple to yellow color map. |
/// | `plasma` | A purple to pink to yellow color map. |
/// | `rocket` | A black to red to white color map. |
/// | `mako` | A black to teal to yellow color map. |
/// | `mako` | A black to teal to white color map. |
/// | `vlag` | A light blue to white to red color map. |
/// | `icefire` | A light teal to black to yellow color map. |
/// | `icefire` | A light teal to black to orange color map. |
/// | `flare` | An orange to purple color map that is perceptually uniform. |
/// | `crest` | A blue to white to red color map. |
/// | `crest` | A light green to blue color map. |
///
/// Some popular presets are not included because they are not available under a
/// free licence. Others, like

@ -10,6 +10,8 @@ use crate::foundations::{
use crate::layout::{Abs, Axes, BlockElem, Length, Point, Rel, Size};
use crate::visualize::{FillRule, Paint, Stroke};

use super::FixedStroke;

/// A curve consisting of movements, lines, and Bézier segments.
///
/// At any point in time, there is a conceptual pen or cursor.

@ -530,3 +532,65 @@ impl Curve {
Size::new(max_x - min_x, max_y - min_y)
}
}

impl Curve {
fn to_kurbo(&self) -> impl Iterator<Item = kurbo::PathEl> + '_ {
use kurbo::PathEl;

self.0.iter().map(|item| match *item {
CurveItem::Move(point) => PathEl::MoveTo(point_to_kurbo(point)),
CurveItem::Line(point) => PathEl::LineTo(point_to_kurbo(point)),
CurveItem::Cubic(point, point1, point2) => PathEl::CurveTo(
point_to_kurbo(point),
point_to_kurbo(point1),
point_to_kurbo(point2),
),
CurveItem::Close => PathEl::ClosePath,
})
}

/// When this curve is interpreted as a clip mask, would it contain `point`?
pub fn contains(&self, fill_rule: FillRule, needle: Point) -> bool {
let kurbo = kurbo::BezPath::from_vec(self.to_kurbo().collect());
let windings = kurbo::Shape::winding(&kurbo, point_to_kurbo(needle));
match fill_rule {
FillRule::NonZero => windings != 0,
FillRule::EvenOdd => windings % 2 != 0,
}
}

/// When this curve is stroked with `stroke`, would the stroke contain
/// `point`?
pub fn stroke_contains(&self, stroke: &FixedStroke, needle: Point) -> bool {
let width = stroke.thickness.to_raw();
let cap = match stroke.cap {
super::LineCap::Butt => kurbo::Cap::Butt,
super::LineCap::Round => kurbo::Cap::Round,
super::LineCap::Square => kurbo::Cap::Square,
};
let join = match stroke.join {
super::LineJoin::Miter => kurbo::Join::Miter,
super::LineJoin::Round => kurbo::Join::Round,
super::LineJoin::Bevel => kurbo::Join::Bevel,
};
let miter_limit = stroke.miter_limit.get();
let mut style = kurbo::Stroke::new(width)
.with_caps(cap)
.with_join(join)
.with_miter_limit(miter_limit);
if let Some(dash) = &stroke.dash {
style = style.with_dashes(
dash.phase.to_raw(),
dash.array.iter().copied().map(Abs::to_raw),
);
}
let opts = kurbo::StrokeOpts::default();
let tolerance = 0.01;
let expanded = kurbo::stroke(self.to_kurbo(), &style, &opts, tolerance);
kurbo::Shape::contains(&expanded, point_to_kurbo(needle))
}
}

fn point_to_kurbo(point: Point) -> kurbo::Point {
kurbo::Point::new(point.x.to_raw(), point.y.to_raw())
}
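Both hit-testing predicates above reduce to kurbo's winding number: the non-zero rule counts any net winding as inside, while even-odd counts parity. A minimal sketch of that reduction, assuming only the `kurbo` crate already used in the diff:

```rust
use kurbo::{BezPath, Point, Shape};

/// Point-in-path test under either fill rule, via the winding number.
fn path_contains(path: &BezPath, p: Point, even_odd: bool) -> bool {
    let w = path.winding(p);
    if even_odd { w % 2 != 0 } else { w != 0 }
}

fn main() {
    // A unit square; its center is inside under both rules.
    let mut path = BezPath::new();
    path.move_to((0.0, 0.0));
    path.line_to((1.0, 0.0));
    path.line_to((1.0, 1.0));
    path.line_to((0.0, 1.0));
    path.close_path();
    assert!(path_contains(&path, Point::new(0.5, 0.5), false));
    assert!(path_contains(&path, Point::new(0.5, 0.5), true));
}
```

`stroke_contains` follows the same idea but first expands the stroke into a filled outline with `kurbo::stroke` and then asks whether the point lies inside that outline.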
@ -120,12 +120,12 @@ use crate::visualize::{Color, ColorSpace, WeightedColor};
/// #let spaces = (
/// ("Oklab", color.oklab),
/// ("Oklch", color.oklch),
/// ("linear-RGB", color.linear-rgb),
/// ("sRGB", color.rgb),
/// ("linear-RGB", color.linear-rgb),
/// ("CMYK", color.cmyk),
/// ("Grayscale", color.luma),
/// ("HSL", color.hsl),
/// ("HSV", color.hsv),
/// ("Grayscale", color.luma),
/// )
///
/// #for (name, space) in spaces {

@ -3,6 +3,8 @@ use std::hash::{Hash, Hasher};
use std::io;
use std::sync::Arc;

use crate::diag::{bail, StrResult};
use crate::foundations::{cast, dict, Bytes, Cast, Dict, Smart, Value};
use ecow::{eco_format, EcoString};
use image::codecs::gif::GifDecoder;
use image::codecs::jpeg::JpegDecoder;

@ -11,9 +13,6 @@ use image::{
guess_format, DynamicImage, ImageBuffer, ImageDecoder, ImageResult, Limits, Pixel,
};

use crate::diag::{bail, StrResult};
use crate::foundations::{cast, dict, Bytes, Cast, Dict, Smart, Value};

/// A decoded raster image.
#[derive(Clone, Hash)]
pub struct RasterImage(Arc<Repr>);

@ -22,7 +21,8 @@ pub struct RasterImage(Arc<Repr>);
struct Repr {
data: Bytes,
format: RasterFormat,
dynamic: image::DynamicImage,
dynamic: Arc<DynamicImage>,
exif_rotation: Option<u32>,
icc: Option<Bytes>,
dpi: Option<f64>,
}

@ -50,6 +50,8 @@ impl RasterImage {
format: RasterFormat,
icc: Smart<Bytes>,
) -> StrResult<RasterImage> {
let mut exif_rot = None;

let (dynamic, icc, dpi) = match format {
RasterFormat::Exchange(format) => {
fn decode<T: ImageDecoder>(

@ -85,6 +87,7 @@ impl RasterImage {
// Apply rotation from EXIF metadata.
if let Some(rotation) = exif.as_ref().and_then(exif_rotation) {
apply_rotation(&mut dynamic, rotation);
exif_rot = Some(rotation);
}

// Extract pixel density.

@ -136,7 +139,14 @@ impl RasterImage {
}
};

Ok(Self(Arc::new(Repr { data, format, dynamic, icc, dpi })))
Ok(Self(Arc::new(Repr {
data,
format,
exif_rotation: exif_rot,
dynamic: Arc::new(dynamic),
icc,
dpi,
})))
}

/// The raw image data.

@ -159,6 +169,11 @@ impl RasterImage {
self.dynamic().height()
}

/// The EXIF rotation value that was applied to the image, if any.
pub fn exif_rotation(&self) -> Option<u32> {
self.0.exif_rotation
}

/// The image's pixel density in pixels per inch, if known.
///
/// This is guaranteed to be positive.

@ -167,7 +182,7 @@ impl RasterImage {
}

/// Access the underlying dynamic image.
pub fn dynamic(&self) -> &image::DynamicImage {
pub fn dynamic(&self) -> &Arc<DynamicImage> {
&self.0.dynamic
}

@ -325,12 +340,12 @@ fn apply_rotation(image: &mut DynamicImage, rotation: u32) {
ops::flip_horizontal_in_place(image);
*image = image.rotate270();
}
6 => *image = image.rotate90(),
6 => *image = image.rotate270(),
7 => {
ops::flip_horizontal_in_place(image);
*image = image.rotate90();
}
8 => *image = image.rotate270(),
8 => *image = image.rotate90(),
_ => {}
}
}

@ -106,7 +106,7 @@ pub struct RectElem {
pub radius: Corners<Option<Rel<Length>>>,

/// How much to pad the rectangle's content.
/// See the [box's documentation]($box.outset) for more details.
/// See the [box's documentation]($box.inset) for more details.
#[resolve]
#[fold]
#[default(Sides::splat(Some(Abs::pt(5.0).into())))]

@ -4,5 +4,5 @@ equation = Rovnice
bibliography = Bibliografie
heading = Kapitola
outline = Obsah
raw = Seznam
raw = Výpis
page = strana

8
crates/typst-library/translations/id.txt
Normal file
@ -0,0 +1,8 @@
figure = Gambar
table = Tabel
equation = Persamaan
bibliography = Daftar Pustaka
heading = Bagian
outline = Daftar Isi
raw = Kode
page = halaman
@ -19,20 +19,13 @@ typst-macros = { workspace = true }
typst-syntax = { workspace = true }
typst-timing = { workspace = true }
typst-utils = { workspace = true }
arrayvec = { workspace = true }
base64 = { workspace = true }
bytemuck = { workspace = true }
comemo = { workspace = true }
ecow = { workspace = true }
image = { workspace = true }
indexmap = { workspace = true }
miniz_oxide = { workspace = true }
pdf-writer = { workspace = true }
krilla = { workspace = true }
krilla-svg = { workspace = true }
serde = { workspace = true }
subsetter = { workspace = true }
svg2pdf = { workspace = true }
ttf-parser = { workspace = true }
xmp-writer = { workspace = true }

[lints]
workspace = true
@ -1,385 +0,0 @@
use std::num::NonZeroUsize;

use ecow::eco_format;
use pdf_writer::types::Direction;
use pdf_writer::writers::PageLabel;
use pdf_writer::{Finish, Name, Pdf, Ref, Str, TextStr};
use typst_library::diag::{bail, SourceResult};
use typst_library::foundations::{Datetime, Smart};
use typst_library::layout::Dir;
use typst_library::text::Lang;
use typst_syntax::Span;
use xmp_writer::{DateTime, LangId, RenditionClass, XmpWriter};

use crate::page::PdfPageLabel;
use crate::{hash_base64, outline, TextStrExt, Timestamp, Timezone, WithEverything};

/// Write the document catalog.
pub fn write_catalog(
ctx: WithEverything,
pdf: &mut Pdf,
alloc: &mut Ref,
) -> SourceResult<()> {
let lang = ctx
.resources
.languages
.iter()
.max_by_key(|(_, &count)| count)
.map(|(&l, _)| l);

let dir = if lang.map(Lang::dir) == Some(Dir::RTL) {
Direction::R2L
} else {
Direction::L2R
};

// Write the outline tree.
let outline_root_id = outline::write_outline(pdf, alloc, &ctx);

// Write the page labels.
let page_labels = write_page_labels(pdf, alloc, &ctx);

// Write the document information.
let info_ref = alloc.bump();
let mut info = pdf.document_info(info_ref);
let mut xmp = XmpWriter::new();
if let Some(title) = &ctx.document.info.title {
info.title(TextStr::trimmed(title));
xmp.title([(None, title.as_str())]);
}

if let Some(description) = &ctx.document.info.description {
info.subject(TextStr::trimmed(description));
xmp.description([(None, description.as_str())]);
}

let authors = &ctx.document.info.author;
if !authors.is_empty() {
// Turns out that if the authors are given in both the document
// information dictionary and the XMP metadata, Acrobat takes a little
// bit of both: The first author from the document information
// dictionary and the remaining authors from the XMP metadata.
//
// To fix this for Acrobat, we could omit the remaining authors or all
// metadata from the document information catalog (it is optional) and
// only write XMP. However, not all other tools (including Apple
// Preview) read the XMP data. This means we do want to include all
// authors in the document information dictionary.
//
// Thus, the only alternative is to fold all authors into a single
// `<rdf:li>` in the XMP metadata. This is, in fact, exactly what the
// PDF/A spec Part 1 section 6.7.3 has to say about the matter. It's a
// bit weird to not use the array (and it makes Acrobat show the author
// list in quotes), but there's not much we can do about that.
let joined = authors.join(", ");
info.author(TextStr::trimmed(&joined));
xmp.creator([joined.as_str()]);
}

let creator = eco_format!("Typst {}", env!("CARGO_PKG_VERSION"));
info.creator(TextStr(&creator));
xmp.creator_tool(&creator);

let keywords = &ctx.document.info.keywords;
if !keywords.is_empty() {
let joined = keywords.join(", ");
info.keywords(TextStr::trimmed(&joined));
xmp.pdf_keywords(&joined);
}
let (date, tz) = document_date(ctx.document.info.date, ctx.options.timestamp);
if let Some(pdf_date) = date.and_then(|date| pdf_date(date, tz)) {
info.creation_date(pdf_date);
info.modified_date(pdf_date);
}

info.finish();

// A unique ID for this instance of the document. Changes if anything
// changes in the frames.
let instance_id = hash_base64(&pdf.as_bytes());

// Determine the document's ID. It should be as stable as possible.
const PDF_VERSION: &str = "PDF-1.7";
let doc_id = if let Smart::Custom(ident) = ctx.options.ident {
// We were provided with a stable ID. Yay!
hash_base64(&(PDF_VERSION, ident))
} else if ctx.document.info.title.is_some() && !ctx.document.info.author.is_empty() {
// If not provided from the outside, but title and author were given, we
// compute a hash of them, which should be reasonably stable and unique.
hash_base64(&(PDF_VERSION, &ctx.document.info.title, &ctx.document.info.author))
} else {
// The user provided no usable metadata which we can use as an `/ID`.
instance_id.clone()
};

xmp.document_id(&doc_id);
xmp.instance_id(&instance_id);
xmp.format("application/pdf");
xmp.pdf_version("1.7");
xmp.language(ctx.resources.languages.keys().map(|lang| LangId(lang.as_str())));
xmp.num_pages(ctx.document.pages.len() as u32);
xmp.rendition_class(RenditionClass::Proof);

if let Some(xmp_date) = date.and_then(|date| xmp_date(date, tz)) {
xmp.create_date(xmp_date);
xmp.modify_date(xmp_date);

if ctx.options.standards.pdfa {
let mut history = xmp.history();
history
.add_event()
.action(xmp_writer::ResourceEventAction::Saved)
.when(xmp_date)
.instance_id(&eco_format!("{instance_id}_source"));
history
.add_event()
.action(xmp_writer::ResourceEventAction::Converted)
.when(xmp_date)
.instance_id(&instance_id)
.software_agent(&creator);
}
}

// Assert dominance.
if let Some((part, conformance)) = ctx.options.standards.pdfa_part {
let mut extension_schemas = xmp.extension_schemas();
extension_schemas
.xmp_media_management()
.properties()
.describe_instance_id();
extension_schemas.pdf().properties().describe_all();
extension_schemas.finish();
xmp.pdfa_part(part);
xmp.pdfa_conformance(conformance);
}

let xmp_buf = xmp.finish(None);
let meta_ref = alloc.bump();
pdf.stream(meta_ref, xmp_buf.as_bytes())
.pair(Name(b"Type"), Name(b"Metadata"))
.pair(Name(b"Subtype"), Name(b"XML"));

// Set IDs only now, so that we don't need to clone them.
pdf.set_file_id((doc_id.into_bytes(), instance_id.into_bytes()));

// Write the document catalog.
let catalog_ref = alloc.bump();
let mut catalog = pdf.catalog(catalog_ref);
catalog.pages(ctx.page_tree_ref);
catalog.viewer_preferences().direction(dir);
catalog.metadata(meta_ref);

let has_dests = !ctx.references.named_destinations.dests.is_empty();
let has_embeddings = !ctx.references.embedded_files.is_empty();

// Write the `/Names` dictionary.
if has_dests || has_embeddings {
// Write the named destination tree if there are any entries.
let mut name_dict = catalog.names();
if has_dests {
let mut dests_name_tree = name_dict.destinations();
let mut names = dests_name_tree.names();
for &(name, dest_ref, ..) in &ctx.references.named_destinations.dests {
names.insert(Str(name.resolve().as_bytes()), dest_ref);
}
}

if has_embeddings {
let mut embedded_files = name_dict.embedded_files();
let mut names = embedded_files.names();
for (name, file_ref) in &ctx.references.embedded_files {
names.insert(Str(name.as_bytes()), *file_ref);
}
}
}

if has_embeddings && ctx.options.standards.pdfa {
// PDF 2.0, but ISO 19005-3 (PDF/A-3) Annex E allows it for PDF/A-3.
let mut associated_files = catalog.insert(Name(b"AF")).array().typed();
for (_, file_ref) in ctx.references.embedded_files {
associated_files.item(file_ref).finish();
}
}

// Insert the page labels.
if !page_labels.is_empty() {
let mut num_tree = catalog.page_labels();
let mut entries = num_tree.nums();
for (n, r) in &page_labels {
entries.insert(n.get() as i32 - 1, *r);
}
}

if let Some(outline_root_id) = outline_root_id {
catalog.outlines(outline_root_id);
}

if let Some(lang) = lang {
catalog.lang(TextStr(lang.as_str()));
}

if ctx.options.standards.pdfa {
catalog
.output_intents()
.push()
.subtype(pdf_writer::types::OutputIntentSubtype::PDFA)
.output_condition(TextStr("sRGB"))
.output_condition_identifier(TextStr("Custom"))
.info(TextStr("sRGB IEC61966-2.1"))
.dest_output_profile(ctx.globals.color_functions.srgb.unwrap());
}

catalog.finish();

if ctx.options.standards.pdfa && pdf.refs().count() > 8388607 {
bail!(Span::detached(), "too many PDF objects");
}

Ok(())
}

/// Write the page labels.
pub(crate) fn write_page_labels(
chunk: &mut Pdf,
alloc: &mut Ref,
ctx: &WithEverything,
) -> Vec<(NonZeroUsize, Ref)> {
// If no exported page is labeled, we skip writing the labels.
if !ctx.pages.iter().filter_map(Option::as_ref).any(|p| {
p.label
.as_ref()
.is_some_and(|l| l.prefix.is_some() || l.style.is_some())
}) {
return Vec::new();
}

let empty_label = PdfPageLabel::default();
let mut result = vec![];
let mut prev: Option<&PdfPageLabel> = None;

// Skip non-exported pages for numbering.
for (i, page) in ctx.pages.iter().filter_map(Option::as_ref).enumerate() {
let nr = NonZeroUsize::new(1 + i).unwrap();
// If there are pages with empty labels between labeled pages, we must
// write empty PageLabel entries.
let label = page.label.as_ref().unwrap_or(&empty_label);

if let Some(pre) = prev {
if label.prefix == pre.prefix
&& label.style == pre.style
&& label.offset == pre.offset.map(|n| n.saturating_add(1))
{
prev = Some(label);
continue;
}
}

let id = alloc.bump();
let mut entry = chunk.indirect(id).start::<PageLabel>();

// Only add what is actually provided. Don't add an empty prefix string
// if it wasn't given, for example.
if let Some(prefix) = &label.prefix {
entry.prefix(TextStr::trimmed(prefix));
}

if let Some(style) = label.style {
entry.style(style.to_pdf_numbering_style());
}

if let Some(offset) = label.offset {
entry.offset(offset.get() as i32);
}

result.push((nr, id));
prev = Some(label);
}

result
}
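The loop in `write_page_labels` is essentially run-length detection: a new `/PageLabels` entry is emitted only where a run breaks, i.e. when the prefix or style changes or the numeric offset stops increasing by one. A stripped-down sketch of just that run detection (illustrative `Label` type, not the real `PdfPageLabel`):

```rust
#[derive(Clone, PartialEq)]
struct Label {
    prefix: Option<String>,
    style: Option<char>,
    offset: Option<u32>,
}

/// Indices at which a new page-label entry would be needed.
fn label_runs(labels: &[Label]) -> Vec<usize> {
    let mut starts = Vec::new();
    let mut prev: Option<&Label> = None;
    for (i, label) in labels.iter().enumerate() {
        if let Some(pre) = prev {
            // Same prefix and style, and the number continues by one:
            // the previous entry still covers this page.
            if label.prefix == pre.prefix
                && label.style == pre.style
                && label.offset == pre.offset.map(|n| n.saturating_add(1))
            {
                prev = Some(label);
                continue;
            }
        }
        starts.push(i);
        prev = Some(label);
    }
    starts
}

fn main() {
    let style = Some('D');
    let labels: Vec<Label> = (1..=3)
        .map(|n| Label { prefix: None, style, offset: Some(n) })
        .collect();
    // One run: only the first page needs an explicit entry.
    assert_eq!(label_runs(&labels), vec![0]);
}
```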
/// Resolve the document date.
///
/// (1) If the `document.date` is set to a specific `datetime` or `none`, use it.
/// (2) If the `document.date` is set to `auto` or not set, try to use the
/// date from the options.
/// (3) Otherwise, we don't write date metadata.
pub fn document_date(
document_date: Smart<Option<Datetime>>,
timestamp: Option<Timestamp>,
) -> (Option<Datetime>, Option<Timezone>) {
match (document_date, timestamp) {
(Smart::Custom(date), _) => (date, None),
(Smart::Auto, Some(timestamp)) => {
(Some(timestamp.datetime), Some(timestamp.timezone))
}
_ => (None, None),
}
}

/// Converts a datetime to a pdf-writer date.
pub fn pdf_date(datetime: Datetime, tz: Option<Timezone>) -> Option<pdf_writer::Date> {
let year = datetime.year().filter(|&y| y >= 0)? as u16;

let mut pdf_date = pdf_writer::Date::new(year);

if let Some(month) = datetime.month() {
pdf_date = pdf_date.month(month);
}

if let Some(day) = datetime.day() {
pdf_date = pdf_date.day(day);
}

if let Some(h) = datetime.hour() {
pdf_date = pdf_date.hour(h);
}

if let Some(m) = datetime.minute() {
pdf_date = pdf_date.minute(m);
}

if let Some(s) = datetime.second() {
pdf_date = pdf_date.second(s);
}

match tz {
Some(Timezone::UTC) => {
pdf_date = pdf_date.utc_offset_hour(0).utc_offset_minute(0)
}
Some(Timezone::Local { hour_offset, minute_offset }) => {
pdf_date =
pdf_date.utc_offset_hour(hour_offset).utc_offset_minute(minute_offset)
}
None => {}
}

Some(pdf_date)
}

/// Converts a datetime to an xmp-writer datetime.
fn xmp_date(
datetime: Datetime,
timezone: Option<Timezone>,
) -> Option<xmp_writer::DateTime> {
let year = datetime.year().filter(|&y| y >= 0)? as u16;
let timezone = timezone.map(|tz| match tz {
Timezone::UTC => xmp_writer::Timezone::Utc,
Timezone::Local { hour_offset, minute_offset } => {
// The xmp-writer crate uses signed integers for the minute offset,
// which can be buggy if the minute offset is negative. And because
// our minute_offset is ensured to be `0 <= minute_offset < 60`, we
// can safely cast it to a signed integer.
xmp_writer::Timezone::Local { hour: hour_offset, minute: minute_offset as i8 }
}
});
Some(DateTime {
year,
month: datetime.month(),
day: datetime.day(),
hour: datetime.hour(),
minute: datetime.minute(),
second: datetime.second(),
timezone,
})
}
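For orientation, a hedged usage sketch of the pdf-writer date builder that `pdf_date` drives; it uses only builder methods that already appear above, and the concrete date values are made up:

```rust
fn example_date() -> pdf_writer::Date {
    // 2024-05-17 12:30:00 UTC, expressed with the same builder calls as above.
    pdf_writer::Date::new(2024)
        .month(5)
        .day(17)
        .hour(12)
        .minute(30)
        .second(0)
        .utc_offset_hour(0)
        .utc_offset_minute(0)
}

fn main() {
    let _ = example_date();
}
```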
@ -1,394 +0,0 @@
use std::sync::LazyLock;

use arrayvec::ArrayVec;
use pdf_writer::{writers, Chunk, Dict, Filter, Name, Ref};
use typst_library::diag::{bail, SourceResult};
use typst_library::visualize::{Color, ColorSpace, Paint};
use typst_syntax::Span;

use crate::{content, deflate, PdfChunk, PdfOptions, Renumber, WithResources};

// The names of the color spaces.
pub const SRGB: Name<'static> = Name(b"srgb");
pub const D65_GRAY: Name<'static> = Name(b"d65gray");
pub const LINEAR_SRGB: Name<'static> = Name(b"linearrgb");

// The ICC profiles.
static SRGB_ICC_DEFLATED: LazyLock<Vec<u8>> =
LazyLock::new(|| deflate(typst_assets::icc::S_RGB_V4));
static GRAY_ICC_DEFLATED: LazyLock<Vec<u8>> =
LazyLock::new(|| deflate(typst_assets::icc::S_GREY_V4));

/// The color spaces present in the PDF document.
#[derive(Default)]
pub struct ColorSpaces {
use_srgb: bool,
use_d65_gray: bool,
use_linear_rgb: bool,
}

impl ColorSpaces {
/// Mark a color space as used.
pub fn mark_as_used(&mut self, color_space: ColorSpace) {
match color_space {
ColorSpace::Oklch
| ColorSpace::Oklab
| ColorSpace::Hsl
| ColorSpace::Hsv
| ColorSpace::Srgb => {
self.use_srgb = true;
}
ColorSpace::D65Gray => {
self.use_d65_gray = true;
}
ColorSpace::LinearRgb => {
self.use_linear_rgb = true;
}
ColorSpace::Cmyk => {}
}
}

/// Write the color spaces to the PDF file.
pub fn write_color_spaces(&self, mut spaces: Dict, refs: &ColorFunctionRefs) {
if self.use_srgb {
write(ColorSpace::Srgb, spaces.insert(SRGB).start(), refs);
}

if self.use_d65_gray {
write(ColorSpace::D65Gray, spaces.insert(D65_GRAY).start(), refs);
}

if self.use_linear_rgb {
write(ColorSpace::LinearRgb, spaces.insert(LINEAR_SRGB).start(), refs);
}
}

/// Write the necessary color space functions and ICC profiles to the
/// PDF file.
pub fn write_functions(&self, chunk: &mut Chunk, refs: &ColorFunctionRefs) {
// Write the sRGB color space.
if let Some(id) = refs.srgb {
chunk
.icc_profile(id, &SRGB_ICC_DEFLATED)
.n(3)
.range([0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
.filter(Filter::FlateDecode);
}

// Write the gray color space.
if let Some(id) = refs.d65_gray {
chunk
.icc_profile(id, &GRAY_ICC_DEFLATED)
.n(1)
.range([0.0, 1.0])
.filter(Filter::FlateDecode);
}
}

/// Merge two color space usage information together: a given color space is
/// considered to be used if it is used on either side.
pub fn merge(&mut self, other: &Self) {
self.use_d65_gray |= other.use_d65_gray;
self.use_linear_rgb |= other.use_linear_rgb;
self.use_srgb |= other.use_srgb;
}
}

/// Write the color space.
pub fn write(
color_space: ColorSpace,
writer: writers::ColorSpace,
refs: &ColorFunctionRefs,
) {
match color_space {
ColorSpace::Srgb
| ColorSpace::Oklab
| ColorSpace::Hsl
| ColorSpace::Hsv
| ColorSpace::Oklch => writer.icc_based(refs.srgb.unwrap()),
ColorSpace::D65Gray => writer.icc_based(refs.d65_gray.unwrap()),
ColorSpace::LinearRgb => {
writer.cal_rgb(
[0.9505, 1.0, 1.0888],
None,
Some([1.0, 1.0, 1.0]),
Some([
0.4124, 0.2126, 0.0193, 0.3576, 0.715, 0.1192, 0.1805, 0.0722, 0.9505,
]),
);
}
ColorSpace::Cmyk => writer.device_cmyk(),
}
}

/// Global references for color conversion functions.
///
/// These functions are only written once (at most; they are not written if not
/// needed) in the final document and are shared by all color space
/// dictionaries.
pub struct ColorFunctionRefs {
pub srgb: Option<Ref>,
d65_gray: Option<Ref>,
}

impl Renumber for ColorFunctionRefs {
fn renumber(&mut self, offset: i32) {
if let Some(r) = &mut self.srgb {
r.renumber(offset);
}
if let Some(r) = &mut self.d65_gray {
r.renumber(offset);
}
}
}

/// Allocate all necessary [`ColorFunctionRefs`].
pub fn alloc_color_functions_refs(
context: &WithResources,
) -> SourceResult<(PdfChunk, ColorFunctionRefs)> {
let mut chunk = PdfChunk::new();
let mut used_color_spaces = ColorSpaces::default();

if context.options.standards.pdfa {
used_color_spaces.mark_as_used(ColorSpace::Srgb);
}

context.resources.traverse(&mut |r| {
used_color_spaces.merge(&r.colors);
Ok(())
})?;

let refs = ColorFunctionRefs {
srgb: if used_color_spaces.use_srgb { Some(chunk.alloc()) } else { None },
d65_gray: if used_color_spaces.use_d65_gray { Some(chunk.alloc()) } else { None },
};

Ok((chunk, refs))
}

/// Encodes the color into four f32s, which can be used in a PDF file.
/// Ensures that the values are in the range [0.0, 1.0].
///
/// # Why?
/// - Oklab: The a and b components are in the range [-0.5, 0.5] and the PDF
/// specifies (and some readers enforce) that all color values be in the range
/// [0.0, 1.0]. This means that the PostScript function and the encoded color
/// must be offset by 0.5.
/// - HSV/HSL: The hue component is in the range [0.0, 360.0] and the PDF format
/// specifies that it must be in the range [0.0, 1.0]. This means that the
/// PostScript function and the encoded color must be divided by 360.0.
pub trait ColorEncode {
/// Performs the color to PDF f32 array conversion.
fn encode(&self, color: Color) -> [f32; 4];
}

impl ColorEncode for ColorSpace {
fn encode(&self, color: Color) -> [f32; 4] {
match self {
ColorSpace::Oklab | ColorSpace::Oklch | ColorSpace::Hsl | ColorSpace::Hsv => {
color.to_space(ColorSpace::Srgb).to_vec4()
}
_ => color.to_space(*self).to_vec4(),
}
}
}

/// Encodes a paint into either a fill or stroke color.
pub(super) trait PaintEncode {
/// Set the paint as the fill color.
fn set_as_fill(
&self,
ctx: &mut content::Builder,
on_text: bool,
transforms: content::Transforms,
) -> SourceResult<()>;

/// Set the paint as the stroke color.
fn set_as_stroke(
&self,
ctx: &mut content::Builder,
on_text: bool,
transforms: content::Transforms,
) -> SourceResult<()>;
}

impl PaintEncode for Paint {
fn set_as_fill(
&self,
ctx: &mut content::Builder,
on_text: bool,
transforms: content::Transforms,
) -> SourceResult<()> {
match self {
Self::Solid(c) => c.set_as_fill(ctx, on_text, transforms),
Self::Gradient(gradient) => gradient.set_as_fill(ctx, on_text, transforms),
Self::Tiling(tiling) => tiling.set_as_fill(ctx, on_text, transforms),
}
}

fn set_as_stroke(
&self,
ctx: &mut content::Builder,
on_text: bool,
transforms: content::Transforms,
) -> SourceResult<()> {
match self {
Self::Solid(c) => c.set_as_stroke(ctx, on_text, transforms),
Self::Gradient(gradient) => gradient.set_as_stroke(ctx, on_text, transforms),
Self::Tiling(tiling) => tiling.set_as_stroke(ctx, on_text, transforms),
}
}
}

impl PaintEncode for Color {
fn set_as_fill(
&self,
ctx: &mut content::Builder,
_: bool,
_: content::Transforms,
) -> SourceResult<()> {
match self {
Color::Luma(_) => {
ctx.resources.colors.mark_as_used(ColorSpace::D65Gray);
ctx.set_fill_color_space(D65_GRAY);

let [l, _, _, _] = ColorSpace::D65Gray.encode(*self);
ctx.content.set_fill_color([l]);
}
Color::LinearRgb(_) => {
ctx.resources.colors.mark_as_used(ColorSpace::LinearRgb);
ctx.set_fill_color_space(LINEAR_SRGB);

let [r, g, b, _] = ColorSpace::LinearRgb.encode(*self);
ctx.content.set_fill_color([r, g, b]);
}
// Oklab & friends are encoded as RGB.
Color::Rgb(_)
| Color::Oklab(_)
| Color::Oklch(_)
| Color::Hsl(_)
| Color::Hsv(_) => {
ctx.resources.colors.mark_as_used(ColorSpace::Srgb);
ctx.set_fill_color_space(SRGB);

let [r, g, b, _] = ColorSpace::Srgb.encode(*self);
ctx.content.set_fill_color([r, g, b]);
}
Color::Cmyk(_) => {
check_cmyk_allowed(ctx.options)?;
ctx.reset_fill_color_space();

let [c, m, y, k] = ColorSpace::Cmyk.encode(*self);
ctx.content.set_fill_cmyk(c, m, y, k);
}
}
Ok(())
}

fn set_as_stroke(
&self,
ctx: &mut content::Builder,
_: bool,
_: content::Transforms,
) -> SourceResult<()> {
match self {
Color::Luma(_) => {
ctx.resources.colors.mark_as_used(ColorSpace::D65Gray);
ctx.set_stroke_color_space(D65_GRAY);

let [l, _, _, _] = ColorSpace::D65Gray.encode(*self);
ctx.content.set_stroke_color([l]);
}
Color::LinearRgb(_) => {
ctx.resources.colors.mark_as_used(ColorSpace::LinearRgb);
ctx.set_stroke_color_space(LINEAR_SRGB);

let [r, g, b, _] = ColorSpace::LinearRgb.encode(*self);
ctx.content.set_stroke_color([r, g, b]);
}
// Oklab & friends are encoded as RGB.
Color::Rgb(_)
| Color::Oklab(_)
| Color::Oklch(_)
| Color::Hsl(_)
| Color::Hsv(_) => {
ctx.resources.colors.mark_as_used(ColorSpace::Srgb);
ctx.set_stroke_color_space(SRGB);

let [r, g, b, _] = ColorSpace::Srgb.encode(*self);
ctx.content.set_stroke_color([r, g, b]);
}
Color::Cmyk(_) => {
check_cmyk_allowed(ctx.options)?;
ctx.reset_stroke_color_space();

let [c, m, y, k] = ColorSpace::Cmyk.encode(*self);
ctx.content.set_stroke_cmyk(c, m, y, k);
}
}
Ok(())
}
}

/// Extra color space functions.
pub(super) trait ColorSpaceExt {
/// Returns the range of the color space.
fn range(self) -> &'static [f32];

/// Converts a color to the color space.
fn convert<U: QuantizedColor>(self, color: Color) -> ArrayVec<U, 4>;
}

impl ColorSpaceExt for ColorSpace {
fn range(self) -> &'static [f32] {
match self {
ColorSpace::D65Gray => &[0.0, 1.0],
ColorSpace::Oklab => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
ColorSpace::Oklch => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
ColorSpace::LinearRgb => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
ColorSpace::Srgb => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
ColorSpace::Cmyk => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
ColorSpace::Hsl => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
ColorSpace::Hsv => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
}
}

fn convert<U: QuantizedColor>(self, color: Color) -> ArrayVec<U, 4> {
let components = self.encode(color);

self.range()
.chunks(2)
.zip(components)
.map(|(range, component)| U::quantize(component, [range[0], range[1]]))
.collect()
}
}

/// Quantizes a color component to a specific type.
pub(super) trait QuantizedColor {
fn quantize(color: f32, range: [f32; 2]) -> Self;
}

impl QuantizedColor for u16 {
fn quantize(color: f32, [min, max]: [f32; 2]) -> Self {
let value = (color - min) / (max - min);
(value * Self::MAX as f32).round().clamp(0.0, Self::MAX as f32) as Self
}
}

impl QuantizedColor for f32 {
fn quantize(color: f32, [min, max]: [f32; 2]) -> Self {
color.clamp(min, max)
}
}
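As a quick sanity check of the `QuantizedColor for u16` impl above: the component is first normalized into [0, 1] against its range and then spread over the full u16 domain. A standalone sketch with an assumed Oklab-style range of [-0.5, 0.5]:

```rust
/// Map a component from its range onto 0..=u16::MAX (same math as above).
fn quantize_u16(color: f32, [min, max]: [f32; 2]) -> u16 {
    let value = (color - min) / (max - min);
    (value * u16::MAX as f32).round().clamp(0.0, u16::MAX as f32) as u16
}

fn main() {
    // An a/b component of 0.0 in an assumed [-0.5, 0.5] range lands mid-scale.
    assert_eq!(quantize_u16(0.0, [-0.5, 0.5]), 32768);
}
```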
/// Fails with an error if PDF/A processing is enabled.
|
||||
pub(super) fn check_cmyk_allowed(options: &PdfOptions) -> SourceResult<()> {
|
||||
if options.standards.pdfa {
|
||||
bail!(
|
||||
Span::detached(),
|
||||
"cmyk colors are not currently supported by PDF/A export"
|
||||
);
|
||||
}
|
||||
Ok(())
|
||||
}
|
@ -1,344 +0,0 @@
|
||||
//! OpenType fonts generally define monochrome glyphs, but they can also define
//! glyphs with colors. This is how emojis are generally implemented for
//! example.
//!
//! There are various standards to represent color glyphs, but PDF readers don't
//! support any of them natively, so Typst has to handle them manually.

use std::collections::HashMap;

use ecow::eco_format;
use indexmap::IndexMap;
use pdf_writer::types::UnicodeCmap;
use pdf_writer::writers::WMode;
use pdf_writer::{Filter, Finish, Name, Rect, Ref};
use typst_library::diag::{bail, error, SourceDiagnostic, SourceResult};
use typst_library::foundations::Repr;
use typst_library::layout::Em;
use typst_library::text::color::glyph_frame;
use typst_library::text::{Font, Glyph, TextItemView};

use crate::font::{base_font_name, write_font_descriptor, CMAP_NAME, SYSTEM_INFO};
use crate::resources::{Resources, ResourcesRefs};
use crate::{content, EmExt, PdfChunk, PdfOptions, WithGlobalRefs};

/// Write color fonts in the PDF document.
///
/// They are written as Type3 fonts, which map glyph IDs to arbitrary PDF
/// instructions.
pub fn write_color_fonts(
    context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<ColorFontSlice, Ref>)> {
    let mut out = HashMap::new();
    let mut chunk = PdfChunk::new();
    context.resources.traverse(&mut |resources: &Resources| {
        let Some(color_fonts) = &resources.color_fonts else {
            return Ok(());
        };

        for (color_font, font_slice) in color_fonts.iter() {
            if out.contains_key(&font_slice) {
                continue;
            }

            // Allocate some IDs.
            let subfont_id = chunk.alloc();
            let cmap_ref = chunk.alloc();
            let descriptor_ref = chunk.alloc();
            let widths_ref = chunk.alloc();

            // And a map between glyph IDs and the instructions to draw this
            // glyph.
            let mut glyphs_to_instructions = Vec::new();

            let start = font_slice.subfont * 256;
            let end = (start + 256).min(color_font.glyphs.len());
            let glyph_count = end - start;
            let subset = &color_font.glyphs[start..end];
            let mut widths = Vec::new();
            let mut gids = Vec::new();

            let scale_factor = font_slice.font.ttf().units_per_em() as f32;

            // Write the instructions for each glyph.
            for color_glyph in subset {
                let instructions_stream_ref = chunk.alloc();
                let width = font_slice
                    .font
                    .advance(color_glyph.gid)
                    .unwrap_or(Em::new(0.0))
                    .get() as f32
                    * scale_factor;
                widths.push(width);
                chunk
                    .stream(
                        instructions_stream_ref,
                        color_glyph.instructions.content.wait(),
                    )
                    .filter(Filter::FlateDecode);

                // Use this stream as instructions to draw the glyph.
                glyphs_to_instructions.push(instructions_stream_ref);
                gids.push(color_glyph.gid);
            }

            // Determine the base font name.
            gids.sort();
            let base_font = base_font_name(&font_slice.font, &gids);

            // Write the Type3 font object.
            let mut pdf_font = chunk.type3_font(subfont_id);
            pdf_font.name(Name(base_font.as_bytes()));
            pdf_font.pair(Name(b"Resources"), color_fonts.resources.reference);
            pdf_font.bbox(color_font.bbox);
            pdf_font.matrix([1.0 / scale_factor, 0.0, 0.0, 1.0 / scale_factor, 0.0, 0.0]);
            pdf_font.first_char(0);
            pdf_font.last_char((glyph_count - 1) as u8);
            pdf_font.pair(Name(b"Widths"), widths_ref);
            pdf_font.to_unicode(cmap_ref);
            pdf_font.font_descriptor(descriptor_ref);

            // Write the /CharProcs dictionary, that maps glyph names to
            // drawing instructions.
            let mut char_procs = pdf_font.char_procs();
            for (gid, instructions_ref) in glyphs_to_instructions.iter().enumerate() {
                char_procs
                    .pair(Name(eco_format!("glyph{gid}").as_bytes()), *instructions_ref);
            }
            char_procs.finish();

            // Write the /Encoding dictionary.
            let names = (0..glyph_count)
                .map(|gid| eco_format!("glyph{gid}"))
                .collect::<Vec<_>>();
            pdf_font
                .encoding_custom()
                .differences()
                .consecutive(0, names.iter().map(|name| Name(name.as_bytes())));
            pdf_font.finish();

            // Encode a CMAP to make it possible to search or copy glyphs.
            let glyph_set = resources.color_glyph_sets.get(&font_slice.font).unwrap();
            let mut cmap = UnicodeCmap::new(CMAP_NAME, SYSTEM_INFO);
            for (index, glyph) in subset.iter().enumerate() {
                let Some(text) = glyph_set.get(&glyph.gid) else {
                    continue;
                };

                if !text.is_empty() {
                    cmap.pair_with_multiple(index as u8, text.chars());
                }
            }
            chunk.cmap(cmap_ref, &cmap.finish()).writing_mode(WMode::Horizontal);

            // Write the font descriptor.
            write_font_descriptor(
                &mut chunk,
                descriptor_ref,
                &font_slice.font,
                &base_font,
            );

            // Write the widths array.
            chunk.indirect(widths_ref).array().items(widths);

            out.insert(font_slice, subfont_id);
        }

        Ok(())
    })?;

    Ok((chunk, out))
}

/// A mapping between `Font`s and all the corresponding `ColorFont`s.
///
/// This mapping is one-to-many because there can only be 256 glyphs in a Type 3
/// font, and fonts generally have more color glyphs than that.
pub struct ColorFontMap<R> {
    /// The mapping itself.
    map: IndexMap<Font, ColorFont>,
    /// The resources required to render the fonts in this map.
    ///
    /// For example, this can be the images for glyphs based on bitmaps or SVG.
    pub resources: Resources<R>,
    /// The number of font slices (groups of 256 color glyphs), across all color
    /// fonts.
    total_slice_count: usize,
}

/// A collection of Type3 fonts, belonging to the same TTF font.
pub struct ColorFont {
    /// The IDs of each sub-slice of this font. They are the numbers after "Cf"
    /// in the Resources dictionaries.
    slice_ids: Vec<usize>,
    /// The list of all color glyphs in this family.
    ///
    /// The index in this vector modulo 256 corresponds to the index in one of
    /// the Type3 fonts in `slice_ids` (the `n`-th in the vector, where `n` is
    /// the quotient of the index divided by 256).
    pub glyphs: Vec<ColorGlyph>,
    /// The global bounding box of the font.
    pub bbox: Rect,
    /// A mapping between glyph IDs and character indices in the `glyphs`
    /// vector.
    glyph_indices: HashMap<u16, usize>,
}

/// A single color glyph.
pub struct ColorGlyph {
    /// The ID of the glyph.
    pub gid: u16,
    /// Instructions to draw the glyph.
    pub instructions: content::Encoded,
}

impl ColorFontMap<()> {
    /// Creates a new empty mapping.
    pub fn new() -> Self {
        Self {
            map: IndexMap::new(),
            total_slice_count: 0,
            resources: Resources::default(),
        }
    }

    /// For a given glyph in a TTF font, give the ID of the Type3 font and the
    /// index of the glyph inside of this Type3 font.
    ///
    /// If this is the first occurrence of this glyph in this font, it will
    /// start its encoding and add it to the list of known glyphs.
    pub fn get(
        &mut self,
        options: &PdfOptions,
        text: &TextItemView,
        glyph: &Glyph,
    ) -> SourceResult<(usize, u8)> {
        let font = &text.item.font;
        let color_font = self.map.entry(font.clone()).or_insert_with(|| {
            let global_bbox = font.ttf().global_bounding_box();
            let bbox = Rect::new(
                font.to_em(global_bbox.x_min).to_font_units(),
                font.to_em(global_bbox.y_min).to_font_units(),
                font.to_em(global_bbox.x_max).to_font_units(),
                font.to_em(global_bbox.y_max).to_font_units(),
            );
            ColorFont {
                bbox,
                slice_ids: Vec::new(),
                glyphs: Vec::new(),
                glyph_indices: HashMap::new(),
            }
        });

        Ok(if let Some(index_of_glyph) = color_font.glyph_indices.get(&glyph.id) {
            // If we already know this glyph, return it.
            (color_font.slice_ids[index_of_glyph / 256], *index_of_glyph as u8)
        } else {
            // Otherwise, allocate a new ColorGlyph in the font, and a new
            // Type3 font if needed.
            let index = color_font.glyphs.len();
            if index % 256 == 0 {
                color_font.slice_ids.push(self.total_slice_count);
                self.total_slice_count += 1;
            }

            let (frame, tofu) = glyph_frame(font, glyph.id);
            if options.standards.pdfa && tofu {
                bail!(failed_to_convert(text, glyph));
            }

            let width = font.advance(glyph.id).unwrap_or(Em::new(0.0)).get()
                * font.units_per_em();
            let instructions = content::build(
                options,
                &mut self.resources,
                &frame,
                None,
                Some(width as f32),
            )?;
            color_font.glyphs.push(ColorGlyph { gid: glyph.id, instructions });
            color_font.glyph_indices.insert(glyph.id, index);

            (color_font.slice_ids[index / 256], index as u8)
        })
    }

    /// Assign references to the resource dictionary used by this set of color
    /// fonts.
    pub fn with_refs(self, refs: &ResourcesRefs) -> ColorFontMap<Ref> {
        ColorFontMap {
            map: self.map,
            resources: self.resources.with_refs(refs),
            total_slice_count: self.total_slice_count,
        }
    }
}

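// Editor's sketch (not part of the original diff): how `get` splits a glyph
// index into a (Type3 subfont, one-byte code) pair around the 256-glyph limit.
#[cfg(test)]
mod slice_index_sketch {
    #[test]
    fn index_maps_to_subfont_and_code() {
        let index: usize = 300; // the 301st distinct color glyph
        assert_eq!(index / 256, 1); // lands in the second Type3 font
        assert_eq!(index as u8, 44); // `as u8` truncates to 300 % 256
    }
}
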
impl<R> ColorFontMap<R> {
    /// Iterate over all Type3 fonts.
    ///
    /// Each item of this iterator maps to a Type3 font: it contains
    /// at most 256 glyphs. The same TTF font can yield multiple Type3 fonts.
    pub fn iter(&self) -> ColorFontMapIter<'_, R> {
        ColorFontMapIter { map: self, font_index: 0, slice_index: 0 }
    }
}

/// Iterator over a [`ColorFontMap`].
///
/// See [`ColorFontMap::iter`].
pub struct ColorFontMapIter<'a, R> {
    /// The map over which to iterate.
    map: &'a ColorFontMap<R>,
    /// The index of the TTF font over which we currently iterate.
    font_index: usize,
    /// The sub-font (slice of at most 256 glyphs) at which we currently are.
    slice_index: usize,
}

impl<'a, R> Iterator for ColorFontMapIter<'a, R> {
    type Item = (&'a ColorFont, ColorFontSlice);

    fn next(&mut self) -> Option<Self::Item> {
        let (font, color_font) = self.map.map.get_index(self.font_index)?;
        let slice_count = (color_font.glyphs.len() / 256) + 1;

        if self.slice_index >= slice_count {
            self.font_index += 1;
            self.slice_index = 0;
            return self.next();
        }

        let slice = ColorFontSlice { font: font.clone(), subfont: self.slice_index };
        self.slice_index += 1;
        Some((color_font, slice))
    }
}

/// A set of at most 256 glyphs (a limit imposed on Type3 fonts by the PDF
/// specification) that represents a part of a TTF font.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct ColorFontSlice {
    /// The original TTF font.
    pub font: Font,
    /// The index of the Type3 font, among all those that are necessary to
    /// represent the subset of the TTF font we are interested in.
    pub subfont: usize,
}

/// The error when the glyph could not be converted.
#[cold]
fn failed_to_convert(text: &TextItemView, glyph: &Glyph) -> SourceDiagnostic {
    let mut diag = error!(
        glyph.span.0,
        "the glyph for {} could not be exported",
        text.glyph_text(glyph).repr()
    );

    if text.item.font.ttf().tables().cff2.is_some() {
        diag.hint("CFF2 fonts are not currently supported");
    }

    diag
}
823
crates/typst-pdf/src/content.rs
@ -1,823 +0,0 @@
//! Generic writer for PDF content.
//!
//! It is used to write page contents, color glyph instructions, and tilings.
//!
//! See also [`pdf_writer::Content`].

use ecow::eco_format;
use pdf_writer::types::{
    ColorSpaceOperand, LineCapStyle, LineJoinStyle, TextRenderingMode,
};
use pdf_writer::writers::PositionedItems;
use pdf_writer::{Content, Finish, Name, Rect, Str};
use typst_library::diag::{bail, error, SourceDiagnostic, SourceResult};
use typst_library::foundations::Repr;
use typst_library::layout::{
    Abs, Em, Frame, FrameItem, GroupItem, Point, Ratio, Size, Transform,
};
use typst_library::model::Destination;
use typst_library::text::color::should_outline;
use typst_library::text::{Font, Glyph, TextItem, TextItemView};
use typst_library::visualize::{
    Curve, CurveItem, FillRule, FixedStroke, Geometry, Image, LineCap, LineJoin, Paint,
    Shape,
};
use typst_syntax::Span;
use typst_utils::{Deferred, Numeric, SliceExt};

use crate::color::PaintEncode;
use crate::color_font::ColorFontMap;
use crate::extg::ExtGState;
use crate::image::deferred_image;
use crate::resources::Resources;
use crate::{deflate_deferred, AbsExt, ContentExt, EmExt, PdfOptions, StrExt};

/// Encode a [`Frame`] into a content stream.
///
/// The resources that were used in the stream will be added to `resources`.
///
/// `color_glyph_width` should be `None` unless the `Frame` represents a [color
/// glyph].
///
/// [color glyph]: `crate::color_font`
pub fn build(
    options: &PdfOptions,
    resources: &mut Resources<()>,
    frame: &Frame,
    fill: Option<Paint>,
    color_glyph_width: Option<f32>,
) -> SourceResult<Encoded> {
    let size = frame.size();
    let mut ctx = Builder::new(options, resources, size);

    if let Some(width) = color_glyph_width {
        ctx.content.start_color_glyph(width);
    }

    // Make the coordinate system start at the top-left.
    ctx.transform(
        // Make the Y axis go upwards
        Transform::scale(Ratio::one(), -Ratio::one())
            // Also move the origin to the top left corner
            .post_concat(Transform::translate(Abs::zero(), size.y)),
    );

    if let Some(fill) = fill {
        let shape = Geometry::Rect(frame.size()).filled(fill);
        write_shape(&mut ctx, Point::zero(), &shape)?;
    }

    // Encode the frame into the content stream.
    write_frame(&mut ctx, frame)?;

    Ok(Encoded {
        size,
        content: deflate_deferred(ctx.content.finish()),
        uses_opacities: ctx.uses_opacities,
        links: ctx.links,
    })
}

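// Editor's sketch (not part of the original diff): the scale-then-translate
// set up in `build` flips the Y axis, mapping Typst's top-left origin into
// PDF's bottom-up coordinate system. Assumes `Abs::pt` and `Point::transform`
// from typst_library.
#[cfg(test)]
mod flip_sketch {
    use typst_library::layout::{Abs, Point, Ratio, Transform};

    #[test]
    fn origin_maps_to_page_height() {
        let height = Abs::pt(100.0);
        let flip = Transform::scale(Ratio::one(), -Ratio::one())
            .post_concat(Transform::translate(Abs::zero(), height));
        assert_eq!(Point::zero().transform(flip).y, height);
    }
}
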
/// An encoded content stream.
pub struct Encoded {
    /// The dimensions of the content.
    pub size: Size,
    /// The actual content stream.
    pub content: Deferred<Vec<u8>>,
    /// Whether the content uses opacities.
    pub uses_opacities: bool,
    /// Links in the PDF coordinate system.
    pub links: Vec<(Destination, Rect)>,
}

/// An exporter for a single PDF content stream.
///
/// Content streams are a series of PDF commands. They can reference external
/// objects only through resources.
///
/// Content streams can be used for page contents, but also to describe color
/// glyphs and tilings.
pub struct Builder<'a, R = ()> {
    /// Settings for PDF export.
    pub(crate) options: &'a PdfOptions<'a>,
    /// A list of all resources that are used in the content stream.
    pub(crate) resources: &'a mut Resources<R>,
    /// The PDF content stream that is being built.
    pub content: Content,
    /// Current graphic state.
    state: State,
    /// Stack of saved graphic states.
    saves: Vec<State>,
    /// Whether any stroke or fill was not totally opaque.
    uses_opacities: bool,
    /// All clickable links that are present in this content.
    links: Vec<(Destination, Rect)>,
}

impl<'a, R> Builder<'a, R> {
    /// Create a new content builder.
    pub fn new(
        options: &'a PdfOptions<'a>,
        resources: &'a mut Resources<R>,
        size: Size,
    ) -> Self {
        Builder {
            options,
            resources,
            uses_opacities: false,
            content: Content::new(),
            state: State::new(size),
            saves: vec![],
            links: vec![],
        }
    }
}

/// A simulated graphics state used to deduplicate graphics state changes and
/// keep track of the current transformation matrix for link annotations.
#[derive(Debug, Clone)]
struct State {
    /// The transform of the current item.
    transform: Transform,
    /// The transform of the first hard frame in the hierarchy.
    container_transform: Transform,
    /// The size of the first hard frame in the hierarchy.
    size: Size,
    /// The current font.
    font: Option<(Font, Abs)>,
    /// The current fill paint.
    fill: Option<Paint>,
    /// The color space of the current fill paint.
    fill_space: Option<Name<'static>>,
    /// The current external graphic state.
    external_graphics_state: ExtGState,
    /// The current stroke paint.
    stroke: Option<FixedStroke>,
    /// The color space of the current stroke paint.
    stroke_space: Option<Name<'static>>,
    /// The current text rendering mode.
    text_rendering_mode: TextRenderingMode,
}

impl State {
    /// Creates a new, clean state for a given `size`.
    pub fn new(size: Size) -> Self {
        Self {
            transform: Transform::identity(),
            container_transform: Transform::identity(),
            size,
            font: None,
            fill: None,
            fill_space: None,
            external_graphics_state: ExtGState::default(),
            stroke: None,
            stroke_space: None,
            text_rendering_mode: TextRenderingMode::Fill,
        }
    }

    /// Creates the [`Transforms`] structure for the current item.
    pub fn transforms(&self, size: Size, pos: Point) -> Transforms {
        Transforms {
            transform: self.transform.pre_concat(Transform::translate(pos.x, pos.y)),
            container_transform: self.container_transform,
            container_size: self.size,
            size,
        }
    }
}

/// Subset of the state used to calculate the transform of gradients and tilings.
#[derive(Debug, Clone, Copy)]
pub(super) struct Transforms {
    /// The transform of the current item.
    pub transform: Transform,
    /// The transform of the first hard frame in the hierarchy.
    pub container_transform: Transform,
    /// The size of the first hard frame in the hierarchy.
    pub container_size: Size,
    /// The size of the item.
    pub size: Size,
}

impl Builder<'_, ()> {
    fn save_state(&mut self) -> SourceResult<()> {
        self.saves.push(self.state.clone());
        self.content.save_state_checked()
    }

    fn restore_state(&mut self) {
        self.content.restore_state();
        self.state = self.saves.pop().expect("missing state save");
    }

    fn set_external_graphics_state(&mut self, graphics_state: &ExtGState) {
        let current_state = &self.state.external_graphics_state;
        if current_state != graphics_state {
            let index = self.resources.ext_gs.insert(*graphics_state);
            let name = eco_format!("Gs{index}");
            self.content.set_parameters(Name(name.as_bytes()));

            self.state.external_graphics_state = *graphics_state;
            if graphics_state.uses_opacities() {
                self.uses_opacities = true;
            }
        }
    }

    fn set_opacities(&mut self, stroke: Option<&FixedStroke>, fill: Option<&Paint>) {
        let get_opacity = |paint: &Paint| {
            let color = match paint {
                Paint::Solid(color) => *color,
                Paint::Gradient(_) | Paint::Tiling(_) => return 255,
            };

            color.alpha().map_or(255, |v| (v * 255.0).round() as u8)
        };

        let stroke_opacity = stroke.map_or(255, |stroke| get_opacity(&stroke.paint));
        let fill_opacity = fill.map_or(255, get_opacity);
        self.set_external_graphics_state(&ExtGState { stroke_opacity, fill_opacity });
    }

    fn reset_opacities(&mut self) {
        self.set_external_graphics_state(&ExtGState {
            stroke_opacity: 255,
            fill_opacity: 255,
        });
    }

    pub fn transform(&mut self, transform: Transform) {
        let Transform { sx, ky, kx, sy, tx, ty } = transform;
        self.state.transform = self.state.transform.pre_concat(transform);
        if self.state.container_transform.is_identity() {
            self.state.container_transform = self.state.transform;
        }
        self.content.transform([
            sx.get() as _,
            ky.get() as _,
            kx.get() as _,
            sy.get() as _,
            tx.to_f32(),
            ty.to_f32(),
        ]);
    }

    fn group_transform(&mut self, transform: Transform) {
        self.state.container_transform =
            self.state.container_transform.pre_concat(transform);
    }

    fn set_font(&mut self, font: &Font, size: Abs) {
        if self.state.font.as_ref().map(|(f, s)| (f, *s)) != Some((font, size)) {
            let index = self.resources.fonts.insert(font.clone());
            let name = eco_format!("F{index}");
            self.content.set_font(Name(name.as_bytes()), size.to_f32());
            self.state.font = Some((font.clone(), size));
        }
    }

    fn size(&mut self, size: Size) {
        self.state.size = size;
    }

    fn set_fill(
        &mut self,
        fill: &Paint,
        on_text: bool,
        transforms: Transforms,
    ) -> SourceResult<()> {
        if self.state.fill.as_ref() != Some(fill)
            || matches!(self.state.fill, Some(Paint::Gradient(_)))
        {
            fill.set_as_fill(self, on_text, transforms)?;
            self.state.fill = Some(fill.clone());
        }
        Ok(())
    }

    pub fn set_fill_color_space(&mut self, space: Name<'static>) {
        if self.state.fill_space != Some(space) {
            self.content.set_fill_color_space(ColorSpaceOperand::Named(space));
            self.state.fill_space = Some(space);
        }
    }

    pub fn reset_fill_color_space(&mut self) {
        self.state.fill_space = None;
    }

    fn set_stroke(
        &mut self,
        stroke: &FixedStroke,
        on_text: bool,
        transforms: Transforms,
    ) -> SourceResult<()> {
        if self.state.stroke.as_ref() != Some(stroke)
            || matches!(
                self.state.stroke.as_ref().map(|s| &s.paint),
                Some(Paint::Gradient(_))
            )
        {
            let FixedStroke { paint, thickness, cap, join, dash, miter_limit } = stroke;
            paint.set_as_stroke(self, on_text, transforms)?;

            self.content.set_line_width(thickness.to_f32());
            if self.state.stroke.as_ref().map(|s| &s.cap) != Some(cap) {
                self.content.set_line_cap(to_pdf_line_cap(*cap));
            }
            if self.state.stroke.as_ref().map(|s| &s.join) != Some(join) {
                self.content.set_line_join(to_pdf_line_join(*join));
            }
            if self.state.stroke.as_ref().map(|s| &s.dash) != Some(dash) {
                if let Some(dash) = dash {
                    self.content.set_dash_pattern(
                        dash.array.iter().map(|l| l.to_f32()),
                        dash.phase.to_f32(),
                    );
                } else {
                    self.content.set_dash_pattern([], 0.0);
                }
            }
            if self.state.stroke.as_ref().map(|s| &s.miter_limit) != Some(miter_limit) {
                self.content.set_miter_limit(miter_limit.get() as f32);
            }
            self.state.stroke = Some(stroke.clone());
        }

        Ok(())
    }

    pub fn set_stroke_color_space(&mut self, space: Name<'static>) {
        if self.state.stroke_space != Some(space) {
            self.content.set_stroke_color_space(ColorSpaceOperand::Named(space));
            self.state.stroke_space = Some(space);
        }
    }

    pub fn reset_stroke_color_space(&mut self) {
        self.state.stroke_space = None;
    }

    fn set_text_rendering_mode(&mut self, mode: TextRenderingMode) {
        if self.state.text_rendering_mode != mode {
            self.content.set_text_rendering_mode(mode);
            self.state.text_rendering_mode = mode;
        }
    }
}

/// Encode a frame into the content stream.
pub(crate) fn write_frame(ctx: &mut Builder, frame: &Frame) -> SourceResult<()> {
    for &(pos, ref item) in frame.items() {
        let x = pos.x.to_f32();
        let y = pos.y.to_f32();
        match item {
            FrameItem::Group(group) => write_group(ctx, pos, group)?,
            FrameItem::Text(text) => write_text(ctx, pos, text)?,
            FrameItem::Shape(shape, _) => write_shape(ctx, pos, shape)?,
            FrameItem::Image(image, size, span) => {
                write_image(ctx, x, y, image, *size, *span)?
            }
            FrameItem::Link(dest, size) => write_link(ctx, pos, dest, *size),
            FrameItem::Tag(_) => {}
        }
    }
    Ok(())
}

/// Encode a group into the content stream.
fn write_group(ctx: &mut Builder, pos: Point, group: &GroupItem) -> SourceResult<()> {
    let translation = Transform::translate(pos.x, pos.y);

    ctx.save_state()?;

    if group.frame.kind().is_hard() {
        ctx.group_transform(
            ctx.state
                .transform
                .post_concat(ctx.state.container_transform.invert().unwrap())
                .pre_concat(translation)
                .pre_concat(group.transform),
        );
        ctx.size(group.frame.size());
    }

    ctx.transform(translation.pre_concat(group.transform));
    if let Some(clip_curve) = &group.clip {
        write_curve(ctx, 0.0, 0.0, clip_curve);
        ctx.content.clip_nonzero();
        ctx.content.end_path();
    }

    write_frame(ctx, &group.frame)?;
    ctx.restore_state();

    Ok(())
}

/// Encode a text run into the content stream.
fn write_text(ctx: &mut Builder, pos: Point, text: &TextItem) -> SourceResult<()> {
    if ctx.options.standards.pdfa && text.font.info().is_last_resort() {
        bail!(
            Span::find(text.glyphs.iter().map(|g| g.span.0)),
            "the text {} could not be displayed with any font",
            &text.text,
        );
    }

    let outline_glyphs =
        text.glyphs.iter().filter(|g| should_outline(&text.font, g)).count();

    if outline_glyphs == text.glyphs.len() {
        write_normal_text(ctx, pos, TextItemView::full(text))?;
    } else if outline_glyphs == 0 {
        write_complex_glyphs(ctx, pos, TextItemView::full(text))?;
    } else {
        // Otherwise, we need to split it into smaller text runs.
        let mut offset = 0;
        let mut position_in_run = Abs::zero();
        for (should_outline, sub_run) in
            text.glyphs.group_by_key(|g| should_outline(&text.font, g))
        {
            let end = offset + sub_run.len();

            // Build a sub text-run.
            let text_item_view = TextItemView::from_glyph_range(text, offset..end);

            // Adjust the position of the run on the line.
            let pos = pos + Point::new(position_in_run, Abs::zero());
            position_in_run += text_item_view.width();
            offset = end;

            // Actually write the sub text-run.
            if should_outline {
                write_normal_text(ctx, pos, text_item_view)?;
            } else {
                write_complex_glyphs(ctx, pos, text_item_view)?;
            }
        }
    }

    Ok(())
}

/// Encodes a text run (without any color glyph) into the content stream.
fn write_normal_text(
    ctx: &mut Builder,
    pos: Point,
    text: TextItemView,
) -> SourceResult<()> {
    let x = pos.x.to_f32();
    let y = pos.y.to_f32();

    *ctx.resources.languages.entry(text.item.lang).or_insert(0) += text.glyph_range.len();

    let glyph_set = ctx.resources.glyph_sets.entry(text.item.font.clone()).or_default();
    for g in text.glyphs() {
        glyph_set.entry(g.id).or_insert_with(|| text.glyph_text(g));
    }

    let fill_transform = ctx.state.transforms(Size::zero(), pos);
    ctx.set_fill(&text.item.fill, true, fill_transform)?;

    let stroke = text.item.stroke.as_ref().and_then(|stroke| {
        if stroke.thickness.to_f32() > 0.0 {
            Some(stroke)
        } else {
            None
        }
    });

    if let Some(stroke) = stroke {
        ctx.set_stroke(stroke, true, fill_transform)?;
        ctx.set_text_rendering_mode(TextRenderingMode::FillStroke);
    } else {
        ctx.set_text_rendering_mode(TextRenderingMode::Fill);
    }

    ctx.set_font(&text.item.font, text.item.size);
    ctx.set_opacities(text.item.stroke.as_ref(), Some(&text.item.fill));
    ctx.content.begin_text();

    // Position the text.
    ctx.content.set_text_matrix([1.0, 0.0, 0.0, -1.0, x, y]);

    let mut positioned = ctx.content.show_positioned();
    let mut items = positioned.items();
    let mut adjustment = Em::zero();
    let mut encoded = vec![];

    let glyph_remapper = ctx
        .resources
        .glyph_remappers
        .entry(text.item.font.clone())
        .or_default();

    // Write the glyphs with kerning adjustments.
    for glyph in text.glyphs() {
        if ctx.options.standards.pdfa && glyph.id == 0 {
            bail!(tofu(&text, glyph));
        }

        adjustment += glyph.x_offset;

        if !adjustment.is_zero() {
            if !encoded.is_empty() {
                show_text(&mut items, &encoded);
                encoded.clear();
            }

            items.adjust(-adjustment.to_font_units());
            adjustment = Em::zero();
        }

        // In PDF, we use CIDs to index the glyphs in a font, not GIDs. What a
        // CID actually refers to depends on the type of font we are embedding:
        //
        // - For TrueType fonts, the CIDs are defined by an external mapping.
        // - For SID-keyed CFF fonts, the CID is the same as the GID in the font.
        // - For CID-keyed CFF fonts, the CID refers to the CID in the font.
        //
        // (See the PDF spec for more details on this.)
        //
        // However, in our case:
        // - We use the identity mapping for TrueType fonts.
        // - SID-keyed fonts will get converted into CID-keyed fonts by the
        //   subsetter.
        // - CID-keyed fonts will be rewritten in a way so that the mapping
        //   between CID and GID is always the identity mapping, regardless of
        //   the mapping before.
        //
        // Because of this, we can always use the remapped GID as the CID,
        // regardless of which type of font we are actually embedding.
        let cid = glyph_remapper.remap(glyph.id);
        encoded.push((cid >> 8) as u8);
        encoded.push((cid & 0xff) as u8);

        if let Some(advance) = text.item.font.advance(glyph.id) {
            adjustment += glyph.x_advance - advance;
        }

        adjustment -= glyph.x_offset;
    }

    if !encoded.is_empty() {
        show_text(&mut items, &encoded);
    }

    items.finish();
    positioned.finish();
    ctx.content.end_text();

    Ok(())
}

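// Editor's sketch (not part of the original diff): CIDs are emitted big-endian
// as two bytes, matching the `cid >> 8` / `cid & 0xff` pushes above.
#[cfg(test)]
mod cid_encoding_sketch {
    #[test]
    fn cid_is_big_endian() {
        let cid: u16 = 0x1234;
        let bytes = [(cid >> 8) as u8, (cid & 0xff) as u8];
        assert_eq!(bytes, [0x12, 0x34]);
    }
}
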
/// Shows text, ensuring that each individual string doesn't exceed the
/// implementation limits.
fn show_text(items: &mut PositionedItems, encoded: &[u8]) {
    for chunk in encoded.chunks(Str::PDFA_LIMIT) {
        items.show(Str(chunk));
    }
}

/// Encodes a text run made only of color glyphs into the content stream.
fn write_complex_glyphs(
    ctx: &mut Builder,
    pos: Point,
    text: TextItemView,
) -> SourceResult<()> {
    let x = pos.x.to_f32();
    let y = pos.y.to_f32();

    let mut last_font = None;

    ctx.reset_opacities();

    ctx.content.begin_text();
    ctx.content.set_text_matrix([1.0, 0.0, 0.0, -1.0, x, y]);
    // So that the next call to ctx.set_font() will change the font to one that
    // displays regular glyphs and not color glyphs.
    ctx.state.font = None;

    let glyph_set = ctx
        .resources
        .color_glyph_sets
        .entry(text.item.font.clone())
        .or_default();

    for glyph in text.glyphs() {
        if ctx.options.standards.pdfa && glyph.id == 0 {
            bail!(tofu(&text, glyph));
        }

        // Retrieve the Type3 font reference and the glyph index in the font.
        let color_fonts = ctx
            .resources
            .color_fonts
            .get_or_insert_with(|| Box::new(ColorFontMap::new()));

        let (font, index) = color_fonts.get(ctx.options, &text, glyph)?;

        if last_font != Some(font) {
            ctx.content.set_font(
                Name(eco_format!("Cf{}", font).as_bytes()),
                text.item.size.to_f32(),
            );
            last_font = Some(font);
        }

        ctx.content.show(Str(&[index]));

        glyph_set.entry(glyph.id).or_insert_with(|| text.glyph_text(glyph));
    }
    ctx.content.end_text();

    Ok(())
}

/// Encode a geometrical shape into the content stream.
fn write_shape(ctx: &mut Builder, pos: Point, shape: &Shape) -> SourceResult<()> {
    let x = pos.x.to_f32();
    let y = pos.y.to_f32();

    let stroke = shape.stroke.as_ref().and_then(|stroke| {
        if stroke.thickness.to_f32() > 0.0 {
            Some(stroke)
        } else {
            None
        }
    });

    if shape.fill.is_none() && stroke.is_none() {
        return Ok(());
    }

    if let Some(fill) = &shape.fill {
        ctx.set_fill(fill, false, ctx.state.transforms(shape.geometry.bbox_size(), pos))?;
    }

    if let Some(stroke) = stroke {
        ctx.set_stroke(
            stroke,
            false,
            ctx.state.transforms(shape.geometry.bbox_size(), pos),
        )?;
    }

    ctx.set_opacities(stroke, shape.fill.as_ref());

    match &shape.geometry {
        Geometry::Line(target) => {
            let dx = target.x.to_f32();
            let dy = target.y.to_f32();
            ctx.content.move_to(x, y);
            ctx.content.line_to(x + dx, y + dy);
        }
        Geometry::Rect(size) => {
            let w = size.x.to_f32();
            let h = size.y.to_f32();
            if w.abs() > f32::EPSILON && h.abs() > f32::EPSILON {
                ctx.content.rect(x, y, w, h);
            }
        }
        Geometry::Curve(curve) => {
            write_curve(ctx, x, y, curve);
        }
    }

    match (&shape.fill, &shape.fill_rule, stroke) {
        (None, _, None) => unreachable!(),
        (Some(_), FillRule::NonZero, None) => ctx.content.fill_nonzero(),
        (Some(_), FillRule::EvenOdd, None) => ctx.content.fill_even_odd(),
        (None, _, Some(_)) => ctx.content.stroke(),
        (Some(_), FillRule::NonZero, Some(_)) => ctx.content.fill_nonzero_and_stroke(),
        (Some(_), FillRule::EvenOdd, Some(_)) => ctx.content.fill_even_odd_and_stroke(),
    };

    Ok(())
}

/// Encode a curve into the content stream.
fn write_curve(ctx: &mut Builder, x: f32, y: f32, curve: &Curve) {
    for elem in &curve.0 {
        match elem {
            CurveItem::Move(p) => ctx.content.move_to(x + p.x.to_f32(), y + p.y.to_f32()),
            CurveItem::Line(p) => ctx.content.line_to(x + p.x.to_f32(), y + p.y.to_f32()),
            CurveItem::Cubic(p1, p2, p3) => ctx.content.cubic_to(
                x + p1.x.to_f32(),
                y + p1.y.to_f32(),
                x + p2.x.to_f32(),
                y + p2.y.to_f32(),
                x + p3.x.to_f32(),
                y + p3.y.to_f32(),
            ),
            CurveItem::Close => ctx.content.close_path(),
        };
    }
}

/// Encode a vector or raster image into the content stream.
fn write_image(
    ctx: &mut Builder,
    x: f32,
    y: f32,
    image: &Image,
    size: Size,
    span: Span,
) -> SourceResult<()> {
    let index = ctx.resources.images.insert(image.clone());
    ctx.resources.deferred_images.entry(index).or_insert_with(|| {
        let (image, color_space) =
            deferred_image(image.clone(), ctx.options.standards.pdfa);
        if let Some(color_space) = color_space {
            ctx.resources.colors.mark_as_used(color_space);
        }
        (image, span)
    });

    ctx.reset_opacities();

    let name = eco_format!("Im{index}");
    let w = size.x.to_f32();
    let h = size.y.to_f32();
    ctx.content.save_state_checked()?;
    ctx.content.transform([w, 0.0, 0.0, -h, x, y + h]);

    if let Some(alt) = image.alt() {
        if ctx.options.standards.pdfa && alt.len() > Str::PDFA_LIMIT {
            bail!(span, "the image's alt text is too long");
        }

        let mut image_span =
            ctx.content.begin_marked_content_with_properties(Name(b"Span"));
        let mut image_alt = image_span.properties();
        image_alt.pair(Name(b"Alt"), Str(alt.as_bytes()));
        image_alt.finish();
        image_span.finish();

        ctx.content.x_object(Name(name.as_bytes()));
        ctx.content.end_marked_content();
    } else {
        ctx.content.x_object(Name(name.as_bytes()));
    }

    ctx.content.restore_state();
    Ok(())
}

/// Save a link for later writing in the annotations dictionary.
fn write_link(ctx: &mut Builder, pos: Point, dest: &Destination, size: Size) {
    let mut min_x = Abs::inf();
    let mut min_y = Abs::inf();
    let mut max_x = -Abs::inf();
    let mut max_y = -Abs::inf();

    // Compute the bounding box of the transformed link.
    for point in [
        pos,
        pos + Point::with_x(size.x),
        pos + Point::with_y(size.y),
        pos + size.to_point(),
    ] {
        let t = point.transform(ctx.state.transform);
        min_x.set_min(t.x);
        min_y.set_min(t.y);
        max_x.set_max(t.x);
        max_y.set_max(t.y);
    }

    let x1 = min_x.to_f32();
    let x2 = max_x.to_f32();
    let y1 = max_y.to_f32();
    let y2 = min_y.to_f32();
    let rect = Rect::new(x1, y1, x2, y2);

    ctx.links.push((dest.clone(), rect));
}

fn to_pdf_line_cap(cap: LineCap) -> LineCapStyle {
    match cap {
        LineCap::Butt => LineCapStyle::ButtCap,
        LineCap::Round => LineCapStyle::RoundCap,
        LineCap::Square => LineCapStyle::ProjectingSquareCap,
    }
}

fn to_pdf_line_join(join: LineJoin) -> LineJoinStyle {
    match join {
        LineJoin::Miter => LineJoinStyle::MiterJoin,
        LineJoin::Round => LineJoinStyle::RoundJoin,
        LineJoin::Bevel => LineJoinStyle::BevelJoin,
    }
}

/// The error when there is a tofu glyph.
#[cold]
fn tofu(text: &TextItemView, glyph: &Glyph) -> SourceDiagnostic {
    error!(
        glyph.span.0,
        "the text {} could not be displayed with any font",
        text.glyph_text(glyph).repr(),
    )
}
661
crates/typst-pdf/src/convert.rs
Normal file
@ -0,0 +1,661 @@
use std::collections::{BTreeMap, HashMap, HashSet};
use std::num::NonZeroU64;

use ecow::{eco_format, EcoVec};
use krilla::annotation::Annotation;
use krilla::configure::{Configuration, ValidationError, Validator};
use krilla::destination::{NamedDestination, XyzDestination};
use krilla::embed::EmbedError;
use krilla::error::KrillaError;
use krilla::geom::PathBuilder;
use krilla::page::{PageLabel, PageSettings};
use krilla::surface::Surface;
use krilla::{Document, SerializeSettings};
use krilla_svg::render_svg_glyph;
use typst_library::diag::{bail, error, SourceDiagnostic, SourceResult};
use typst_library::foundations::NativeElement;
use typst_library::introspection::Location;
use typst_library::layout::{
    Abs, Frame, FrameItem, GroupItem, PagedDocument, Size, Transform,
};
use typst_library::model::HeadingElem;
use typst_library::text::{Font, Lang};
use typst_library::visualize::{Geometry, Paint};
use typst_syntax::Span;

use crate::embed::embed_files;
use crate::image::handle_image;
use crate::link::handle_link;
use crate::metadata::build_metadata;
use crate::outline::build_outline;
use crate::page::PageLabelExt;
use crate::shape::handle_shape;
use crate::text::handle_text;
use crate::util::{convert_path, display_font, AbsExt, TransformExt};
use crate::PdfOptions;

#[typst_macros::time(name = "convert document")]
pub fn convert(
    typst_document: &PagedDocument,
    options: &PdfOptions,
) -> SourceResult<Vec<u8>> {
    let settings = SerializeSettings {
        compress_content_streams: true,
        no_device_cs: true,
        ascii_compatible: false,
        xmp_metadata: true,
        cmyk_profile: None,
        configuration: options.standards.config,
        enable_tagging: false,
        render_svg_glyph_fn: render_svg_glyph,
    };

    let mut document = Document::new_with(settings);
    let page_index_converter = PageIndexConverter::new(typst_document, options);
    let named_destinations =
        collect_named_destinations(typst_document, &page_index_converter);
    let mut gc = GlobalContext::new(
        typst_document,
        options,
        named_destinations,
        page_index_converter,
    );

    convert_pages(&mut gc, &mut document)?;
    embed_files(typst_document, &mut document)?;

    document.set_outline(build_outline(&gc));
    document.set_metadata(build_metadata(&gc));

    finish(document, gc, options.standards.config)
}

fn convert_pages(gc: &mut GlobalContext, document: &mut Document) -> SourceResult<()> {
    for (i, typst_page) in gc.document.pages.iter().enumerate() {
        if gc.page_index_converter.pdf_page_index(i).is_none() {
            // Don't export this page.
            continue;
        } else {
            let mut settings = PageSettings::new(
                typst_page.frame.width().to_f32(),
                typst_page.frame.height().to_f32(),
            );

            if let Some(label) = typst_page
                .numbering
                .as_ref()
                .and_then(|num| PageLabel::generate(num, typst_page.number))
                .or_else(|| {
                    // When some pages were ignored from export, we show a page label with
                    // the correct real (not logical) page number.
                    // This is for consistency with normal output when pages have no numbering
                    // and all are exported: the final PDF page numbers always correspond to
                    // the real (not logical) page numbers. Here, the final PDF page number
                    // will differ, but we can at least use labels to indicate what was
                    // the corresponding real page number in the Typst document.
                    gc.page_index_converter
                        .has_skipped_pages()
                        .then(|| PageLabel::arabic((i + 1) as u64))
                })
            {
                settings = settings.with_page_label(label);
            }

            let mut page = document.start_page_with(settings);
            let mut surface = page.surface();
            let mut fc = FrameContext::new(typst_page.frame.size());

            handle_frame(
                &mut fc,
                &typst_page.frame,
                typst_page.fill_or_transparent(),
                &mut surface,
                gc,
            )?;

            surface.finish();

            for annotation in fc.annotations {
                page.add_annotation(annotation);
            }
        }
    }

    Ok(())
}

/// A state allowing us to keep track of transforms and container sizes,
/// which is mainly needed to resolve gradients and patterns correctly.
#[derive(Debug, Clone)]
pub(crate) struct State {
    /// The current transform.
    transform: Transform,
    /// The transform of the first hard frame in the hierarchy.
    container_transform: Transform,
    /// The size of the first hard frame in the hierarchy.
    container_size: Size,
}

impl State {
    /// Creates a new, clean state for a given `size`.
    fn new(size: Size) -> Self {
        Self {
            transform: Transform::identity(),
            container_transform: Transform::identity(),
            container_size: size,
        }
    }

    pub(crate) fn register_container(&mut self, size: Size) {
        self.container_transform = self.transform;
        self.container_size = size;
    }

    pub(crate) fn pre_concat(&mut self, transform: Transform) {
        self.transform = self.transform.pre_concat(transform);
    }

    pub(crate) fn transform(&self) -> Transform {
        self.transform
    }

    pub(crate) fn container_transform(&self) -> Transform {
        self.container_transform
    }

    pub(crate) fn container_size(&self) -> Size {
        self.container_size
    }
}

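// Editor's sketch (not part of the original diff): `register_container`
// snapshots the transform at a hard frame, so gradients and patterns are
// later resolved relative to the innermost container. Assumes `Abs::pt` and
// that `Transform` implements `PartialEq`.
#[cfg(test)]
mod state_sketch {
    use super::State;
    use typst_library::layout::{Abs, Size, Transform};

    #[test]
    fn container_snapshot_follows_current_transform() {
        let mut state = State::new(Size::new(Abs::pt(10.0), Abs::pt(10.0)));
        state.pre_concat(Transform::translate(Abs::pt(5.0), Abs::zero()));
        state.register_container(Size::new(Abs::pt(4.0), Abs::pt(4.0)));
        assert_eq!(state.container_transform(), state.transform());
    }
}
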
/// Context needed for converting a single frame.
|
||||
pub(crate) struct FrameContext {
|
||||
states: Vec<State>,
|
||||
annotations: Vec<Annotation>,
|
||||
}
|
||||
|
||||
impl FrameContext {
|
||||
pub(crate) fn new(size: Size) -> Self {
|
||||
Self {
|
||||
states: vec![State::new(size)],
|
||||
annotations: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn push(&mut self) {
|
||||
self.states.push(self.states.last().unwrap().clone());
|
||||
}
|
||||
|
||||
pub(crate) fn pop(&mut self) {
|
||||
self.states.pop();
|
||||
}
|
||||
|
||||
pub(crate) fn state(&self) -> &State {
|
||||
self.states.last().unwrap()
|
||||
}
|
||||
|
||||
pub(crate) fn state_mut(&mut self) -> &mut State {
|
||||
self.states.last_mut().unwrap()
|
||||
}
|
||||
|
||||
pub(crate) fn push_annotation(&mut self, annotation: Annotation) {
|
||||
self.annotations.push(annotation);
|
||||
}
|
||||
}
|
||||
|
||||
/// Globally needed context for converting a typst document.
|
||||
pub(crate) struct GlobalContext<'a> {
|
||||
/// Cache the conversion between krilla and Typst fonts (forward and backward).
|
||||
pub(crate) fonts_forward: HashMap<Font, krilla::text::Font>,
|
||||
pub(crate) fonts_backward: HashMap<krilla::text::Font, Font>,
|
||||
/// Mapping between images and their span.
|
||||
// Note: In theory, the same image can have multiple spans
|
||||
// if it appears in the document multiple times. We just store the
|
||||
// first appearance, though.
|
||||
pub(crate) image_to_spans: HashMap<krilla::image::Image, Span>,
|
||||
/// The spans of all images that appear in the document. We use this so
|
||||
/// we can give more accurate error messages.
|
||||
pub(crate) image_spans: HashSet<Span>,
|
||||
/// The document to convert.
|
||||
pub(crate) document: &'a PagedDocument,
|
||||
/// Options for PDF export.
|
||||
pub(crate) options: &'a PdfOptions<'a>,
|
||||
/// Mapping between locations in the document and named destinations.
|
||||
pub(crate) loc_to_names: HashMap<Location, NamedDestination>,
|
||||
/// The languages used throughout the document.
|
||||
pub(crate) languages: BTreeMap<Lang, usize>,
|
||||
pub(crate) page_index_converter: PageIndexConverter,
|
||||
}
|
||||
|
||||
impl<'a> GlobalContext<'a> {
|
||||
pub(crate) fn new(
|
||||
document: &'a PagedDocument,
|
||||
options: &'a PdfOptions,
|
||||
loc_to_names: HashMap<Location, NamedDestination>,
|
||||
page_index_converter: PageIndexConverter,
|
||||
) -> GlobalContext<'a> {
|
||||
Self {
|
||||
fonts_forward: HashMap::new(),
|
||||
fonts_backward: HashMap::new(),
|
||||
document,
|
||||
options,
|
||||
loc_to_names,
|
||||
image_to_spans: HashMap::new(),
|
||||
image_spans: HashSet::new(),
|
||||
languages: BTreeMap::new(),
|
||||
page_index_converter,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[typst_macros::time(name = "handle page")]
|
||||
pub(crate) fn handle_frame(
|
||||
fc: &mut FrameContext,
|
||||
frame: &Frame,
|
||||
fill: Option<Paint>,
|
||||
surface: &mut Surface,
|
||||
gc: &mut GlobalContext,
|
||||
) -> SourceResult<()> {
|
||||
fc.push();
|
||||
|
||||
if frame.kind().is_hard() {
|
||||
fc.state_mut().register_container(frame.size());
|
||||
}
|
||||
|
||||
if let Some(fill) = fill {
|
||||
let shape = Geometry::Rect(frame.size()).filled(fill);
|
||||
handle_shape(fc, &shape, surface, gc, Span::detached())?;
|
||||
}
|
||||
|
||||
for (point, item) in frame.items() {
|
||||
fc.push();
|
||||
fc.state_mut().pre_concat(Transform::translate(point.x, point.y));
|
||||
|
||||
match item {
|
||||
FrameItem::Group(g) => handle_group(fc, g, surface, gc)?,
|
||||
FrameItem::Text(t) => handle_text(fc, t, surface, gc)?,
|
||||
FrameItem::Shape(s, span) => handle_shape(fc, s, surface, gc, *span)?,
|
||||
FrameItem::Image(image, size, span) => {
|
||||
handle_image(gc, fc, image, *size, surface, *span)?
|
||||
}
|
||||
FrameItem::Link(d, s) => handle_link(fc, gc, d, *s),
|
||||
FrameItem::Tag(_) => {}
|
||||
}
|
||||
|
||||
fc.pop();
|
||||
}
|
||||
|
||||
fc.pop();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn handle_group(
|
||||
fc: &mut FrameContext,
|
||||
group: &GroupItem,
|
||||
surface: &mut Surface,
|
||||
context: &mut GlobalContext,
|
||||
) -> SourceResult<()> {
|
||||
fc.push();
|
||||
fc.state_mut().pre_concat(group.transform);
|
||||
|
||||
let clip_path = group
|
||||
.clip
|
||||
.as_ref()
|
||||
.and_then(|p| {
|
||||
let mut builder = PathBuilder::new();
|
||||
convert_path(p, &mut builder);
|
||||
builder.finish()
|
||||
})
|
||||
.and_then(|p| p.transform(fc.state().transform.to_krilla()));
|
||||
|
||||
if let Some(clip_path) = &clip_path {
|
||||
surface.push_clip_path(clip_path, &krilla::paint::FillRule::NonZero);
|
||||
}
|
||||
|
||||
handle_frame(fc, &group.frame, None, surface, context)?;
|
||||
|
||||
if clip_path.is_some() {
|
||||
surface.pop();
|
||||
}
|
||||
|
||||
fc.pop();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[typst_macros::time(name = "finish export")]
|
||||
/// Finish a krilla document and handle export errors.
|
||||
fn finish(
|
||||
document: Document,
|
||||
gc: GlobalContext,
|
||||
configuration: Configuration,
|
||||
) -> SourceResult<Vec<u8>> {
|
||||
let validator = configuration.validator();
|
||||
|
||||
match document.finish() {
|
||||
Ok(r) => Ok(r),
|
||||
Err(e) => match e {
|
||||
KrillaError::Font(f, s) => {
|
||||
let font_str = display_font(gc.fonts_backward.get(&f).unwrap());
|
||||
bail!(
|
||||
Span::detached(),
|
||||
"failed to process font {font_str}: {s}";
|
||||
hint: "make sure the font is valid";
|
||||
hint: "the used font might be unsupported by Typst"
|
||||
);
|
||||
}
|
||||
KrillaError::Validation(ve) => {
|
||||
let errors = ve
|
||||
.iter()
|
||||
.map(|e| convert_error(&gc, validator, e))
|
||||
.collect::<EcoVec<_>>();
|
||||
Err(errors)
|
||||
}
|
||||
KrillaError::Image(_, loc) => {
|
||||
let span = to_span(loc);
|
||||
bail!(span, "failed to process image");
|
||||
}
|
||||
KrillaError::SixteenBitImage(image, _) => {
|
||||
let span = gc.image_to_spans.get(&image).unwrap();
|
||||
bail!(
|
||||
*span, "16 bit images are not supported in this export mode";
|
||||
hint: "convert the image to 8 bit instead"
|
||||
)
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts a krilla error into a Typst error.
|
||||
fn convert_error(
|
||||
gc: &GlobalContext,
|
||||
validator: Validator,
|
||||
error: &ValidationError,
|
||||
) -> SourceDiagnostic {
|
||||
let prefix = eco_format!("{} error:", validator.as_str());
|
||||
match error {
|
||||
ValidationError::TooLongString => error!(
|
||||
Span::detached(),
|
||||
"{prefix} a PDF string is longer than 32767 characters";
|
||||
hint: "ensure title and author names are short enough"
|
||||
),
|
||||
// Should in theory never occur, as krilla always trims font names.
|
||||
ValidationError::TooLongName => error!(
|
||||
Span::detached(),
|
||||
"{prefix} a PDF name is longer than 127 characters";
|
||||
hint: "perhaps a font name is too long"
|
||||
),
|
||||
|
||||
ValidationError::TooLongArray => error!(
|
||||
Span::detached(),
|
||||
"{prefix} a PDF array is longer than 8191 elements";
|
||||
hint: "this can happen if you have a very long text in a single line"
|
||||
),
|
||||
ValidationError::TooLongDictionary => error!(
|
||||
Span::detached(),
|
||||
"{prefix} a PDF dictionary has more than 4095 entries";
|
||||
hint: "try reducing the complexity of your document"
|
||||
),
|
||||
ValidationError::TooLargeFloat => error!(
|
||||
Span::detached(),
|
||||
"{prefix} a PDF floating point number is larger than the allowed limit";
|
||||
hint: "try exporting with a higher PDF version"
|
||||
),
|
||||
ValidationError::TooManyIndirectObjects => error!(
|
||||
Span::detached(),
|
||||
"{prefix} the PDF has too many indirect objects";
|
||||
hint: "reduce the size of your document"
|
||||
),
|
||||
// Can only occur if we have 27+ nested clip paths
|
||||
        ValidationError::TooHighQNestingLevel => error!(
            Span::detached(),
            "{prefix} the PDF has too high q nesting";
            hint: "reduce the number of nested containers"
        ),
        ValidationError::ContainsPostScript(loc) => error!(
            to_span(*loc),
            "{prefix} the PDF contains PostScript code";
            hint: "conic gradients are not supported in this PDF standard"
        ),
        ValidationError::MissingCMYKProfile => error!(
            Span::detached(),
            "{prefix} the PDF is missing a CMYK profile";
            hint: "CMYK colors are not yet supported in this export mode"
        ),
        ValidationError::ContainsNotDefGlyph(f, loc, text) => error!(
            to_span(*loc),
            "{prefix} the text '{text}' cannot be displayed using {}",
            display_font(gc.fonts_backward.get(f).unwrap());
            hint: "try using a different font"
        ),
        ValidationError::InvalidCodepointMapping(_, _, cp, loc) => {
            if let Some(c) = cp.map(|c| eco_format!("{:#06x}", c as u32)) {
                let msg = if loc.is_some() {
                    "the PDF contains text with"
                } else {
                    "the text contains"
                };
                error!(to_span(*loc), "{prefix} {msg} the disallowed codepoint {c}")
            } else {
                // This code path should in theory be unreachable,
                // but we handle it anyway, just to be safe.
                let msg = if loc.is_some() {
                    "the PDF contains text with missing codepoints"
                } else {
                    "the text was not mapped to a code point"
                };
                error!(
                    to_span(*loc),
                    "{prefix} {msg}";
                    hint: "for complex scripts like Arabic, it might not be \
                           possible to produce a compliant document"
                )
            }
        }
        ValidationError::UnicodePrivateArea(_, _, c, loc) => {
            let code_point = eco_format!("{:#06x}", *c as u32);
            let msg = if loc.is_some() { "the PDF" } else { "the text" };
            error!(
                to_span(*loc),
                "{prefix} {msg} contains the codepoint {code_point}";
                hint: "codepoints from the Unicode private area are \
                       forbidden in this export mode"
            )
        }
        ValidationError::Transparency(loc) => {
            let span = to_span(*loc);
            let hint1 = "try exporting with a different standard that \
                         supports transparency";
            if loc.is_some() {
                if gc.image_spans.contains(&span) {
                    error!(
                        span, "{prefix} the image contains transparency";
                        hint: "{hint1}";
                        hint: "or convert the image to a non-transparent one";
                        hint: "you might have to convert SVGs into \
                               non-transparent bitmap images"
                    )
                } else {
                    error!(
                        span, "{prefix} the used fill or stroke has transparency";
                        hint: "{hint1}";
                        hint: "or don't use colors with transparency in \
                               this export mode"
                    )
                }
            } else {
                error!(
                    span, "{prefix} the PDF contains transparency";
                    hint: "{hint1}"
                )
            }
        }
        ValidationError::ImageInterpolation(loc) => {
            let span = to_span(*loc);
            if loc.is_some() {
                error!(
                    span, "{prefix} the image has smooth scaling";
                    hint: "set the `scaling` attribute to `pixelated`"
                )
            } else {
                error!(
                    span, "{prefix} an image in the PDF has smooth scaling";
                    hint: "set the `scaling` attribute of all images to `pixelated`"
                )
            }
        }
        ValidationError::EmbeddedFile(e, s) => {
            // We always set the span for embedded files, so it cannot be detached.
            let span = to_span(*s);
            match e {
                EmbedError::Existence => {
                    error!(
                        span, "{prefix} document contains an embedded file";
                        hint: "embedded files are not supported in this export mode"
                    )
                }
                EmbedError::MissingDate => {
                    error!(
                        span, "{prefix} document date is missing";
                        hint: "the document must have a date when embedding files";
                        hint: "`set document(date: none)` must not be used in this case"
                    )
                }
                EmbedError::MissingDescription => {
                    error!(span, "{prefix} the file description is missing")
                }
                EmbedError::MissingMimeType => {
                    error!(span, "{prefix} the file mime type is missing")
                }
            }
        }
        // The errors below cannot occur yet; they only apply once Typst
        // supports full PDF/A and PDF/UA. But let's still add a message,
        // just to be on the safe side.
        ValidationError::MissingAnnotationAltText => error!(
            Span::detached(),
            "{prefix} missing annotation alt text";
            hint: "please report this as a bug"
        ),
        ValidationError::MissingAltText => error!(
            Span::detached(),
            "{prefix} missing alt text";
            hint: "make sure your images and equations have alt text"
        ),
        ValidationError::NoDocumentLanguage => error!(
            Span::detached(),
            "{prefix} missing document language";
            hint: "set the language of the document"
        ),
        // Needs to be set by typst-pdf.
        ValidationError::MissingHeadingTitle => error!(
            Span::detached(),
            "{prefix} missing heading title";
            hint: "please report this as a bug"
        ),
        ValidationError::MissingDocumentOutline => error!(
            Span::detached(),
            "{prefix} missing document outline";
            hint: "please report this as a bug"
        ),
        ValidationError::MissingTagging => error!(
            Span::detached(),
            "{prefix} missing document tags";
            hint: "please report this as a bug"
        ),
        ValidationError::NoDocumentTitle => error!(
            Span::detached(),
            "{prefix} missing document title";
            hint: "set the title of the document"
        ),
        ValidationError::MissingDocumentDate => error!(
            Span::detached(),
            "{prefix} missing document date";
            hint: "set the date of the document"
        ),
    }
}

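// Worked example (editor's note, not part of the diff): `{:#06x}` above
// formats a codepoint as zero-padded hex with a minimum width of six
// characters including the `0x` prefix, so 'A' (U+0041) prints as `0x0041`,
// while a codepoint beyond four hex digits, such as U+1F600, simply grows
// to `0x1f600`.
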
/// Convert a krilla location to a span.
fn to_span(loc: Option<krilla::surface::Location>) -> Span {
    loc.map(|l| Span::from_raw(NonZeroU64::new(l).unwrap()))
        .unwrap_or(Span::detached())
}

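// Editor's sketch (not part of the diff): krilla locations are just the raw
// `NonZeroU64` values of Typst spans, so the conversion is assumed to
// round-trip for non-detached spans:
//
//     let raw: u64 = span.into_raw().get();        // stored as a Location
//     assert_eq!(to_span(Some(raw)), span);        // recovered here
//     assert_eq!(to_span(None), Span::detached()); // detached stays detached
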
fn collect_named_destinations(
    document: &PagedDocument,
    pic: &PageIndexConverter,
) -> HashMap<Location, NamedDestination> {
    let mut locs_to_names = HashMap::new();

    // Find all headings that have a label and are the first among other
    // headings with the same label.
    let matches: Vec<_> = {
        let mut seen = HashSet::new();
        document
            .introspector
            .query(&HeadingElem::elem().select())
            .iter()
            .filter_map(|elem| elem.location().zip(elem.label()))
            .filter(|&(_, label)| seen.insert(label))
            .collect()
    };

    for (loc, label) in matches {
        let pos = document.introspector.position(loc);
        let index = pos.page.get() - 1;
        // We subtract 10pt because the position of a link target (e.g. a
        // heading) is at its baseline; if we linked directly to it, the text
        // would sit right above the destination and not be visible.
        let y = (pos.point.y - Abs::pt(10.0)).max(Abs::zero());

        // Only add a named destination if the page the position belongs to
        // is exported.
        if let Some(index) = pic.pdf_page_index(index) {
            let named = NamedDestination::new(
                label.resolve().to_string(),
                XyzDestination::new(
                    index,
                    krilla::geom::Point::from_xy(pos.point.x.to_f32(), y.to_f32()),
                ),
            );
            locs_to_names.insert(loc, named);
        }
    }

    locs_to_names
}

pub(crate) struct PageIndexConverter {
    page_indices: HashMap<usize, usize>,
    skipped_pages: usize,
}

impl PageIndexConverter {
    pub fn new(document: &PagedDocument, options: &PdfOptions) -> Self {
        let mut page_indices = HashMap::new();
        let mut skipped_pages = 0;

        for i in 0..document.pages.len() {
            if options
                .page_ranges
                .as_ref()
                .is_some_and(|ranges| !ranges.includes_page_index(i))
            {
                skipped_pages += 1;
            } else {
                page_indices.insert(i, i - skipped_pages);
            }
        }

        Self { page_indices, skipped_pages }
    }

    pub(crate) fn has_skipped_pages(&self) -> bool {
        self.skipped_pages > 0
    }

    /// Get the PDF page index for a document page index, if the page is not
    /// excluded from export.
    pub(crate) fn pdf_page_index(&self, page_index: usize) -> Option<usize> {
        self.page_indices.get(&page_index).copied()
    }
}

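// Editor's sketch (not part of the diff): with five document pages and a
// page range that keeps only pages 1 and 3 (zero-based), the converter maps
// 1 -> Some(0) and 3 -> Some(1), while 0, 2, and 4 map to None and
// `has_skipped_pages()` returns true.
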
@@ -1,122 +1,54 @@
use std::collections::BTreeMap;
use std::sync::Arc;

use ecow::EcoString;
use pdf_writer::types::AssociationKind;
use pdf_writer::{Filter, Finish, Name, Ref, Str, TextStr};
use krilla::embed::{AssociationKind, EmbeddedFile};
use krilla::Document;
use typst_library::diag::{bail, SourceResult};
use typst_library::foundations::{NativeElement, Packed, StyleChain};
use typst_library::foundations::{NativeElement, StyleChain};
use typst_library::layout::PagedDocument;
use typst_library::pdf::{EmbedElem, EmbeddedFileRelationship};

use crate::catalog::{document_date, pdf_date};
use crate::{deflate, NameExt, PdfChunk, StrExt, WithGlobalRefs};
pub(crate) fn embed_files(
    typst_doc: &PagedDocument,
    document: &mut Document,
) -> SourceResult<()> {
    let elements = typst_doc.introspector.query(&EmbedElem::elem().select());

/// Query for all [`EmbedElem`] and write them and their file specifications.
///
/// This returns a map of embedding names and references so that we can later
/// add them to the catalog's `/Names` dictionary.
pub fn write_embedded_files(
    ctx: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, BTreeMap<EcoString, Ref>)> {
    let mut chunk = PdfChunk::new();
    let mut embedded_files = BTreeMap::default();

    let elements = ctx.document.introspector.query(&EmbedElem::elem().select());
    for elem in &elements {
        if !ctx.options.standards.embedded_files {
            // PDF/A-2 requires embedded files to be PDF/A-1 or PDF/A-2,
            // which we don't currently check.
            bail!(
                elem.span(),
                "file embeddings are not currently supported for PDF/A-2";
                hint: "PDF/A-3 supports arbitrary embedded files"
            );
        }

        let embed = elem.to_packed::<EmbedElem>().unwrap();
        if embed.path.derived.len() > Str::PDFA_LIMIT {
            bail!(embed.span(), "embedded file path is too long");
        }

        let id = embed_file(ctx, &mut chunk, embed)?;
        if embedded_files.insert(embed.path.derived.clone(), id).is_some() {
            bail!(
                elem.span(),
                "duplicate embedded file for path `{}`", embed.path.derived;
                hint: "embedded file paths must be unique",
            );
        }
    }

    Ok((chunk, embedded_files))
}

/// Write the embedded file stream and its file specification.
fn embed_file(
    ctx: &WithGlobalRefs,
    chunk: &mut PdfChunk,
    embed: &Packed<EmbedElem>,
) -> SourceResult<Ref> {
    let embedded_file_stream_ref = chunk.alloc.bump();
    let file_spec_dict_ref = chunk.alloc.bump();

    let data = embed.data.as_slice();
    let compressed = deflate(data);

    let mut embedded_file = chunk.embedded_file(embedded_file_stream_ref, &compressed);
    embedded_file.filter(Filter::FlateDecode);

    if let Some(mime_type) = embed.mime_type(StyleChain::default()) {
        if mime_type.len() > Name::PDFA_LIMIT {
            bail!(embed.span(), "embedded file MIME type is too long");
        }
        embedded_file.subtype(Name(mime_type.as_bytes()));
    } else if ctx.options.standards.pdfa {
        bail!(embed.span(), "embedded files must have a MIME type in PDF/A-3");
    }

    let mut params = embedded_file.params();
    params.size(data.len() as i32);

    let (date, tz) = document_date(ctx.document.info.date, ctx.options.timestamp);
    if let Some(pdf_date) = date.and_then(|date| pdf_date(date, tz)) {
        params.modification_date(pdf_date);
    } else if ctx.options.standards.pdfa {
        bail!(
            embed.span(),
            "the document must have a date when embedding files in PDF/A-3";
            hint: "`set document(date: none)` must not be used in this case"
        );
    }

    params.finish();
    embedded_file.finish();

    let mut file_spec = chunk.file_spec(file_spec_dict_ref);
    file_spec.path(Str(embed.path.derived.as_bytes()));
    file_spec.unic_file(TextStr(&embed.path.derived));
    file_spec
        .insert(Name(b"EF"))
        .dict()
        .pair(Name(b"F"), embedded_file_stream_ref)
        .pair(Name(b"UF"), embedded_file_stream_ref);

    if ctx.options.standards.pdfa {
        // PDF 2.0, but ISO 19005-3 (PDF/A-3) Annex E allows it for PDF/A-3.
        file_spec.association_kind(match embed.relationship(StyleChain::default()) {
            Some(EmbeddedFileRelationship::Source) => AssociationKind::Source,
            Some(EmbeddedFileRelationship::Data) => AssociationKind::Data,
            Some(EmbeddedFileRelationship::Alternative) => AssociationKind::Alternative,
            Some(EmbeddedFileRelationship::Supplement) => AssociationKind::Supplement,
        let span = embed.span();
        let derived_path = &embed.path.derived;
        let path = derived_path.to_string();
        let mime_type =
            embed.mime_type(StyleChain::default()).clone().map(|s| s.to_string());
        let description = embed
            .description(StyleChain::default())
            .clone()
            .map(|s| s.to_string());
        let association_kind = match embed.relationship(StyleChain::default()) {
            None => AssociationKind::Unspecified,
        });
            Some(e) => match e {
                EmbeddedFileRelationship::Source => AssociationKind::Source,
                EmbeddedFileRelationship::Data => AssociationKind::Data,
                EmbeddedFileRelationship::Alternative => AssociationKind::Alternative,
                EmbeddedFileRelationship::Supplement => AssociationKind::Supplement,
            },
        };
        let data: Arc<dyn AsRef<[u8]> + Send + Sync> = Arc::new(embed.data.clone());

        let file = EmbeddedFile {
            path,
            mime_type,
            description,
            association_kind,
            data: data.into(),
            compress: true,
            location: Some(span.into_raw().get()),
        };

        if document.embed_file(file).is_none() {
            bail!(span, "attempted to embed file {derived_path} twice");
        }
    }

    if let Some(description) = embed.description(StyleChain::default()) {
        if description.len() > Str::PDFA_LIMIT {
            bail!(embed.span(), "embedded file description is too long");
        }
        file_spec.description(TextStr(description));
    }

    Ok(file_spec_dict_ref)
    Ok(())
}

@@ -1,53 +0,0 @@
use std::collections::HashMap;

use pdf_writer::Ref;
use typst_library::diag::SourceResult;

use crate::{PdfChunk, WithGlobalRefs};

/// A PDF external graphics state.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub struct ExtGState {
    // In the range 0-255; must be divided by 255 before being written into
    // the graphics state!
    pub stroke_opacity: u8,
    // In the range 0-255; must be divided by 255 before being written into
    // the graphics state!
    pub fill_opacity: u8,
}

impl Default for ExtGState {
    fn default() -> Self {
        Self { stroke_opacity: 255, fill_opacity: 255 }
    }
}

impl ExtGState {
    pub fn uses_opacities(&self) -> bool {
        self.stroke_opacity != 255 || self.fill_opacity != 255
    }
}

/// Embed all used external graphics states into the PDF.
pub fn write_graphic_states(
    context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<ExtGState, Ref>)> {
    let mut chunk = PdfChunk::new();
    let mut out = HashMap::new();
    context.resources.traverse(&mut |resources| {
        for external_gs in resources.ext_gs.items() {
            if out.contains_key(external_gs) {
                continue;
            }

            let id = chunk.alloc();
            out.insert(*external_gs, id);
            chunk
                .ext_graphics(id)
                .non_stroking_alpha(external_gs.fill_opacity as f32 / 255.0)
                .stroking_alpha(external_gs.stroke_opacity as f32 / 255.0);
        }

        Ok(())
    })?;

    Ok((chunk, out))
}

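// Worked example (editor's note, not part of the diff): a 50% fill opacity
// is quantized to 128u8 and written as 128.0 / 255.0 ≈ 0.502 via
// `non_stroking_alpha`; the default of 255 maps to exactly 1.0, for which
// `uses_opacities` returns false and no graphics state is needed.
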
@@ -1,278 +0,0 @@
use std::collections::{BTreeMap, HashMap};
use std::hash::Hash;
use std::sync::Arc;

use ecow::{eco_format, EcoString};
use pdf_writer::types::{CidFontType, FontFlags, SystemInfo, UnicodeCmap};
use pdf_writer::writers::{FontDescriptor, WMode};
use pdf_writer::{Chunk, Filter, Finish, Name, Rect, Ref, Str};
use subsetter::GlyphRemapper;
use ttf_parser::{name_id, GlyphId, Tag};
use typst_library::diag::{At, SourceResult};
use typst_library::text::Font;
use typst_syntax::Span;
use typst_utils::SliceExt;

use crate::{deflate, EmExt, NameExt, PdfChunk, WithGlobalRefs};

const CFF: Tag = Tag::from_bytes(b"CFF ");
const CFF2: Tag = Tag::from_bytes(b"CFF2");

const SUBSET_TAG_LEN: usize = 6;
const IDENTITY_H: &str = "Identity-H";

pub(crate) const CMAP_NAME: Name = Name(b"Custom");
pub(crate) const SYSTEM_INFO: SystemInfo = SystemInfo {
    registry: Str(b"Adobe"),
    ordering: Str(b"Identity"),
    supplement: 0,
};

/// Embed all used fonts into the PDF.
#[typst_macros::time(name = "write fonts")]
pub fn write_fonts(
    context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<Font, Ref>)> {
    let mut chunk = PdfChunk::new();
    let mut out = HashMap::new();
    context.resources.traverse(&mut |resources| {
        for font in resources.fonts.items() {
            if out.contains_key(font) {
                continue;
            }

            let type0_ref = chunk.alloc();
            let cid_ref = chunk.alloc();
            let descriptor_ref = chunk.alloc();
            let cmap_ref = chunk.alloc();
            let data_ref = chunk.alloc();
            out.insert(font.clone(), type0_ref);

            let glyph_set = resources.glyph_sets.get(font).unwrap();
            let glyph_remapper = resources.glyph_remappers.get(font).unwrap();
            let ttf = font.ttf();

            // Do we have a TrueType or CFF font?
            //
            // FIXME: CFF2 must be handled differently and requires PDF 2.0
            // (or we have to convert it to CFF).
            let is_cff = ttf
                .raw_face()
                .table(CFF)
                .or_else(|| ttf.raw_face().table(CFF2))
                .is_some();

            let base_font = base_font_name(font, glyph_set);
            let base_font_type0 = if is_cff {
                eco_format!("{base_font}-{IDENTITY_H}")
            } else {
                base_font.clone()
            };

            // Write the base font object referencing the CID font.
            chunk
                .type0_font(type0_ref)
                .base_font(Name(base_font_type0.as_bytes()))
                .encoding_predefined(Name(IDENTITY_H.as_bytes()))
                .descendant_font(cid_ref)
                .to_unicode(cmap_ref);

            // Write the CID font referencing the font descriptor.
            let mut cid = chunk.cid_font(cid_ref);
            cid.subtype(if is_cff { CidFontType::Type0 } else { CidFontType::Type2 });
            cid.base_font(Name(base_font.as_bytes()));
            cid.system_info(SYSTEM_INFO);
            cid.font_descriptor(descriptor_ref);
            cid.default_width(0.0);
            if !is_cff {
                cid.cid_to_gid_map_predefined(Name(b"Identity"));
            }

            // Extract the widths of all glyphs.
            // `remapped_gids` returns an iterator over the old GIDs in their
            // new sorted order, so we can append the widths as-is.
            let widths = glyph_remapper
                .remapped_gids()
                .map(|gid| {
                    let width = ttf.glyph_hor_advance(GlyphId(gid)).unwrap_or(0);
                    font.to_em(width).to_font_units()
                })
                .collect::<Vec<_>>();

            // Write all non-zero glyph widths.
            let mut first = 0;
            let mut width_writer = cid.widths();
            for (w, group) in widths.group_by_key(|&w| w) {
                let end = first + group.len();
                if w != 0.0 {
                    let last = end - 1;
                    width_writer.same(first as u16, last as u16, w);
                }
                first = end;
            }

            width_writer.finish();
            cid.finish();

            // Write the /ToUnicode character map, which maps glyph ids back
            // to unicode codepoints to enable copying out of the PDF.
            let cmap = create_cmap(glyph_set, glyph_remapper);
            chunk
                .cmap(cmap_ref, &cmap)
                .writing_mode(WMode::Horizontal)
                .filter(Filter::FlateDecode);

            let subset = subset_font(font, glyph_remapper)
                .map_err(|err| {
                    let postscript_name = font.find_name(name_id::POST_SCRIPT_NAME);
                    let name = postscript_name.as_deref().unwrap_or(&font.info().family);
                    eco_format!("failed to process font {name}: {err}")
                })
                .at(Span::detached())?;

            let mut stream = chunk.stream(data_ref, &subset);
            stream.filter(Filter::FlateDecode);
            if is_cff {
                stream.pair(Name(b"Subtype"), Name(b"CIDFontType0C"));
            }
            stream.finish();

            let mut font_descriptor =
                write_font_descriptor(&mut chunk, descriptor_ref, font, &base_font);
            if is_cff {
                font_descriptor.font_file3(data_ref);
            } else {
                font_descriptor.font_file2(data_ref);
            }
        }

        Ok(())
    })?;

    Ok((chunk, out))
}

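// Worked example (editor's note, not part of the diff): `group_by_key` above
// collapses runs of equal widths. For widths [500.0, 500.0, 0.0, 620.0] on
// CIDs 0..=3, the writer emits the ranges (0, 1, 500.0) and (3, 3, 620.0);
// the zero-width run is skipped because 0 is already the default width set
// via `cid.default_width(0.0)`.
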
/// Writes a FontDescriptor dictionary.
pub fn write_font_descriptor<'a>(
    pdf: &'a mut Chunk,
    descriptor_ref: Ref,
    font: &'a Font,
    base_font: &str,
) -> FontDescriptor<'a> {
    let ttf = font.ttf();
    let metrics = font.metrics();
    let serif = font
        .find_name(name_id::POST_SCRIPT_NAME)
        .is_some_and(|name| name.contains("Serif"));

    let mut flags = FontFlags::empty();
    flags.set(FontFlags::SERIF, serif);
    flags.set(FontFlags::FIXED_PITCH, ttf.is_monospaced());
    flags.set(FontFlags::ITALIC, ttf.is_italic());
    flags.insert(FontFlags::SYMBOLIC);
    flags.insert(FontFlags::SMALL_CAP);

    let global_bbox = ttf.global_bounding_box();
    let bbox = Rect::new(
        font.to_em(global_bbox.x_min).to_font_units(),
        font.to_em(global_bbox.y_min).to_font_units(),
        font.to_em(global_bbox.x_max).to_font_units(),
        font.to_em(global_bbox.y_max).to_font_units(),
    );

    let italic_angle = ttf.italic_angle();
    let ascender = metrics.ascender.to_font_units();
    let descender = metrics.descender.to_font_units();
    let cap_height = metrics.cap_height.to_font_units();
    let stem_v = 10.0 + 0.244 * (f32::from(ttf.weight().to_number()) - 50.0);

    // Write the font descriptor (contains metrics about the font).
    let mut font_descriptor = pdf.font_descriptor(descriptor_ref);
    font_descriptor
        .name(Name(base_font.as_bytes()))
        .flags(flags)
        .bbox(bbox)
        .italic_angle(italic_angle)
        .ascent(ascender)
        .descent(descender)
        .cap_height(cap_height)
        .stem_v(stem_v);

    font_descriptor
}

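// Worked example (editor's note, not part of the diff): the StemV heuristic
// above gives 10.0 + 0.244 * (400.0 - 50.0) = 95.4 for a regular weight of
// 400 and 10.0 + 0.244 * (700.0 - 50.0) = 168.6 for a bold weight of 700.
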
/// Subset a font to the given glyphs.
///
/// - For a font with TrueType outlines, this produces the whole OpenType font.
/// - For a font with CFF outlines, this produces just the CFF font program.
///
/// In both cases, this returns the already compressed data.
#[comemo::memoize]
#[typst_macros::time(name = "subset font")]
fn subset_font(
    font: &Font,
    glyph_remapper: &GlyphRemapper,
) -> Result<Arc<Vec<u8>>, subsetter::Error> {
    let data = font.data();
    let subset = subsetter::subset(data, font.index(), glyph_remapper)?;
    let mut data = subset.as_ref();

    // Extract the standalone CFF font program if applicable.
    let raw = ttf_parser::RawFace::parse(data, 0).unwrap();
    if let Some(cff) = raw.table(CFF) {
        data = cff;
    }

    Ok(Arc::new(deflate(data)))
}

/// Creates the base font name for a font with a specific glyph subset.
/// Consists of a subset tag and the PostScript name of the font.
///
/// Returns a string of at most 116 bytes, so that even with `-Identity-H`
/// appended it does not exceed the maximum PDF/A name length of 127.
pub(crate) fn base_font_name<T: Hash>(font: &Font, glyphs: &T) -> EcoString {
    const MAX_LEN: usize = Name::PDFA_LIMIT - REST_LEN;
    const REST_LEN: usize = SUBSET_TAG_LEN + 1 + 1 + IDENTITY_H.len();

    let postscript_name = font.find_name(name_id::POST_SCRIPT_NAME);
    let name = postscript_name.as_deref().unwrap_or("unknown");
    let trimmed = &name[..name.len().min(MAX_LEN)];

    // Hash the full name (which we might have trimmed) and the glyphs to
    // produce a fairly unique subset tag.
    let subset_tag = subset_tag(&(name, glyphs));

    eco_format!("{subset_tag}+{trimmed}")
}

/// Produce a fairly unique six-letter tag for a glyph set.
pub(crate) fn subset_tag<T: Hash>(glyphs: &T) -> EcoString {
    const BASE: u128 = 26;
    let mut hash = typst_utils::hash128(&glyphs);
    let mut letter = [b'A'; SUBSET_TAG_LEN];
    for l in letter.iter_mut() {
        *l = b'A' + (hash % BASE) as u8;
        hash /= BASE;
    }
    std::str::from_utf8(&letter).unwrap().into()
}

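// Editor's sketch (not part of the diff): a standalone replica of the tag
// encoding above. The hash is written in base 26 with digits 'A'..='Z',
// least-significant digit first.
#[cfg(test)]
fn subset_tag_demo(mut hash: u128) -> String {
    let mut letters = [b'A'; 6];
    for l in letters.iter_mut() {
        *l = b'A' + (hash % 26) as u8;
        hash /= 26;
    }
    String::from_utf8(letters.to_vec()).unwrap()
}
// subset_tag_demo(0) == "AAAAAA", subset_tag_demo(1) == "BAAAAA",
// subset_tag_demo(26) == "ABAAAA".
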
/// Create a compressed `/ToUnicode` CMap.
#[comemo::memoize]
#[typst_macros::time(name = "create cmap")]
fn create_cmap(
    glyph_set: &BTreeMap<u16, EcoString>,
    glyph_remapper: &GlyphRemapper,
) -> Arc<Vec<u8>> {
    // Produce a reverse mapping from glyphs' CIDs to unicode strings.
    let mut cmap = UnicodeCmap::new(CMAP_NAME, SYSTEM_INFO);
    for (&g, text) in glyph_set.iter() {
        // See the comment in `write_normal_text` for why we can choose the
        // CID this way.
        let cid = glyph_remapper.get(g).unwrap();
        if !text.is_empty() {
            cmap.pair_with_multiple(cid, text.chars());
        }
    }
    Arc::new(deflate(&cmap.finish()))
}

@@ -1,512 +0,0 @@
use std::collections::HashMap;
use std::f32::consts::{PI, TAU};
use std::sync::Arc;

use ecow::eco_format;
use pdf_writer::types::{ColorSpaceOperand, FunctionShadingType};
use pdf_writer::writers::StreamShadingType;
use pdf_writer::{Filter, Finish, Name, Ref};
use typst_library::diag::SourceResult;
use typst_library::layout::{Abs, Angle, Point, Quadrant, Ratio, Transform};
use typst_library::visualize::{
    Color, ColorSpace, Gradient, RatioOrAngle, RelativeTo, WeightedColor,
};
use typst_utils::Numeric;

use crate::color::{
    self, check_cmyk_allowed, ColorSpaceExt, PaintEncode, QuantizedColor,
};
use crate::{content, deflate, transform_to_array, AbsExt, PdfChunk, WithGlobalRefs};

/// A unique-transform-aspect-ratio combination that will be encoded into the
/// PDF.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct PdfGradient {
    /// The transform to apply to the gradient.
    pub transform: Transform,
    /// The aspect ratio of the gradient.
    /// Required for aspect ratio correction.
    pub aspect_ratio: Ratio,
    /// The gradient.
    pub gradient: Gradient,
    /// The corrected angle of the gradient.
    pub angle: Angle,
}

/// Writes the actual gradients (shading patterns) to the PDF.
/// This is performed once after writing all pages.
pub fn write_gradients(
    context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<PdfGradient, Ref>)> {
    let mut chunk = PdfChunk::new();
    let mut out = HashMap::new();
    context.resources.traverse(&mut |resources| {
        for pdf_gradient in resources.gradients.items() {
            if out.contains_key(pdf_gradient) {
                continue;
            }

            let shading = chunk.alloc();
            out.insert(pdf_gradient.clone(), shading);

            let PdfGradient { transform, aspect_ratio, gradient, angle } = pdf_gradient;

            let color_space = if gradient.space().hue_index().is_some() {
                ColorSpace::Oklab
            } else {
                gradient.space()
            };

            if color_space == ColorSpace::Cmyk {
                check_cmyk_allowed(context.options)?;
            }

            let mut shading_pattern = match &gradient {
                Gradient::Linear(_) => {
                    let shading_function =
                        shading_function(gradient, &mut chunk, color_space);
                    let mut shading_pattern = chunk.chunk.shading_pattern(shading);
                    let mut shading = shading_pattern.function_shading();
                    shading.shading_type(FunctionShadingType::Axial);

                    color::write(
                        color_space,
                        shading.color_space(),
                        &context.globals.color_functions,
                    );

                    let (mut sin, mut cos) = (angle.sin(), angle.cos());

                    // Scale to edges of unit square.
                    let factor = cos.abs() + sin.abs();
                    sin *= factor;
                    cos *= factor;

                    let (x1, y1, x2, y2): (f64, f64, f64, f64) = match angle.quadrant() {
                        Quadrant::First => (0.0, 0.0, cos, sin),
                        Quadrant::Second => (1.0, 0.0, cos + 1.0, sin),
                        Quadrant::Third => (1.0, 1.0, cos + 1.0, sin + 1.0),
                        Quadrant::Fourth => (0.0, 1.0, cos, sin + 1.0),
                    };

                    shading
                        .anti_alias(gradient.anti_alias())
                        .function(shading_function)
                        .coords([x1 as f32, y1 as f32, x2 as f32, y2 as f32])
                        .extend([true; 2]);

                    shading.finish();

                    shading_pattern
                }
                Gradient::Radial(radial) => {
                    let shading_function =
                        shading_function(gradient, &mut chunk, color_space_of(gradient));
                    let mut shading_pattern = chunk.chunk.shading_pattern(shading);
                    let mut shading = shading_pattern.function_shading();
                    shading.shading_type(FunctionShadingType::Radial);

                    color::write(
                        color_space,
                        shading.color_space(),
                        &context.globals.color_functions,
                    );

                    shading
                        .anti_alias(gradient.anti_alias())
                        .function(shading_function)
                        .coords([
                            radial.focal_center.x.get() as f32,
                            radial.focal_center.y.get() as f32,
                            radial.focal_radius.get() as f32,
                            radial.center.x.get() as f32,
                            radial.center.y.get() as f32,
                            radial.radius.get() as f32,
                        ])
                        .extend([true; 2]);

                    shading.finish();

                    shading_pattern
                }
                Gradient::Conic(_) => {
                    let vertices = compute_vertex_stream(gradient, *aspect_ratio);

                    let stream_shading_id = chunk.alloc();
                    let mut stream_shading =
                        chunk.chunk.stream_shading(stream_shading_id, &vertices);

                    color::write(
                        color_space,
                        stream_shading.color_space(),
                        &context.globals.color_functions,
                    );

                    let range = color_space.range();
                    stream_shading
                        .bits_per_coordinate(16)
                        .bits_per_component(16)
                        .bits_per_flag(8)
                        .shading_type(StreamShadingType::CoonsPatch)
                        .decode(
                            [0.0, 1.0, 0.0, 1.0].into_iter().chain(range.iter().copied()),
                        )
                        .anti_alias(gradient.anti_alias())
                        .filter(Filter::FlateDecode);

                    stream_shading.finish();

                    let mut shading_pattern = chunk.shading_pattern(shading);
                    shading_pattern.shading_ref(stream_shading_id);
                    shading_pattern
                }
            };

            shading_pattern.matrix(transform_to_array(*transform));
        }

        Ok(())
    })?;

    Ok((chunk, out))
}

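// Worked example (editor's note, not part of the diff): for a 45° angle in
// the first quadrant, sin = cos ≈ 0.7071 and the scale factor is
// |cos| + |sin| ≈ 1.4142, so the scaled values each become ≈ 1.0 and the
// axial shading runs from (0, 0) to (1, 1), i.e. corner to corner of the
// unit square.
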
/// Writes an exponential or stitched function that expresses the gradient.
fn shading_function(
    gradient: &Gradient,
    chunk: &mut PdfChunk,
    color_space: ColorSpace,
) -> Ref {
    let function = chunk.alloc();
    let mut functions = vec![];
    let mut bounds = vec![];
    let mut encode = vec![];

    // Create the individual gradient functions for each pair of stops.
    for window in gradient.stops_ref().windows(2) {
        let (first, second) = (window[0], window[1]);

        // If we have a hue index or are using Oklab, we will create several
        // stops in-between to make the gradient smoother without interpolation
        // issues with native color spaces.
        let mut last_c = first.0;
        if gradient.space().hue_index().is_some() {
            for i in 0..=32 {
                let t = i as f64 / 32.0;
                let real_t = first.1.get() * (1.0 - t) + second.1.get() * t;

                let c = gradient.sample(RatioOrAngle::Ratio(Ratio::new(real_t)));
                functions.push(single_gradient(chunk, last_c, c, color_space));
                bounds.push(real_t as f32);
                encode.extend([0.0, 1.0]);
                last_c = c;
            }
        }

        bounds.push(second.1.get() as f32);
        functions.push(single_gradient(chunk, first.0, second.0, color_space));
        encode.extend([0.0, 1.0]);
    }

    // Special case for gradients with only two stops.
    if functions.len() == 1 {
        return functions[0];
    }

    // Remove the last bound, since it's not needed for the stitching function.
    bounds.pop();

    // Create the stitching function.
    chunk
        .stitching_function(function)
        .domain([0.0, 1.0])
        .range(color_space.range().iter().copied())
        .functions(functions)
        .bounds(bounds)
        .encode(encode);

    function
}

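// Editor's sketch (not part of the diff): for a non-hue gradient with stops
// at 0.0, 0.5, and 1.0, the loop above yields two exponential (type 2)
// functions; after the final bound is popped, the stitching (type 3)
// function receives bounds = [0.5] and encode = [0.0, 1.0, 0.0, 1.0].
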
/// Writes an exponential function that expresses a single segment (between two
/// stops) of a gradient.
fn single_gradient(
    chunk: &mut PdfChunk,
    first_color: Color,
    second_color: Color,
    color_space: ColorSpace,
) -> Ref {
    let reference = chunk.alloc();
    chunk
        .exponential_function(reference)
        .range(color_space.range().iter().copied())
        .c0(color_space.convert(first_color))
        .c1(color_space.convert(second_color))
        .domain([0.0, 1.0])
        .n(1.0);

    reference
}

impl PaintEncode for Gradient {
    fn set_as_fill(
        &self,
        ctx: &mut content::Builder,
        on_text: bool,
        transforms: content::Transforms,
    ) -> SourceResult<()> {
        ctx.reset_fill_color_space();

        let index = register_gradient(ctx, self, on_text, transforms);
        let id = eco_format!("Gr{index}");
        let name = Name(id.as_bytes());

        ctx.content.set_fill_color_space(ColorSpaceOperand::Pattern);
        ctx.content.set_fill_pattern(None, name);
        Ok(())
    }

    fn set_as_stroke(
        &self,
        ctx: &mut content::Builder,
        on_text: bool,
        transforms: content::Transforms,
    ) -> SourceResult<()> {
        ctx.reset_stroke_color_space();

        let index = register_gradient(ctx, self, on_text, transforms);
        let id = eco_format!("Gr{index}");
        let name = Name(id.as_bytes());

        ctx.content.set_stroke_color_space(ColorSpaceOperand::Pattern);
        ctx.content.set_stroke_pattern(None, name);
        Ok(())
    }
}

/// Deduplicates a gradient to a named PDF resource.
fn register_gradient(
    ctx: &mut content::Builder,
    gradient: &Gradient,
    on_text: bool,
    mut transforms: content::Transforms,
) -> usize {
    // Edge cases for strokes.
    if transforms.size.x.is_zero() {
        transforms.size.x = Abs::pt(1.0);
    }

    if transforms.size.y.is_zero() {
        transforms.size.y = Abs::pt(1.0);
    }
    let size = match gradient.unwrap_relative(on_text) {
        RelativeTo::Self_ => transforms.size,
        RelativeTo::Parent => transforms.container_size,
    };

    let (offset_x, offset_y) = match gradient {
        Gradient::Conic(conic) => (
            -size.x * (1.0 - conic.center.x.get() / 2.0) / 2.0,
            -size.y * (1.0 - conic.center.y.get() / 2.0) / 2.0,
        ),
        _ => (Abs::zero(), Abs::zero()),
    };

    let rotation = gradient.angle().unwrap_or_else(Angle::zero);

    let transform = match gradient.unwrap_relative(on_text) {
        RelativeTo::Self_ => transforms.transform,
        RelativeTo::Parent => transforms.container_transform,
    };

    let scale_offset = match gradient {
        Gradient::Conic(_) => 4.0_f64,
        _ => 1.0,
    };

    let pdf_gradient = PdfGradient {
        aspect_ratio: size.aspect_ratio(),
        transform: transform
            .pre_concat(Transform::translate(
                offset_x * scale_offset,
                offset_y * scale_offset,
            ))
            .pre_concat(Transform::scale(
                Ratio::new(size.x.to_pt() * scale_offset),
                Ratio::new(size.y.to_pt() * scale_offset),
            )),
        gradient: gradient.clone(),
        angle: Gradient::correct_aspect_ratio(rotation, size.aspect_ratio()),
    };

    ctx.resources.colors.mark_as_used(color_space_of(gradient));

    ctx.resources.gradients.insert(pdf_gradient)
}

/// Writes a single Coons patch, as defined in the PDF specification,
/// to a binary vec.
///
/// Structure:
/// - flag: `u8`
/// - points: `[u16; 24]`
/// - colors: `[u16; 4*N]` (N = number of components)
fn write_patch(
    target: &mut Vec<u8>,
    t: f32,
    t1: f32,
    c0: &[u16],
    c1: &[u16],
    angle: Angle,
) {
    let theta = -TAU * t + angle.to_rad() as f32 + PI;
    let theta1 = -TAU * t1 + angle.to_rad() as f32 + PI;

    let (cp1, cp2) =
        control_point(Point::new(Abs::pt(0.5), Abs::pt(0.5)), 0.5, theta, theta1);

    // Push the flag.
    target.push(0);

    let p1 =
        [u16::quantize(0.5, [0.0, 1.0]).to_be(), u16::quantize(0.5, [0.0, 1.0]).to_be()];

    let p2 = [
        u16::quantize(theta.cos(), [-1.0, 1.0]).to_be(),
        u16::quantize(theta.sin(), [-1.0, 1.0]).to_be(),
    ];

    let p3 = [
        u16::quantize(theta1.cos(), [-1.0, 1.0]).to_be(),
        u16::quantize(theta1.sin(), [-1.0, 1.0]).to_be(),
    ];

    let cp1 = [
        u16::quantize(cp1.x.to_f32(), [0.0, 1.0]).to_be(),
        u16::quantize(cp1.y.to_f32(), [0.0, 1.0]).to_be(),
    ];

    let cp2 = [
        u16::quantize(cp2.x.to_f32(), [0.0, 1.0]).to_be(),
        u16::quantize(cp2.y.to_f32(), [0.0, 1.0]).to_be(),
    ];

    // Push the points.
    target.extend_from_slice(bytemuck::cast_slice(&[
        p1, p1, p2, p2, cp1, cp2, p3, p3, p1, p1, p1, p1,
    ]));

    // Push the colors.
    let colors = [c0, c0, c1, c1]
        .into_iter()
        .flat_map(|c| c.iter().copied().map(u16::to_be_bytes))
        .flatten();

    target.extend(colors);
}

fn control_point(c: Point, r: f32, angle_start: f32, angle_end: f32) -> (Point, Point) {
    let n = (TAU / (angle_end - angle_start)).abs();
    let f = ((angle_end - angle_start) / n).tan() * 4.0 / 3.0;

    let p1 = c + Point::new(
        Abs::pt((r * angle_start.cos() - f * r * angle_start.sin()) as f64),
        Abs::pt((r * angle_start.sin() + f * r * angle_start.cos()) as f64),
    );

    let p2 = c + Point::new(
        Abs::pt((r * angle_end.cos() + f * r * angle_end.sin()) as f64),
        Abs::pt((r * angle_end.sin() - f * r * angle_end.cos()) as f64),
    );

    (p1, p2)
}

#[comemo::memoize]
fn compute_vertex_stream(gradient: &Gradient, aspect_ratio: Ratio) -> Arc<Vec<u8>> {
    let Gradient::Conic(conic) = gradient else { unreachable!() };

    // Generated vertices for the Coons patches.
    let mut vertices = Vec::new();

    // Correct the gradient's angle.
    let angle = Gradient::correct_aspect_ratio(conic.angle, aspect_ratio);

    for window in conic.stops.windows(2) {
        let ((c0, t0), (c1, t1)) = (window[0], window[1]);

        // Precision:
        // - On an even color, insert a stop every 90deg.
        // - For a hue-based color space, insert 200 stops minimum.
        // - On any other, insert 20 stops minimum.
        let max_dt = if c0 == c1 {
            0.25
        } else if conic.space.hue_index().is_some() {
            0.005
        } else {
            0.05
        };
        let encode_space = conic
            .space
            .hue_index()
            .map(|_| ColorSpace::Oklab)
            .unwrap_or(conic.space);
        let mut t_x = t0.get();
        let dt = (t1.get() - t0.get()).min(max_dt);

        // Special casing for sharp gradients.
        if t0 == t1 {
            write_patch(
                &mut vertices,
                t0.get() as f32,
                t1.get() as f32,
                &encode_space.convert(c0),
                &encode_space.convert(c1),
                angle,
            );
            continue;
        }

        while t_x < t1.get() {
            let t_next = (t_x + dt).min(t1.get());

            // The current progress in the current window.
            let t = |t| (t - t0.get()) / (t1.get() - t0.get());
            let c = Color::mix_iter(
                [WeightedColor::new(c0, 1.0 - t(t_x)), WeightedColor::new(c1, t(t_x))],
                conic.space,
            )
            .unwrap();

            let c_next = Color::mix_iter(
                [
                    WeightedColor::new(c0, 1.0 - t(t_next)),
                    WeightedColor::new(c1, t(t_next)),
                ],
                conic.space,
            )
            .unwrap();

            write_patch(
                &mut vertices,
                t_x as f32,
                t_next as f32,
                &encode_space.convert(c),
                &encode_space.convert(c_next),
                angle,
            );

            t_x = t_next;
        }
    }

    Arc::new(deflate(&vertices))
}

fn color_space_of(gradient: &Gradient) -> ColorSpace {
    if gradient.space().hue_index().is_some() {
        ColorSpace::Oklab
    } else {
        gradient.space()
    }
}

@@ -1,249 +1,244 @@
use std::collections::HashMap;
use std::io::Cursor;
use std::hash::{Hash, Hasher};
use std::sync::{Arc, OnceLock};

use ecow::eco_format;
use image::{DynamicImage, GenericImageView, Rgba};
use pdf_writer::{Chunk, Filter, Finish, Ref};
use typst_library::diag::{At, SourceResult, StrResult};
use image::{DynamicImage, EncodableLayout, GenericImageView, Rgba};
use krilla::image::{BitsPerComponent, CustomImage, ImageColorspace};
use krilla::surface::Surface;
use krilla_svg::{SurfaceExt, SvgSettings};
use typst_library::diag::{bail, SourceResult};
use typst_library::foundations::Smart;
use typst_library::layout::{Abs, Angle, Ratio, Size, Transform};
use typst_library::visualize::{
    ColorSpace, ExchangeFormat, Image, ImageKind, ImageScaling, RasterFormat,
    RasterImage, SvgImage,
    ExchangeFormat, Image, ImageKind, ImageScaling, RasterFormat, RasterImage,
};
use typst_utils::Deferred;
use typst_syntax::Span;

use crate::{color, deflate, PdfChunk, WithGlobalRefs};
use crate::convert::{FrameContext, GlobalContext};
use crate::util::{SizeExt, TransformExt};

/// Embed all used images into the PDF.
#[typst_macros::time(name = "write images")]
pub fn write_images(
    context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<Image, Ref>)> {
    let mut chunk = PdfChunk::new();
    let mut out = HashMap::new();
    context.resources.traverse(&mut |resources| {
        for (i, image) in resources.images.items().enumerate() {
            if out.contains_key(image) {
                continue;
#[typst_macros::time(name = "handle image")]
pub(crate) fn handle_image(
    gc: &mut GlobalContext,
    fc: &mut FrameContext,
    image: &Image,
    size: Size,
    surface: &mut Surface,
    span: Span,
) -> SourceResult<()> {
    surface.push_transform(&fc.state().transform().to_krilla());
    surface.set_location(span.into_raw().get());

    let interpolate = image.scaling() == Smart::Custom(ImageScaling::Smooth);

    if let Some(alt) = image.alt() {
        surface.start_alt_text(alt);
    }

            let (handle, span) = resources.deferred_images.get(&i).unwrap();
            let encoded = handle.wait().as_ref().map_err(Clone::clone).at(*span)?;
    gc.image_spans.insert(span);

            match encoded {
                EncodedImage::Raster {
                    data,
                    filter,
                    color_space,
                    bits_per_component,
                    width,
                    height,
                    compressed_icc,
                    alpha,
                    interpolate,
                } => {
                    let image_ref = chunk.alloc();
                    out.insert(image.clone(), image_ref);
    match image.kind() {
        ImageKind::Raster(raster) => {
            let (exif_transform, new_size) = exif_transform(raster, size);
            surface.push_transform(&exif_transform.to_krilla());

                    let mut image = chunk.chunk.image_xobject(image_ref, data);
                    image.filter(*filter);
                    image.width(*width as i32);
                    image.height(*height as i32);
                    image.bits_per_component(i32::from(*bits_per_component));
                    image.interpolate(*interpolate);
            let image = match convert_raster(raster.clone(), interpolate) {
                None => bail!(span, "failed to process image"),
                Some(i) => i,
            };

                    let mut icc_ref = None;
                    let space = image.color_space();
                    if compressed_icc.is_some() {
                        let id = chunk.alloc.bump();
                        space.icc_based(id);
                        icc_ref = Some(id);
                    } else {
                        color::write(
                            *color_space,
                            space,
                            &context.globals.color_functions,
            if !gc.image_to_spans.contains_key(&image) {
                gc.image_to_spans.insert(image.clone(), span);
            }

            surface.draw_image(image, new_size.to_krilla());
            surface.pop();
        }
        ImageKind::Svg(svg) => {
            surface.draw_svg(
                svg.tree(),
                size.to_krilla(),
                SvgSettings { embed_text: true, ..Default::default() },
            );
        }

                    // Add a second gray-scale image containing the alpha values if
                    // this image has an alpha channel.
                    if let Some((alpha_data, alpha_filter)) = alpha {
                        let mask_ref = chunk.alloc.bump();
                        image.s_mask(mask_ref);
                        image.finish();

                        let mut mask = chunk.image_xobject(mask_ref, alpha_data);
                        mask.filter(*alpha_filter);
                        mask.width(*width as i32);
                        mask.height(*height as i32);
                        mask.color_space().device_gray();
                        mask.bits_per_component(i32::from(*bits_per_component));
                        mask.interpolate(*interpolate);
                    } else {
                        image.finish();
                    }

                    if let (Some(compressed_icc), Some(icc_ref)) =
                        (compressed_icc, icc_ref)
                    {
                        let mut stream = chunk.icc_profile(icc_ref, compressed_icc);
                        stream.filter(Filter::FlateDecode);
                        match color_space {
                            ColorSpace::Srgb => {
                                stream.n(3);
                                stream.alternate().srgb();
                            }
                            ColorSpace::D65Gray => {
                                stream.n(1);
                                stream.alternate().d65_gray();
                            }
                            _ => unimplemented!(),
                        }
                    }
                }
                EncodedImage::Svg(svg_chunk, id) => {
                    let mut map = HashMap::new();
                    svg_chunk.renumber_into(&mut chunk.chunk, |old| {
                        *map.entry(old).or_insert_with(|| chunk.alloc.bump())
                    });
                    out.insert(image.clone(), map[id]);
                }
            }
    if image.alt().is_some() {
        surface.end_alt_text();
    }

    surface.pop();
    surface.reset_location();

    Ok(())
    })?;

    Ok((chunk, out))
}

/// Creates a new PDF image from the given image.
///
/// Also starts the deferred encoding of the image.
struct Repr {
    /// The original, underlying raster image.
    raster: RasterImage,
    /// The alpha channel of the raster image, if any.
    alpha_channel: OnceLock<Option<Vec<u8>>>,
    /// A (potentially) converted version of the dynamic image stored in
    /// `raster` that is guaranteed to be in either luma8 or rgb8, and thus
    /// can be used for the `color_channel` method of `CustomImage`.
    actual_dynamic: OnceLock<Arc<DynamicImage>>,
}

/// A wrapper around `RasterImage` so that we can implement `CustomImage`.
#[derive(Clone)]
struct PdfImage(Arc<Repr>);

impl PdfImage {
    pub fn new(raster: RasterImage) -> Self {
        Self(Arc::new(Repr {
            raster,
            alpha_channel: OnceLock::new(),
            actual_dynamic: OnceLock::new(),
        }))
    }
}

impl Hash for PdfImage {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // `alpha_channel` and `actual_dynamic` are derived from the
        // underlying `RasterImage`, so hashing that alone is enough. Since
        // `raster` is prehashed, this is also very cheap.
        self.0.raster.hash(state);
    }
}

impl CustomImage for PdfImage {
    fn color_channel(&self) -> &[u8] {
        self.0
            .actual_dynamic
            .get_or_init(|| {
                let dynamic = self.0.raster.dynamic();
                let channel_count = dynamic.color().channel_count();

                match (dynamic.as_ref(), channel_count) {
                    // Pure luma8 or rgb8 image; can be used directly.
                    (DynamicImage::ImageLuma8(_), _) => dynamic.clone(),
                    (DynamicImage::ImageRgb8(_), _) => dynamic.clone(),
                    // Grey-scale image; convert to luma8.
                    (_, 1 | 2) => Arc::new(DynamicImage::ImageLuma8(dynamic.to_luma8())),
                    // Anything else; convert to rgb8.
                    _ => Arc::new(DynamicImage::ImageRgb8(dynamic.to_rgb8())),
                }
            })
            .as_bytes()
    }

    fn alpha_channel(&self) -> Option<&[u8]> {
        self.0
            .alpha_channel
            .get_or_init(|| {
                self.0.raster.dynamic().color().has_alpha().then(|| {
                    self.0
                        .raster
                        .dynamic()
                        .pixels()
                        .map(|(_, _, Rgba([_, _, _, a]))| a)
                        .collect()
                })
            })
            .as_ref()
            .map(|v| &**v)
    }

    fn bits_per_component(&self) -> BitsPerComponent {
        BitsPerComponent::Eight
    }

    fn size(&self) -> (u32, u32) {
        (self.0.raster.width(), self.0.raster.height())
    }

    fn icc_profile(&self) -> Option<&[u8]> {
        if matches!(
            self.0.raster.dynamic().as_ref(),
            DynamicImage::ImageLuma8(_)
                | DynamicImage::ImageLumaA8(_)
                | DynamicImage::ImageRgb8(_)
                | DynamicImage::ImageRgba8(_)
        ) {
            self.0.raster.icc().map(|b| b.as_bytes())
        } else {
            // In all other cases, the dynamic image will be converted into
            // rgb8 or luma8, so the ICC profile may become invalid, and thus
            // we don't include it.
            None
        }
    }

    fn color_space(&self) -> ImageColorspace {
        // Remember that we convert all images to either RGB or luma.
        if self.0.raster.dynamic().color().has_color() {
            ImageColorspace::Rgb
        } else {
            ImageColorspace::Luma
        }
    }
}

#[comemo::memoize]
pub fn deferred_image(
    image: Image,
    pdfa: bool,
) -> (Deferred<StrResult<EncodedImage>>, Option<ColorSpace>) {
    let color_space = match image.kind() {
        ImageKind::Raster(raster) if raster.icc().is_none() => {
            Some(to_color_space(raster.dynamic().color()))
        }
        _ => None,
    };

    // PDF/A does not appear to allow interpolation.
    // See https://github.com/typst/typst/issues/2942.
    let interpolate = !pdfa && image.scaling() == Smart::Custom(ImageScaling::Smooth);

    let deferred = Deferred::new(move || match image.kind() {
        ImageKind::Raster(raster) => Ok(encode_raster_image(raster, interpolate)),
        ImageKind::Svg(svg) => {
            let (chunk, id) = encode_svg(svg, pdfa)
                .map_err(|err| eco_format!("failed to convert SVG to PDF: {err}"))?;
            Ok(EncodedImage::Svg(chunk, id))
        }
fn convert_raster(
    raster: RasterImage,
    interpolate: bool,
) -> Option<krilla::image::Image> {
    if let RasterFormat::Exchange(ExchangeFormat::Jpg) = raster.format() {
        let image_data: Arc<dyn AsRef<[u8]> + Send + Sync> =
            Arc::new(raster.data().clone());
        let icc_profile = raster.icc().map(|i| {
            let i: Arc<dyn AsRef<[u8]> + Send + Sync> = Arc::new(i.clone());
            i
        });

    (deferred, color_space)
}

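// Editor's sketch (not part of the diff): the branch above means JPEG data
// keeps its original DCT-encoded bytes and is handed to krilla as-is,
// together with an optional ICC profile, e.g.
//
//     let img = convert_raster(jpeg_raster, /* interpolate */ false);
//
// while every other raster format takes the `PdfImage` path and is
// re-encoded from decoded luma8/rgb8 pixels. `jpeg_raster` here is a
// hypothetical value.
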
/// Encode an image with a suitable filter.
#[typst_macros::time(name = "encode raster image")]
fn encode_raster_image(image: &RasterImage, interpolate: bool) -> EncodedImage {
    let dynamic = image.dynamic();
    let color_space = to_color_space(dynamic.color());

    let (filter, data, bits_per_component) =
        if image.format() == RasterFormat::Exchange(ExchangeFormat::Jpg) {
            let mut data = Cursor::new(vec![]);
            dynamic.write_to(&mut data, image::ImageFormat::Jpeg).unwrap();
            (Filter::DctDecode, data.into_inner(), 8)
        } else {
            // TODO: Encode flate streams with PNG-predictor?
            let (data, bits_per_component) = match (dynamic, color_space) {
                // RGB image.
                (DynamicImage::ImageRgb8(rgb), _) => (deflate(rgb.as_raw()), 8),
                // Grayscale image.
                (DynamicImage::ImageLuma8(luma), _) => (deflate(luma.as_raw()), 8),
                (_, ColorSpace::D65Gray) => (deflate(dynamic.to_luma8().as_raw()), 8),
                // Anything else.
                _ => (deflate(dynamic.to_rgb8().as_raw()), 8),
            };
            (Filter::FlateDecode, data, bits_per_component)
        };

    let compressed_icc = image.icc().map(|data| deflate(data));
    let alpha = dynamic.color().has_alpha().then(|| encode_alpha(dynamic));

    EncodedImage::Raster {
        data,
        filter,
        color_space,
        bits_per_component,
        width: image.width(),
        height: image.height(),
        compressed_icc,
        alpha,
        krilla::image::Image::from_jpeg_with_icc(
            image_data.into(),
            icc_profile.map(|i| i.into()),
            interpolate,
        )
    } else {
        krilla::image::Image::from_custom(PdfImage::new(raster), interpolate)
    }
}

/// Encode an image's alpha channel if present.
#[typst_macros::time(name = "encode alpha")]
fn encode_alpha(image: &DynamicImage) -> (Vec<u8>, Filter) {
    let pixels: Vec<_> = image.pixels().map(|(_, _, Rgba([_, _, _, a]))| a).collect();
    (deflate(&pixels), Filter::FlateDecode)
}

/// Encode an SVG into a chunk of PDF objects.
#[typst_macros::time(name = "encode svg")]
fn encode_svg(
    svg: &SvgImage,
    pdfa: bool,
) -> Result<(Chunk, Ref), svg2pdf::ConversionError> {
    svg2pdf::to_chunk(
        svg.tree(),
        svg2pdf::ConversionOptions { pdfa, ..Default::default() },
fn exif_transform(image: &RasterImage, size: Size) -> (Transform, Size) {
    let base = |hp: bool, vp: bool, mut base_ts: Transform, size: Size| {
        if hp {
            // Flip horizontally in-place.
            base_ts = base_ts.pre_concat(
                Transform::scale(-Ratio::one(), Ratio::one())
                    .pre_concat(Transform::translate(-size.x, Abs::zero())),
            )
        }

/// A pre-encoded image.
pub enum EncodedImage {
    /// A pre-encoded rasterized image.
    Raster {
        /// The raw, pre-deflated image data.
        data: Vec<u8>,
        /// The filter to use for the image.
        filter: Filter,
        /// Which color space this image is encoded in.
        color_space: ColorSpace,
        /// How many bits of each color component are stored.
        bits_per_component: u8,
        /// The image's width.
        width: u32,
        /// The image's height.
        height: u32,
        /// The image's ICC profile, deflated, if any.
        compressed_icc: Option<Vec<u8>>,
        /// The alpha channel of the image, pre-deflated, if any.
        alpha: Option<(Vec<u8>, Filter)>,
        /// Whether image interpolation should be enabled.
        interpolate: bool,
    },
    /// A vector graphic.
    ///
    /// The chunk is the SVG converted to PDF objects.
    Svg(Chunk, Ref),
        if vp {
            // Flip vertically in-place.
            base_ts = base_ts.pre_concat(
                Transform::scale(Ratio::one(), -Ratio::one())
                    .pre_concat(Transform::translate(Abs::zero(), -size.y)),
            )
        }

/// Matches an [`image::ColorType`] to [`ColorSpace`].
fn to_color_space(color: image::ColorType) -> ColorSpace {
    use image::ColorType::*;
    match color {
        L8 | La8 | L16 | La16 => ColorSpace::D65Gray,
        Rgb8 | Rgba8 | Rgb16 | Rgba16 | Rgb32F | Rgba32F => ColorSpace::Srgb,
        _ => unimplemented!(),
        base_ts
    };

    let no_flipping =
        |hp: bool, vp: bool| (base(hp, vp, Transform::identity(), size), size);

    let with_flipping = |hp: bool, vp: bool| {
        let base_ts = Transform::rotate_at(Angle::deg(90.0), Abs::zero(), Abs::zero())
            .pre_concat(Transform::scale(Ratio::one(), -Ratio::one()));
        let inv_size = Size::new(size.y, size.x);
        (base(hp, vp, base_ts, inv_size), inv_size)
    };

    match image.exif_rotation() {
        Some(2) => no_flipping(true, false),
        Some(3) => no_flipping(true, true),
        Some(4) => no_flipping(false, true),
        Some(5) => with_flipping(false, false),
        Some(6) => with_flipping(true, false),
        Some(7) => with_flipping(true, true),
        Some(8) => with_flipping(false, true),
        _ => no_flipping(false, false),
    }
}

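// Editor's sketch (not part of the diff): EXIF orientation 6 ("rotate 90° CW
// to display") takes the `with_flipping(true, false)` path above: the
// transform rotates by 90° and mirrors one axis, and width and height are
// swapped via `inv_size`, so a 400x300 photo is drawn into a 300x400 box.
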
@@ -1,81 +1,33 @@
//! Exporting of Typst documents into PDFs.
//! Exporting Typst documents to PDF.

mod catalog;
mod color;
mod color_font;
mod content;
mod convert;
mod embed;
mod extg;
mod font;
mod gradient;
mod image;
mod named_destination;
mod link;
mod metadata;
mod outline;
mod page;
mod resources;
mod tiling;
mod paint;
mod shape;
mod text;
mod util;

pub use self::metadata::{Timestamp, Timezone};

use std::collections::{BTreeMap, HashMap};
use std::fmt::{self, Debug, Formatter};
use std::hash::Hash;
use std::ops::{Deref, DerefMut};

use base64::Engine;
use ecow::EcoString;
use pdf_writer::{Chunk, Name, Pdf, Ref, Str, TextStr};
use ecow::eco_format;
use serde::{Deserialize, Serialize};
use typst_library::diag::{bail, SourceResult, StrResult};
use typst_library::foundations::{Datetime, Smart};
use typst_library::layout::{Abs, Em, PageRanges, PagedDocument, Transform};
use typst_library::text::Font;
use typst_library::visualize::Image;
use typst_syntax::Span;
use typst_utils::Deferred;

use crate::catalog::write_catalog;
use crate::color::{alloc_color_functions_refs, ColorFunctionRefs};
use crate::color_font::{write_color_fonts, ColorFontSlice};
use crate::embed::write_embedded_files;
use crate::extg::{write_graphic_states, ExtGState};
use crate::font::write_fonts;
use crate::gradient::{write_gradients, PdfGradient};
use crate::image::write_images;
use crate::named_destination::{write_named_destinations, NamedDestinations};
use crate::page::{alloc_page_refs, traverse_pages, write_page_tree, EncodedPage};
use crate::resources::{
    alloc_resources_refs, write_resource_dictionaries, Resources, ResourcesRefs,
};
use crate::tiling::{write_tilings, PdfTiling};
use typst_library::foundations::Smart;
use typst_library::layout::{PageRanges, PagedDocument};

/// Export a document into a PDF file.
///
/// Returns the raw bytes making up the PDF file.
#[typst_macros::time(name = "pdf")]
pub fn pdf(document: &PagedDocument, options: &PdfOptions) -> SourceResult<Vec<u8>> {
    PdfBuilder::new(document, options)
        .phase(|builder| builder.run(traverse_pages))?
        .phase(|builder| {
            Ok(GlobalRefs {
                color_functions: builder.run(alloc_color_functions_refs)?,
                pages: builder.run(alloc_page_refs)?,
                resources: builder.run(alloc_resources_refs)?,
            })
        })?
        .phase(|builder| {
            Ok(References {
                named_destinations: builder.run(write_named_destinations)?,
                fonts: builder.run(write_fonts)?,
                color_fonts: builder.run(write_color_fonts)?,
                images: builder.run(write_images)?,
                gradients: builder.run(write_gradients)?,
                tilings: builder.run(write_tilings)?,
                ext_gs: builder.run(write_graphic_states)?,
                embedded_files: builder.run(write_embedded_files)?,
            })
        })?
        .phase(|builder| builder.run(write_page_tree))?
        .phase(|builder| builder.run(write_resource_dictionaries))?
        .export_with(write_catalog)
    convert::convert(document, options)
}

/// Settings for PDF export.
|
||||
@ -103,82 +55,74 @@ pub struct PdfOptions<'a> {
|
||||
pub standards: PdfStandards,
|
||||
}
|
||||
|
||||
/// A timestamp with timezone information.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct Timestamp {
|
||||
/// The datetime of the timestamp.
|
||||
pub(crate) datetime: Datetime,
|
||||
/// The timezone of the timestamp.
|
||||
pub(crate) timezone: Timezone,
|
||||
}
|
||||
|
||||
impl Timestamp {
|
||||
/// Create a new timestamp with a given datetime and UTC suffix.
|
||||
pub fn new_utc(datetime: Datetime) -> Self {
|
||||
Self { datetime, timezone: Timezone::UTC }
|
||||
}
|
||||
|
||||
/// Create a new timestamp with a given datetime and a local timezone offset.
|
||||
pub fn new_local(datetime: Datetime, whole_minute_offset: i32) -> Option<Self> {
|
||||
let hour_offset = (whole_minute_offset / 60).try_into().ok()?;
|
||||
// Note: the `%` operator in Rust is the remainder operator, not the
|
||||
// modulo operator. The remainder operator can return negative results.
|
||||
// We can simply apply `abs` here because we assume the `minute_offset`
|
||||
// will have the same sign as `hour_offset`.
|
||||
let minute_offset = (whole_minute_offset % 60).abs().try_into().ok()?;
|
||||
match (hour_offset, minute_offset) {
|
||||
// Only accept valid timezone offsets with `-23 <= hours <= 23`,
|
||||
// and `0 <= minutes <= 59`.
|
||||
(-23..=23, 0..=59) => Some(Self {
|
||||
datetime,
|
||||
timezone: Timezone::Local { hour_offset, minute_offset },
|
||||
}),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A timezone.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum Timezone {
|
||||
/// The UTC timezone.
|
||||
UTC,
|
||||
/// The local timezone offset from UTC. The `minute_offset` has the
|
||||
/// same sign as `hour_offset`.
|
||||
Local { hour_offset: i8, minute_offset: u8 },
|
||||
}
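
To make the offset split above concrete, here is the arithmetic in isolation: a standalone sketch with invented offsets that mirrors the `/`-and-`%` logic of `new_local` without depending on the types above.

// Standalone sketch of the hour/minute split performed by
// `Timestamp::new_local`. Plain integers, no crate types.
fn split_offset(whole_minute_offset: i32) -> (i32, i32) {
    let hours = whole_minute_offset / 60; // `/` truncates toward zero
    let minutes = (whole_minute_offset % 60).abs(); // `%` keeps the sign, so strip it
    (hours, minutes)
}

fn main() {
    assert_eq!(split_offset(330), (5, 30)); // UTC+05:30
    assert_eq!(split_offset(-90), (-1, 30)); // UTC-01:30
    assert_eq!(split_offset(-720), (-12, 0)); // UTC-12:00
}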
|
||||
|
||||
/// Encapsulates a list of compatible PDF standards.
|
||||
#[derive(Clone)]
|
||||
pub struct PdfStandards {
|
||||
/// For now, we simplify to just PDF/A. But it can be more fine-grained in
|
||||
/// the future.
|
||||
pub(crate) pdfa: bool,
|
||||
/// Whether the standard allows for embedding any kind of file into the PDF.
|
||||
/// We disallow this for PDF/A-2, since it only allows embedding
|
||||
/// PDF/A-1 and PDF/A-2 documents.
|
||||
pub(crate) embedded_files: bool,
|
||||
/// Part of the PDF/A standard.
|
||||
pub(crate) pdfa_part: Option<(i32, &'static str)>,
|
||||
pub(crate) config: krilla::configure::Configuration,
|
||||
}
|
||||
|
||||
impl PdfStandards {
|
||||
/// Validates a list of PDF standards for compatibility and returns their
|
||||
/// encapsulated representation.
|
||||
pub fn new(list: &[PdfStandard]) -> StrResult<Self> {
|
||||
let a2b = list.contains(&PdfStandard::A_2b);
|
||||
let a3b = list.contains(&PdfStandard::A_3b);
|
||||
use krilla::configure::{Configuration, PdfVersion, Validator};
|
||||
|
||||
if a2b && a3b {
|
||||
bail!("PDF cannot conform to A-2B and A-3B at the same time")
|
||||
let mut version: Option<PdfVersion> = None;
|
||||
let mut set_version = |v: PdfVersion| -> StrResult<()> {
|
||||
if let Some(prev) = version {
|
||||
bail!(
|
||||
"PDF cannot conform to {} and {} at the same time",
|
||||
prev.as_str(),
|
||||
v.as_str()
|
||||
);
|
||||
}
|
||||
version = Some(v);
|
||||
Ok(())
|
||||
};
|
||||
|
||||
let mut validator = None;
|
||||
let mut set_validator = |v: Validator| -> StrResult<()> {
|
||||
if validator.is_some() {
|
||||
bail!("Typst currently only supports one PDF substandard at a time");
|
||||
}
|
||||
validator = Some(v);
|
||||
Ok(())
|
||||
};
|
||||
|
||||
for standard in list {
|
||||
match standard {
|
||||
PdfStandard::V_1_4 => set_version(PdfVersion::Pdf14)?,
|
||||
PdfStandard::V_1_5 => set_version(PdfVersion::Pdf15)?,
|
||||
PdfStandard::V_1_6 => set_version(PdfVersion::Pdf16)?,
|
||||
PdfStandard::V_1_7 => set_version(PdfVersion::Pdf17)?,
|
||||
PdfStandard::V_2_0 => set_version(PdfVersion::Pdf20)?,
|
||||
PdfStandard::A_1b => set_validator(Validator::A1_B)?,
|
||||
PdfStandard::A_2b => set_validator(Validator::A2_B)?,
|
||||
PdfStandard::A_2u => set_validator(Validator::A2_U)?,
|
||||
PdfStandard::A_3b => set_validator(Validator::A3_B)?,
|
||||
PdfStandard::A_3u => set_validator(Validator::A3_U)?,
|
||||
PdfStandard::A_4 => set_validator(Validator::A4)?,
|
||||
PdfStandard::A_4f => set_validator(Validator::A4F)?,
|
||||
PdfStandard::A_4e => set_validator(Validator::A4E)?,
|
||||
}
|
||||
}
|
||||
|
||||
let pdfa = a2b || a3b;
|
||||
Ok(Self {
|
||||
pdfa,
|
||||
embedded_files: !a2b,
|
||||
pdfa_part: pdfa.then_some((if a2b { 2 } else { 3 }, "B")),
|
||||
})
|
||||
let config = match (version, validator) {
|
||||
(Some(version), Some(validator)) => {
|
||||
Configuration::new_with(validator, version).ok_or_else(|| {
|
||||
eco_format!(
|
||||
"{} is not compatible with {}",
|
||||
version.as_str(),
|
||||
validator.as_str()
|
||||
)
|
||||
})?
|
||||
}
|
||||
(Some(version), None) => Configuration::new_with_version(version),
|
||||
(None, Some(validator)) => Configuration::new_with_validator(validator),
|
||||
(None, None) => Configuration::new_with_version(PdfVersion::Pdf17),
|
||||
};
|
||||
|
||||
Ok(Self { config })
|
||||
}
|
||||
}
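
Hypothetical usage of `PdfStandards::new`, assuming the crate is consumed as a library with the variants exported as shown; the first pairing should validate since PDF/A-2 is based on PDF 1.7.

use typst_pdf::{PdfStandard, PdfStandards};

fn main() {
    // One version plus one substandard is accepted.
    assert!(PdfStandards::new(&[PdfStandard::V_1_7, PdfStandard::A_2b]).is_ok());
    // Two substandards at once are rejected.
    assert!(PdfStandards::new(&[PdfStandard::A_2b, PdfStandard::A_3b]).is_err());
    // So are two conflicting versions.
    assert!(PdfStandards::new(&[PdfStandard::V_1_4, PdfStandard::V_2_0]).is_err());
}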
|
||||
|
||||
@ -190,7 +134,10 @@ impl Debug for PdfStandards {
|
||||
|
||||
impl Default for PdfStandards {
|
||||
fn default() -> Self {
|
||||
Self { pdfa: false, embedded_files: true, pdfa_part: None }
|
||||
use krilla::configure::{Configuration, PdfVersion};
|
||||
Self {
|
||||
config: Configuration::new_with_version(PdfVersion::Pdf17),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -201,531 +148,43 @@ impl Default for PdfStandards {
|
||||
#[allow(non_camel_case_types)]
|
||||
#[non_exhaustive]
|
||||
pub enum PdfStandard {
|
||||
/// PDF 1.4.
|
||||
#[serde(rename = "1.4")]
|
||||
V_1_4,
|
||||
/// PDF 1.5.
|
||||
#[serde(rename = "1.5")]
|
||||
V_1_5,
|
||||
/// PDF 1.6.
|
||||
#[serde(rename = "1.6")]
|
||||
V_1_6,
|
||||
/// PDF 1.7.
|
||||
#[serde(rename = "1.7")]
|
||||
V_1_7,
|
||||
/// PDF 2.0.
|
||||
#[serde(rename = "2.0")]
|
||||
V_2_0,
|
||||
/// PDF/A-1b.
|
||||
#[serde(rename = "a-1b")]
|
||||
A_1b,
|
||||
/// PDF/A-2b.
|
||||
#[serde(rename = "a-2b")]
|
||||
A_2b,
|
||||
/// PDF/A-2u.
#[serde(rename = "a-2u")]
A_2u,
/// PDF/A-3b.
#[serde(rename = "a-3b")]
A_3b,
/// PDF/A-3u.
#[serde(rename = "a-3u")]
A_3u,
/// PDF/A-4.
#[serde(rename = "a-4")]
A_4,
/// PDF/A-4f.
#[serde(rename = "a-4f")]
A_4f,
/// PDF/A-4e.
#[serde(rename = "a-4e")]
A_4e,
}
|
||||
|
||||
/// A struct to build a PDF following a fixed succession of phases.
|
||||
///
|
||||
/// This type uses generics to represent its current state. `S` (for "state") is
|
||||
/// all data that was produced by the previous phases, which is now read-only.
|
||||
///
|
||||
/// Phase after phase, this state will be transformed. Each phase corresponds to
|
||||
/// a call to the [eponymous function](`PdfBuilder::phase`) and produces a new
|
||||
/// part of the state that will be aggregated with all other information for
|
||||
/// consumption during the next phase.
|
||||
///
|
||||
/// In other words: this struct follows the **typestate pattern**. This prevents
|
||||
/// you from using data that is not yet available, at the type level.
|
||||
///
|
||||
/// Each phase consists of processes that can read the state of the previous
|
||||
/// phases, and construct a part of the new state.
|
||||
///
|
||||
/// A final step, that has direct access to the global reference allocator and
|
||||
/// PDF document, can be run with [`PdfBuilder::export_with`].
|
||||
struct PdfBuilder<S> {
|
||||
/// The context that has been accumulated so far.
|
||||
state: S,
|
||||
/// A global bump allocator.
|
||||
alloc: Ref,
|
||||
/// The PDF document that is being written.
|
||||
pdf: Pdf,
|
||||
}
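
A minimal sketch of the same typestate idea, detached from the actual phases; every name here is invented for illustration.

struct Start;
struct WithPages { pages: Vec<String> }
struct WithRefs { pages: Vec<String>, refs: Vec<u32> }

struct Builder<S> { state: S }

impl Builder<Start> {
    // Phase 1: collect the pages.
    fn collect_pages(self) -> Builder<WithPages> {
        Builder { state: WithPages { pages: vec!["first page".into()] } }
    }
}

impl Builder<WithPages> {
    // Phase 2 can read `pages`, because the type proves phase 1 ran.
    fn alloc_refs(self) -> Builder<WithRefs> {
        let refs = (1..=self.state.pages.len() as u32).collect();
        Builder { state: WithRefs { pages: self.state.pages, refs } }
    }
}

fn main() {
    let done = Builder { state: Start }.collect_pages().alloc_refs();
    assert_eq!(done.state.pages.len(), 1);
    assert_eq!(done.state.refs, vec![1]);
    // `Builder { state: Start }.alloc_refs()` would not compile.
}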
|
||||
|
||||
/// The initial state: we are exploring the document, collecting all resources
|
||||
/// that will be necessary later. The content of the pages is also built during
|
||||
/// this phase.
|
||||
struct WithDocument<'a> {
|
||||
/// The Typst document that is exported.
|
||||
document: &'a PagedDocument,
|
||||
/// Settings for PDF export.
|
||||
options: &'a PdfOptions<'a>,
|
||||
}
|
||||
|
||||
/// At this point, resources were listed, but they don't have any reference
|
||||
/// associated with them.
|
||||
///
|
||||
/// This phase allocates some global references.
|
||||
struct WithResources<'a> {
|
||||
document: &'a PagedDocument,
|
||||
options: &'a PdfOptions<'a>,
|
||||
/// The content of the pages encoded as PDF content streams.
|
||||
///
|
||||
/// The pages are at the index corresponding to their page number, but they
|
||||
/// may be `None` if they are not in the range specified by
|
||||
/// `exported_pages`.
|
||||
pages: Vec<Option<EncodedPage>>,
|
||||
/// The PDF resources that are used in the content of the pages.
|
||||
resources: Resources<()>,
|
||||
}
|
||||
|
||||
/// Global references.
|
||||
struct GlobalRefs {
|
||||
/// References for color conversion functions.
|
||||
color_functions: ColorFunctionRefs,
|
||||
/// Reference for pages.
|
||||
///
|
||||
/// Items of this vector are `None` if the corresponding page is not
|
||||
/// exported.
|
||||
pages: Vec<Option<Ref>>,
|
||||
/// References for the resource dictionaries.
|
||||
resources: ResourcesRefs,
|
||||
}
|
||||
|
||||
impl<'a> From<(WithDocument<'a>, (Vec<Option<EncodedPage>>, Resources<()>))>
|
||||
for WithResources<'a>
|
||||
{
|
||||
fn from(
|
||||
(previous, (pages, resources)): (
|
||||
WithDocument<'a>,
|
||||
(Vec<Option<EncodedPage>>, Resources<()>),
|
||||
),
|
||||
) -> Self {
|
||||
Self {
|
||||
document: previous.document,
|
||||
options: previous.options,
|
||||
pages,
|
||||
resources,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// At this point, the resources have been collected, and global references have
|
||||
/// been allocated.
|
||||
///
|
||||
/// We are now writing objects corresponding to resources and giving them references
|
||||
/// that will be collected in [`References`].
|
||||
struct WithGlobalRefs<'a> {
|
||||
document: &'a PagedDocument,
|
||||
options: &'a PdfOptions<'a>,
|
||||
pages: Vec<Option<EncodedPage>>,
|
||||
/// Resources are the same as in previous phases, but each dictionary now has a reference.
|
||||
resources: Resources,
|
||||
/// Global references that were just allocated.
|
||||
globals: GlobalRefs,
|
||||
}
|
||||
|
||||
impl<'a> From<(WithResources<'a>, GlobalRefs)> for WithGlobalRefs<'a> {
|
||||
fn from((previous, globals): (WithResources<'a>, GlobalRefs)) -> Self {
|
||||
Self {
|
||||
document: previous.document,
|
||||
options: previous.options,
|
||||
pages: previous.pages,
|
||||
resources: previous.resources.with_refs(&globals.resources),
|
||||
globals,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The references that have been assigned to each object.
|
||||
struct References {
|
||||
/// List of named destinations, each with an ID.
|
||||
named_destinations: NamedDestinations,
|
||||
/// The IDs of written fonts.
|
||||
fonts: HashMap<Font, Ref>,
|
||||
/// The IDs of written color fonts.
|
||||
color_fonts: HashMap<ColorFontSlice, Ref>,
|
||||
/// The IDs of written images.
|
||||
images: HashMap<Image, Ref>,
|
||||
/// The IDs of written gradients.
|
||||
gradients: HashMap<PdfGradient, Ref>,
|
||||
/// The IDs of written tilings.
|
||||
tilings: HashMap<PdfTiling, Ref>,
|
||||
/// The IDs of written external graphics states.
|
||||
ext_gs: HashMap<ExtGState, Ref>,
|
||||
/// The names and references for embedded files.
|
||||
embedded_files: BTreeMap<EcoString, Ref>,
|
||||
}
|
||||
|
||||
/// At this point, the references have been assigned to all resources. The page
|
||||
/// tree is going to be written, and given a reference. It is also at this point that
|
||||
/// the page contents are actually written.
|
||||
struct WithRefs<'a> {
|
||||
document: &'a PagedDocument,
|
||||
options: &'a PdfOptions<'a>,
|
||||
globals: GlobalRefs,
|
||||
pages: Vec<Option<EncodedPage>>,
|
||||
resources: Resources,
|
||||
/// References that were allocated for resources.
|
||||
references: References,
|
||||
}
|
||||
|
||||
impl<'a> From<(WithGlobalRefs<'a>, References)> for WithRefs<'a> {
|
||||
fn from((previous, references): (WithGlobalRefs<'a>, References)) -> Self {
|
||||
Self {
|
||||
document: previous.document,
|
||||
options: previous.options,
|
||||
globals: previous.globals,
|
||||
pages: previous.pages,
|
||||
resources: previous.resources,
|
||||
references,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// In this phase, we write resource dictionaries.
|
||||
///
|
||||
/// Each sub-resource gets its own isolated resource dictionary.
|
||||
struct WithEverything<'a> {
|
||||
document: &'a PagedDocument,
|
||||
options: &'a PdfOptions<'a>,
|
||||
globals: GlobalRefs,
|
||||
pages: Vec<Option<EncodedPage>>,
|
||||
resources: Resources,
|
||||
references: References,
|
||||
/// Reference that was allocated for the page tree.
|
||||
page_tree_ref: Ref,
|
||||
}
|
||||
|
||||
impl<'a> From<(WithEverything<'a>, ())> for WithEverything<'a> {
|
||||
fn from((this, _): (WithEverything<'a>, ())) -> Self {
|
||||
this
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<(WithRefs<'a>, Ref)> for WithEverything<'a> {
|
||||
fn from((previous, page_tree_ref): (WithRefs<'a>, Ref)) -> Self {
|
||||
Self {
|
||||
document: previous.document,
|
||||
options: previous.options,
|
||||
globals: previous.globals,
|
||||
resources: previous.resources,
|
||||
references: previous.references,
|
||||
pages: previous.pages,
|
||||
page_tree_ref,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> PdfBuilder<WithDocument<'a>> {
|
||||
/// Start building a PDF for a Typst document.
|
||||
fn new(document: &'a PagedDocument, options: &'a PdfOptions<'a>) -> Self {
|
||||
Self {
|
||||
alloc: Ref::new(1),
|
||||
pdf: Pdf::new(),
|
||||
state: WithDocument { document, options },
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<S> PdfBuilder<S> {
|
||||
/// Start a new phase, and save its output in the global state.
|
||||
fn phase<NS, B, O>(mut self, builder: B) -> SourceResult<PdfBuilder<NS>>
|
||||
where
|
||||
// New state
|
||||
NS: From<(S, O)>,
|
||||
// Builder
|
||||
B: Fn(&mut Self) -> SourceResult<O>,
|
||||
{
|
||||
let output = builder(&mut self)?;
|
||||
Ok(PdfBuilder {
|
||||
state: NS::from((self.state, output)),
|
||||
alloc: self.alloc,
|
||||
pdf: self.pdf,
|
||||
})
|
||||
}
|
||||
|
||||
/// Run a step with the current state, merges its output into the PDF file,
|
||||
/// and renumbers any references it returned.
|
||||
fn run<P, O>(&mut self, process: P) -> SourceResult<O>
|
||||
where
|
||||
// Process
|
||||
P: Fn(&S) -> SourceResult<(PdfChunk, O)>,
|
||||
// Output
|
||||
O: Renumber,
|
||||
{
|
||||
let (chunk, mut output) = process(&self.state)?;
|
||||
// Allocate a final reference for each temporary one
|
||||
let allocated = chunk.alloc.get() - TEMPORARY_REFS_START;
|
||||
let offset = TEMPORARY_REFS_START - self.alloc.get();
|
||||
|
||||
// Merge the chunk into the PDF, using the new references
|
||||
chunk.renumber_into(&mut self.pdf, |mut r| {
|
||||
r.renumber(offset);
|
||||
|
||||
r
|
||||
});
|
||||
|
||||
// Also update the references in the output
|
||||
output.renumber(offset);
|
||||
|
||||
self.alloc = Ref::new(self.alloc.get() + allocated);
|
||||
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
/// Finalize the PDF export and returns the buffer representing the
|
||||
/// document.
|
||||
fn export_with<P>(mut self, process: P) -> SourceResult<Vec<u8>>
|
||||
where
|
||||
P: Fn(S, &mut Pdf, &mut Ref) -> SourceResult<()>,
|
||||
{
|
||||
process(self.state, &mut self.pdf, &mut self.alloc)?;
|
||||
Ok(self.pdf.finish())
|
||||
}
|
||||
}
|
||||
|
||||
/// A reference or collection of references that can be re-numbered,
|
||||
/// to become valid in a global scope.
|
||||
trait Renumber {
|
||||
/// Renumber this value by shifting any references it contains by `offset`.
|
||||
fn renumber(&mut self, offset: i32);
|
||||
}
|
||||
|
||||
impl Renumber for () {
|
||||
fn renumber(&mut self, _offset: i32) {}
|
||||
}
|
||||
|
||||
impl Renumber for Ref {
|
||||
fn renumber(&mut self, offset: i32) {
|
||||
if self.get() >= TEMPORARY_REFS_START {
|
||||
*self = Ref::new(self.get() - offset);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R: Renumber> Renumber for Vec<R> {
|
||||
fn renumber(&mut self, offset: i32) {
|
||||
for item in self {
|
||||
item.renumber(offset);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Eq + Hash, R: Renumber> Renumber for HashMap<T, R> {
|
||||
fn renumber(&mut self, offset: i32) {
|
||||
for v in self.values_mut() {
|
||||
v.renumber(offset);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Ord, R: Renumber> Renumber for BTreeMap<T, R> {
|
||||
fn renumber(&mut self, offset: i32) {
|
||||
for v in self.values_mut() {
|
||||
v.renumber(offset);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R: Renumber> Renumber for Option<R> {
|
||||
fn renumber(&mut self, offset: i32) {
|
||||
if let Some(r) = self {
|
||||
r.renumber(offset)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, R: Renumber> Renumber for (T, R) {
|
||||
fn renumber(&mut self, offset: i32) {
|
||||
self.1.renumber(offset)
|
||||
}
|
||||
}
|
||||
|
||||
/// A portion of a PDF file.
|
||||
struct PdfChunk {
|
||||
/// The actual chunk.
|
||||
chunk: Chunk,
|
||||
/// A local allocator.
|
||||
alloc: Ref,
|
||||
}
|
||||
|
||||
/// Any reference below that value was already allocated before and
|
||||
/// should not be rewritten. Anything above was allocated in the current
|
||||
/// chunk, and should be remapped.
|
||||
///
|
||||
/// This is a constant (large enough to avoid collisions) and not
|
||||
/// dependent on `self.alloc` to allow for better memoization of steps, if
|
||||
/// needed in the future.
|
||||
const TEMPORARY_REFS_START: i32 = 1_000_000_000;
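
Worked through with invented numbers, the remapping performed by `run` behaves like this:

// Sketch of the renumbering arithmetic from `run`, with invented values.
const TEMPORARY_REFS_START: i32 = 1_000_000_000;

fn main() {
    let global_next = 42; // `self.alloc`: next free global reference
    let chunk_next = TEMPORARY_REFS_START + 3; // the chunk allocated 3 refs

    let allocated = chunk_next - TEMPORARY_REFS_START; // 3
    let offset = TEMPORARY_REFS_START - global_next; // 999_999_958

    // Temporary references land in the free global range...
    assert_eq!(1_000_000_000 - offset, 42);
    assert_eq!(1_000_000_002 - offset, 44);
    // ...and the global allocator advances past them.
    assert_eq!(global_next + allocated, 45);
}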
|
||||
|
||||
|
||||
impl PdfChunk {
|
||||
/// Start writing a new part of the document.
|
||||
fn new() -> Self {
|
||||
PdfChunk {
|
||||
chunk: Chunk::new(),
|
||||
alloc: Ref::new(TEMPORARY_REFS_START),
|
||||
}
|
||||
}
|
||||
|
||||
/// Allocate a reference that is valid in the context of this chunk.
|
||||
///
|
||||
/// References allocated with this function should be [renumbered](`Renumber::renumber`)
|
||||
/// before being used in other chunks. This is done automatically if these
|
||||
/// references are stored in the global `PdfBuilder` state.
|
||||
fn alloc(&mut self) -> Ref {
|
||||
self.alloc.bump()
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for PdfChunk {
|
||||
type Target = Chunk;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.chunk
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for PdfChunk {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.chunk
|
||||
}
|
||||
}
|
||||
|
||||
/// Compress data with the DEFLATE algorithm.
|
||||
fn deflate(data: &[u8]) -> Vec<u8> {
|
||||
const COMPRESSION_LEVEL: u8 = 6;
|
||||
miniz_oxide::deflate::compress_to_vec_zlib(data, COMPRESSION_LEVEL)
|
||||
}
|
||||
|
||||
/// Memoized and deferred version of [`deflate`] specialized for a page's content
|
||||
/// stream.
|
||||
#[comemo::memoize]
|
||||
fn deflate_deferred(content: Vec<u8>) -> Deferred<Vec<u8>> {
|
||||
Deferred::new(move || deflate(&content))
|
||||
}
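
For example, a page's content stream can be queued for compression early and only awaited when the stream is written. This fragment assumes the module's helpers above and that `Deferred::wait` blocks until the value is ready, as its later use for page contents suggests.

// Hypothetical fragment: compress lazily, block only at write time.
let content: Vec<u8> = b"0 0 100 100 re f".to_vec();
let deferred = deflate_deferred(content);
// ... other pages are encoded in the meantime ...
let compressed: &Vec<u8> = deferred.wait();
assert!(!compressed.is_empty());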
|
||||
|
||||
/// Create a base64-encoded hash of the value.
|
||||
fn hash_base64<T: Hash>(value: &T) -> String {
|
||||
base64::engine::general_purpose::STANDARD
|
||||
.encode(typst_utils::hash128(value).to_be_bytes())
|
||||
}
|
||||
|
||||
/// Additional methods for [`Abs`].
|
||||
trait AbsExt {
|
||||
/// Convert an absolute length to a number of points.
|
||||
fn to_f32(self) -> f32;
|
||||
}
|
||||
|
||||
impl AbsExt for Abs {
|
||||
fn to_f32(self) -> f32 {
|
||||
self.to_pt() as f32
|
||||
}
|
||||
}
|
||||
|
||||
/// Additional methods for [`Em`].
|
||||
trait EmExt {
|
||||
/// Convert an em length to a number of PDF font units.
|
||||
fn to_font_units(self) -> f32;
|
||||
}
|
||||
|
||||
impl EmExt for Em {
|
||||
fn to_font_units(self) -> f32 {
|
||||
1000.0 * self.get() as f32
|
||||
}
|
||||
}
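
Taken together, the two helpers pin down the unit conventions. A small sketch, assuming this module's extension traits are in scope along with typst-library's constructors:

use typst_library::layout::{Abs, Em};

// PDF user space counts in points, so `Abs` converts via `to_pt`.
assert_eq!(Abs::pt(10.0).to_f32(), 10.0);

// PDF font programs use 1000 units per em.
assert_eq!(Em::new(0.5).to_font_units(), 500.0);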
|
||||
|
||||
trait NameExt<'a> {
|
||||
/// The maximum length of a name in PDF/A.
|
||||
const PDFA_LIMIT: usize = 127;
|
||||
}
|
||||
|
||||
impl<'a> NameExt<'a> for Name<'a> {}
|
||||
|
||||
/// Additional methods for [`Str`].
|
||||
trait StrExt<'a>: Sized {
|
||||
/// The maximum length of a string in PDF/A.
|
||||
const PDFA_LIMIT: usize = 32767;
|
||||
|
||||
/// Create a string that satisfies the constraints of PDF/A.
|
||||
#[allow(unused)]
|
||||
fn trimmed(string: &'a [u8]) -> Self;
|
||||
}
|
||||
|
||||
impl<'a> StrExt<'a> for Str<'a> {
|
||||
fn trimmed(string: &'a [u8]) -> Self {
|
||||
Self(&string[..string.len().min(Self::PDFA_LIMIT)])
|
||||
}
|
||||
}
|
||||
|
||||
/// Additional methods for [`TextStr`].
|
||||
trait TextStrExt<'a>: Sized {
|
||||
/// The maximum length of a string in PDF/A.
|
||||
const PDFA_LIMIT: usize = Str::PDFA_LIMIT;
|
||||
|
||||
/// Create a text string that satisfies the constraints of PDF/A.
|
||||
fn trimmed(string: &'a str) -> Self;
|
||||
}
|
||||
|
||||
impl<'a> TextStrExt<'a> for TextStr<'a> {
|
||||
fn trimmed(string: &'a str) -> Self {
|
||||
Self(&string[..string.len().min(Self::PDFA_LIMIT)])
|
||||
}
|
||||
}
|
||||
|
||||
/// Extension trait for [`Content`](pdf_writer::Content).
|
||||
trait ContentExt {
|
||||
fn save_state_checked(&mut self) -> SourceResult<()>;
|
||||
}
|
||||
|
||||
impl ContentExt for pdf_writer::Content {
|
||||
fn save_state_checked(&mut self) -> SourceResult<()> {
|
||||
self.save_state();
|
||||
if self.state_nesting_depth() > 28 {
|
||||
bail!(
|
||||
Span::detached(),
|
||||
"maximum PDF grouping depth exceeding";
|
||||
hint: "try to avoid excessive nesting of layout containers",
|
||||
);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
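
The cutoff of 28 presumably corresponds to the `q`/`Q` graphics state nesting limit that the PDF specification's implementation limits (Annex C) list for conforming readers, so a document that would exceed it fails early with a hint instead of rendering incorrectly in some viewers.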
|
||||
|
||||
/// Convert to an array of floats.
|
||||
fn transform_to_array(ts: Transform) -> [f32; 6] {
|
||||
[
|
||||
ts.sx.get() as f32,
|
||||
ts.ky.get() as f32,
|
||||
ts.kx.get() as f32,
|
||||
ts.sy.get() as f32,
|
||||
ts.tx.to_f32(),
|
||||
ts.ty.to_f32(),
|
||||
]
|
||||
}
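
The array follows PDF's `[a b c d e f]` matrix order. For a pure translation, only the last two entries are non-trivial; a sketch assuming typst-library's `Transform` constructors:

use typst_library::layout::{Abs, Transform};

let ts = Transform::translate(Abs::pt(5.0), Abs::pt(7.0));
assert_eq!(transform_to_array(ts), [1.0, 0.0, 0.0, 1.0, 5.0, 7.0]);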
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_timestamp_new_local() {
|
||||
let dummy_datetime = Datetime::from_ymd_hms(2024, 12, 17, 10, 10, 10).unwrap();
|
||||
let test = |whole_minute_offset, expect_timezone| {
|
||||
assert_eq!(
|
||||
Timestamp::new_local(dummy_datetime, whole_minute_offset)
|
||||
.unwrap()
|
||||
.timezone,
|
||||
expect_timezone
|
||||
);
|
||||
};
|
||||
|
||||
// Valid timezone offsets
|
||||
test(0, Timezone::Local { hour_offset: 0, minute_offset: 0 });
|
||||
test(480, Timezone::Local { hour_offset: 8, minute_offset: 0 });
|
||||
test(-480, Timezone::Local { hour_offset: -8, minute_offset: 0 });
|
||||
test(330, Timezone::Local { hour_offset: 5, minute_offset: 30 });
|
||||
test(-210, Timezone::Local { hour_offset: -3, minute_offset: 30 });
|
||||
test(-720, Timezone::Local { hour_offset: -12, minute_offset: 0 }); // AoE
|
||||
|
||||
// Corner cases
|
||||
test(315, Timezone::Local { hour_offset: 5, minute_offset: 15 });
|
||||
test(-225, Timezone::Local { hour_offset: -3, minute_offset: 45 });
|
||||
test(1439, Timezone::Local { hour_offset: 23, minute_offset: 59 });
|
||||
test(-1439, Timezone::Local { hour_offset: -23, minute_offset: 59 });
|
||||
|
||||
// Invalid timezone offsets
|
||||
assert!(Timestamp::new_local(dummy_datetime, 1440).is_none());
|
||||
assert!(Timestamp::new_local(dummy_datetime, -1440).is_none());
|
||||
assert!(Timestamp::new_local(dummy_datetime, i32::MAX).is_none());
|
||||
assert!(Timestamp::new_local(dummy_datetime, i32::MIN).is_none());
|
||||
}
|
||||
}
|
||||
|
94
crates/typst-pdf/src/link.rs
Normal file
@ -0,0 +1,94 @@
|
||||
use krilla::action::{Action, LinkAction};
|
||||
use krilla::annotation::{LinkAnnotation, Target};
|
||||
use krilla::destination::XyzDestination;
|
||||
use krilla::geom::Rect;
|
||||
use typst_library::layout::{Abs, Point, Size};
|
||||
use typst_library::model::Destination;
|
||||
|
||||
use crate::convert::{FrameContext, GlobalContext};
|
||||
use crate::util::{AbsExt, PointExt};
|
||||
|
||||
pub(crate) fn handle_link(
|
||||
fc: &mut FrameContext,
|
||||
gc: &mut GlobalContext,
|
||||
dest: &Destination,
|
||||
size: Size,
|
||||
) {
|
||||
let mut min_x = Abs::inf();
|
||||
let mut min_y = Abs::inf();
|
||||
let mut max_x = -Abs::inf();
|
||||
let mut max_y = -Abs::inf();
|
||||
|
||||
let pos = Point::zero();
|
||||
|
||||
// Compute the bounding box of the transformed link.
|
||||
for point in [
|
||||
pos,
|
||||
pos + Point::with_x(size.x),
|
||||
pos + Point::with_y(size.y),
|
||||
pos + size.to_point(),
|
||||
] {
|
||||
let t = point.transform(fc.state().transform());
|
||||
min_x.set_min(t.x);
|
||||
min_y.set_min(t.y);
|
||||
max_x.set_max(t.x);
|
||||
max_y.set_max(t.y);
|
||||
}
|
||||
|
||||
let x1 = min_x.to_f32();
|
||||
let x2 = max_x.to_f32();
|
||||
let y1 = min_y.to_f32();
|
||||
let y2 = max_y.to_f32();
|
||||
|
||||
let rect = Rect::from_ltrb(x1, y1, x2, y2).unwrap();
|
||||
|
||||
// TODO: Support quad points.
|
||||
|
||||
let pos = match dest {
|
||||
Destination::Url(u) => {
|
||||
fc.push_annotation(
|
||||
LinkAnnotation::new(
|
||||
rect,
|
||||
None,
|
||||
Target::Action(Action::Link(LinkAction::new(u.to_string()))),
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
return;
|
||||
}
|
||||
Destination::Position(p) => *p,
|
||||
Destination::Location(loc) => {
|
||||
if let Some(nd) = gc.loc_to_names.get(loc) {
|
||||
// If a named destination has been registered, it's already guaranteed to
|
||||
// not point to an excluded page.
|
||||
fc.push_annotation(
|
||||
LinkAnnotation::new(
|
||||
rect,
|
||||
None,
|
||||
Target::Destination(krilla::destination::Destination::Named(
|
||||
nd.clone(),
|
||||
)),
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
return;
|
||||
} else {
|
||||
gc.document.introspector.position(*loc)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let page_index = pos.page.get() - 1;
|
||||
if let Some(index) = gc.page_index_converter.pdf_page_index(page_index) {
|
||||
fc.push_annotation(
|
||||
LinkAnnotation::new(
|
||||
rect,
|
||||
None,
|
||||
Target::Destination(krilla::destination::Destination::Xyz(
|
||||
XyzDestination::new(index, pos.point.to_krilla()),
|
||||
)),
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
}
|
184
crates/typst-pdf/src/metadata.rs
Normal file
@ -0,0 +1,184 @@
|
||||
use ecow::EcoString;
|
||||
use krilla::metadata::{Metadata, TextDirection};
|
||||
use typst_library::foundations::{Datetime, Smart};
|
||||
use typst_library::layout::Dir;
|
||||
use typst_library::text::Lang;
|
||||
|
||||
use crate::convert::GlobalContext;
|
||||
|
||||
pub(crate) fn build_metadata(gc: &GlobalContext) -> Metadata {
|
||||
let creator = format!("Typst {}", env!("CARGO_PKG_VERSION"));
|
||||
|
||||
let lang = gc.languages.iter().max_by_key(|(_, &count)| count).map(|(&l, _)| l);
|
||||
|
||||
let dir = if lang.map(Lang::dir) == Some(Dir::RTL) {
|
||||
TextDirection::RightToLeft
|
||||
} else {
|
||||
TextDirection::LeftToRight
|
||||
};
|
||||
|
||||
let mut metadata = Metadata::new()
|
||||
.creator(creator)
|
||||
.keywords(gc.document.info.keywords.iter().map(EcoString::to_string).collect())
|
||||
.authors(gc.document.info.author.iter().map(EcoString::to_string).collect());
|
||||
|
||||
|
||||
|
||||
if let Some(lang) = lang {
|
||||
metadata = metadata.language(lang.as_str().to_string());
|
||||
}
|
||||
|
||||
if let Some(title) = &gc.document.info.title {
|
||||
metadata = metadata.title(title.to_string());
|
||||
}
|
||||
|
||||
if let Some(subject) = &gc.document.info.description {
|
||||
metadata = metadata.subject(subject.to_string());
|
||||
}
|
||||
|
||||
if let Some(ident) = gc.options.ident.custom() {
|
||||
metadata = metadata.document_id(ident.to_string());
|
||||
}
|
||||
|
||||
// (1) If the `document.date` is set to specific `datetime` or `none`, use it.
|
||||
// (2) If the `document.date` is set to `auto` or not set, try to use the
|
||||
// date from the options.
|
||||
// (3) Otherwise, we don't write date metadata.
|
||||
let (date, tz) = match (gc.document.info.date, gc.options.timestamp) {
|
||||
(Smart::Custom(date), _) => (date, None),
|
||||
(Smart::Auto, Some(timestamp)) => {
|
||||
(Some(timestamp.datetime), Some(timestamp.timezone))
|
||||
}
|
||||
_ => (None, None),
|
||||
};
|
||||
|
||||
if let Some(date) = date.and_then(|d| convert_date(d, tz)) {
|
||||
metadata = metadata.creation_date(date);
|
||||
}
|
||||
|
||||
metadata = metadata.text_direction(dir);
|
||||
|
||||
metadata
|
||||
}
|
||||
|
||||
fn convert_date(
|
||||
datetime: Datetime,
|
||||
tz: Option<Timezone>,
|
||||
) -> Option<krilla::metadata::DateTime> {
|
||||
let year = datetime.year().filter(|&y| y >= 0)? as u16;
|
||||
|
||||
let mut kd = krilla::metadata::DateTime::new(year);
|
||||
|
||||
if let Some(month) = datetime.month() {
|
||||
kd = kd.month(month);
|
||||
}
|
||||
|
||||
if let Some(day) = datetime.day() {
|
||||
kd = kd.day(day);
|
||||
}
|
||||
|
||||
if let Some(h) = datetime.hour() {
|
||||
kd = kd.hour(h);
|
||||
}
|
||||
|
||||
if let Some(m) = datetime.minute() {
|
||||
kd = kd.minute(m);
|
||||
}
|
||||
|
||||
if let Some(s) = datetime.second() {
|
||||
kd = kd.second(s);
|
||||
}
|
||||
|
||||
match tz {
|
||||
Some(Timezone::UTC) => kd = kd.utc_offset_hour(0).utc_offset_minute(0),
|
||||
Some(Timezone::Local { hour_offset, minute_offset }) => {
|
||||
kd = kd.utc_offset_hour(hour_offset).utc_offset_minute(minute_offset)
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
|
||||
Some(kd)
|
||||
}
|
||||
|
||||
/// A timestamp with timezone information.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct Timestamp {
|
||||
/// The datetime of the timestamp.
|
||||
pub(crate) datetime: Datetime,
|
||||
/// The timezone of the timestamp.
|
||||
pub(crate) timezone: Timezone,
|
||||
}
|
||||
|
||||
impl Timestamp {
|
||||
/// Create a new timestamp with a given datetime and UTC suffix.
|
||||
pub fn new_utc(datetime: Datetime) -> Self {
|
||||
Self { datetime, timezone: Timezone::UTC }
|
||||
}
|
||||
|
||||
/// Create a new timestamp with a given datetime and a local timezone offset.
|
||||
pub fn new_local(datetime: Datetime, whole_minute_offset: i32) -> Option<Self> {
|
||||
let hour_offset = (whole_minute_offset / 60).try_into().ok()?;
|
||||
// Note: the `%` operator in Rust is the remainder operator, not the
|
||||
// modulo operator. The remainder operator can return negative results.
|
||||
// We can simply apply `abs` here because we assume the `minute_offset`
|
||||
// will have the same sign as `hour_offset`.
|
||||
let minute_offset = (whole_minute_offset % 60).abs().try_into().ok()?;
|
||||
match (hour_offset, minute_offset) {
|
||||
// Only accept valid timezone offsets with `-23 <= hours <= 23`,
|
||||
// and `0 <= minutes <= 59`.
|
||||
(-23..=23, 0..=59) => Some(Self {
|
||||
datetime,
|
||||
timezone: Timezone::Local { hour_offset, minute_offset },
|
||||
}),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A timezone.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum Timezone {
|
||||
/// The UTC timezone.
|
||||
UTC,
|
||||
/// The local timezone offset from UTC. The `minute_offset` has the
|
||||
/// same sign as `hour_offset`.
|
||||
Local { hour_offset: i8, minute_offset: u8 },
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_timestamp_new_local() {
|
||||
let dummy_datetime = Datetime::from_ymd_hms(2024, 12, 17, 10, 10, 10).unwrap();
|
||||
let test = |whole_minute_offset, expect_timezone| {
|
||||
assert_eq!(
|
||||
Timestamp::new_local(dummy_datetime, whole_minute_offset)
|
||||
.unwrap()
|
||||
.timezone,
|
||||
expect_timezone
|
||||
);
|
||||
};
|
||||
|
||||
// Valid timezone offsets
|
||||
test(0, Timezone::Local { hour_offset: 0, minute_offset: 0 });
|
||||
test(480, Timezone::Local { hour_offset: 8, minute_offset: 0 });
|
||||
test(-480, Timezone::Local { hour_offset: -8, minute_offset: 0 });
|
||||
test(330, Timezone::Local { hour_offset: 5, minute_offset: 30 });
|
||||
test(-210, Timezone::Local { hour_offset: -3, minute_offset: 30 });
|
||||
test(-720, Timezone::Local { hour_offset: -12, minute_offset: 0 }); // AoE
|
||||
|
||||
// Corner cases
|
||||
test(315, Timezone::Local { hour_offset: 5, minute_offset: 15 });
|
||||
test(-225, Timezone::Local { hour_offset: -3, minute_offset: 45 });
|
||||
test(1439, Timezone::Local { hour_offset: 23, minute_offset: 59 });
|
||||
test(-1439, Timezone::Local { hour_offset: -23, minute_offset: 59 });
|
||||
|
||||
// Invalid timezone offsets
|
||||
assert!(Timestamp::new_local(dummy_datetime, 1440).is_none());
|
||||
assert!(Timestamp::new_local(dummy_datetime, -1440).is_none());
|
||||
assert!(Timestamp::new_local(dummy_datetime, i32::MAX).is_none());
|
||||
assert!(Timestamp::new_local(dummy_datetime, i32::MIN).is_none());
|
||||
}
|
||||
}
|
@ -1,86 +0,0 @@
|
||||
use std::collections::{HashMap, HashSet};
|
||||
|
||||
use pdf_writer::writers::Destination;
|
||||
use pdf_writer::{Ref, Str};
|
||||
use typst_library::diag::SourceResult;
|
||||
use typst_library::foundations::{Label, NativeElement};
|
||||
use typst_library::introspection::Location;
|
||||
use typst_library::layout::Abs;
|
||||
use typst_library::model::HeadingElem;
|
||||
|
||||
use crate::{AbsExt, PdfChunk, Renumber, StrExt, WithGlobalRefs};
|
||||
|
||||
/// A list of destinations in the PDF document (a specific point on a specific
|
||||
/// page) that have a name associated with them.
|
||||
///
|
||||
/// Typst creates a named destination for each labeled heading in the document, which
|
||||
/// will then be written in the document catalog. PDF readers can then display
|
||||
/// them to show a clickable outline of the document.
|
||||
#[derive(Default)]
|
||||
pub struct NamedDestinations {
|
||||
/// A map between elements and their associated labels
|
||||
pub loc_to_dest: HashMap<Location, Label>,
|
||||
/// A sorted list of all named destinations.
|
||||
pub dests: Vec<(Label, Ref)>,
|
||||
}
|
||||
|
||||
impl Renumber for NamedDestinations {
|
||||
fn renumber(&mut self, offset: i32) {
|
||||
for (_, reference) in &mut self.dests {
|
||||
reference.renumber(offset);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Fills in the map and vector for named destinations and writes the indirect
|
||||
/// destination objects.
|
||||
pub fn write_named_destinations(
|
||||
context: &WithGlobalRefs,
|
||||
) -> SourceResult<(PdfChunk, NamedDestinations)> {
|
||||
let mut chunk = PdfChunk::new();
|
||||
let mut out = NamedDestinations::default();
|
||||
let mut seen = HashSet::new();
|
||||
|
||||
// Find all headings that have a label and are the first among other
|
||||
// headings with the same label.
|
||||
let mut matches: Vec<_> = context
|
||||
.document
|
||||
.introspector
|
||||
.query(&HeadingElem::elem().select())
|
||||
.iter()
|
||||
.filter_map(|elem| elem.location().zip(elem.label()))
|
||||
.filter(|&(_, label)| seen.insert(label))
|
||||
.collect();
|
||||
|
||||
// Named destinations must be sorted by key.
|
||||
matches.sort_by_key(|&(_, label)| label.resolve());
|
||||
|
||||
for (loc, label) in matches {
|
||||
// Don't encode named destinations that would exceed the limit. Those
|
||||
// will instead be encoded as normal links.
|
||||
if label.resolve().len() > Str::PDFA_LIMIT {
|
||||
continue;
|
||||
}
|
||||
|
||||
let pos = context.document.introspector.position(loc);
|
||||
let index = pos.page.get() - 1;
|
||||
let y = (pos.point.y - Abs::pt(10.0)).max(Abs::zero());
|
||||
|
||||
if let Some((Some(page), Some(page_ref))) =
|
||||
context.pages.get(index).zip(context.globals.pages.get(index))
|
||||
{
|
||||
let dest_ref = chunk.alloc();
|
||||
let x = pos.point.x.to_f32();
|
||||
let y = (page.content.size.y - y).to_f32();
|
||||
out.dests.push((label, dest_ref));
|
||||
out.loc_to_dest.insert(loc, label);
|
||||
chunk
|
||||
.indirect(dest_ref)
|
||||
.start::<Destination>()
|
||||
.page(*page_ref)
|
||||
.xyz(x, y, None);
|
||||
}
|
||||
}
|
||||
|
||||
Ok((chunk, out))
|
||||
}
|
@ -1,18 +1,15 @@
|
||||
use std::num::NonZeroUsize;
|
||||
|
||||
use pdf_writer::{Finish, Pdf, Ref, TextStr};
|
||||
use krilla::destination::XyzDestination;
|
||||
use krilla::outline::{Outline, OutlineNode};
|
||||
use typst_library::foundations::{NativeElement, Packed, StyleChain};
|
||||
use typst_library::layout::Abs;
|
||||
use typst_library::model::HeadingElem;
|
||||
|
||||
use crate::{AbsExt, TextStrExt, WithEverything};
|
||||
use crate::convert::GlobalContext;
|
||||
use crate::util::AbsExt;
|
||||
|
||||
/// Construct the outline for the document.
|
||||
pub(crate) fn write_outline(
|
||||
chunk: &mut Pdf,
|
||||
alloc: &mut Ref,
|
||||
ctx: &WithEverything,
|
||||
) -> Option<Ref> {
|
||||
pub(crate) fn build_outline(gc: &GlobalContext) -> Outline {
|
||||
let mut tree: Vec<HeadingNode> = vec![];
|
||||
|
||||
// Stores the level of the topmost skipped ancestor of the next bookmarked
|
||||
@ -21,14 +18,14 @@ pub(crate) fn write_outline(
|
||||
// Therefore, its next descendant must be added at its level, which is
|
||||
// enforced in the manner shown below.
|
||||
let mut last_skipped_level = None;
|
||||
let elements = ctx.document.introspector.query(&HeadingElem::elem().select());
|
||||
let elements = &gc.document.introspector.query(&HeadingElem::elem().select());
|
||||
|
||||
for elem in elements.iter() {
|
||||
if let Some(page_ranges) = &ctx.options.page_ranges {
|
||||
if let Some(page_ranges) = &gc.options.page_ranges {
|
||||
if !page_ranges
|
||||
.includes_page(ctx.document.introspector.page(elem.location().unwrap()))
|
||||
.includes_page(gc.document.introspector.page(elem.location().unwrap()))
|
||||
{
|
||||
// Don't bookmark headings in non-exported pages
|
||||
// Don't bookmark headings in non-exported pages.
|
||||
continue;
|
||||
}
|
||||
}
|
||||
@ -95,39 +92,15 @@ pub(crate) fn write_outline(
|
||||
}
|
||||
}
|
||||
|
||||
if tree.is_empty() {
|
||||
return None;
|
||||
let mut outline = Outline::new();
|
||||
|
||||
for child in convert_nodes(&tree, gc) {
|
||||
outline.push_child(child);
|
||||
}
|
||||
|
||||
let root_id = alloc.bump();
|
||||
let start_ref = *alloc;
|
||||
let len = tree.len();
|
||||
|
||||
let mut prev_ref = None;
|
||||
for (i, node) in tree.iter().enumerate() {
|
||||
prev_ref = Some(write_outline_item(
|
||||
ctx,
|
||||
chunk,
|
||||
alloc,
|
||||
node,
|
||||
root_id,
|
||||
prev_ref,
|
||||
i + 1 == len,
|
||||
));
|
||||
outline
|
||||
}
|
||||
|
||||
chunk
|
||||
.outline(root_id)
|
||||
.first(start_ref)
|
||||
.last(Ref::new(
|
||||
alloc.get() - tree.last().map(|child| child.len() as i32).unwrap_or(1),
|
||||
))
|
||||
.count(tree.len() as i32);
|
||||
|
||||
Some(root_id)
|
||||
}
|
||||
|
||||
/// A heading in the outline panel.
|
||||
#[derive(Debug)]
|
||||
struct HeadingNode<'a> {
|
||||
element: &'a Packed<HeadingElem>,
|
||||
@ -149,73 +122,31 @@ impl<'a> HeadingNode<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn len(&self) -> usize {
|
||||
1 + self.children.iter().map(Self::len).sum::<usize>()
|
||||
}
|
||||
}
|
||||
fn to_krilla(&self, gc: &GlobalContext) -> Option<OutlineNode> {
|
||||
let loc = self.element.location().unwrap();
|
||||
let title = self.element.body.plain_text().to_string();
|
||||
let pos = gc.document.introspector.position(loc);
|
||||
let page_index = pos.page.get() - 1;
|
||||
|
||||
/// Write an outline item and all its children.
|
||||
fn write_outline_item(
|
||||
ctx: &WithEverything,
|
||||
chunk: &mut Pdf,
|
||||
alloc: &mut Ref,
|
||||
node: &HeadingNode,
|
||||
parent_ref: Ref,
|
||||
prev_ref: Option<Ref>,
|
||||
is_last: bool,
|
||||
) -> Ref {
|
||||
let id = alloc.bump();
|
||||
let next_ref = Ref::new(id.get() + node.len() as i32);
|
||||
|
||||
let mut outline = chunk.outline_item(id);
|
||||
outline.parent(parent_ref);
|
||||
|
||||
if !is_last {
|
||||
outline.next(next_ref);
|
||||
}
|
||||
|
||||
if let Some(prev_rev) = prev_ref {
|
||||
outline.prev(prev_rev);
|
||||
}
|
||||
|
||||
if let Some(last_immediate_child) = node.children.last() {
|
||||
outline.first(Ref::new(id.get() + 1));
|
||||
outline.last(Ref::new(next_ref.get() - last_immediate_child.len() as i32));
|
||||
outline.count(-(node.children.len() as i32));
|
||||
}
|
||||
|
||||
outline.title(TextStr::trimmed(node.element.body.plain_text().trim()));
|
||||
|
||||
let loc = node.element.location().unwrap();
|
||||
let pos = ctx.document.introspector.position(loc);
|
||||
let index = pos.page.get() - 1;
|
||||
|
||||
// Don't link to non-exported pages.
|
||||
if let Some((Some(page), Some(page_ref))) =
|
||||
ctx.pages.get(index).zip(ctx.globals.pages.get(index))
|
||||
{
|
||||
if let Some(index) = gc.page_index_converter.pdf_page_index(page_index) {
|
||||
let y = (pos.point.y - Abs::pt(10.0)).max(Abs::zero());
|
||||
outline.dest().page(*page_ref).xyz(
|
||||
pos.point.x.to_f32(),
|
||||
(page.content.size.y - y).to_f32(),
|
||||
None,
|
||||
let dest = XyzDestination::new(
|
||||
index,
|
||||
krilla::geom::Point::from_xy(pos.point.x.to_f32(), y.to_f32()),
|
||||
);
|
||||
|
||||
let mut outline_node = OutlineNode::new(title, dest);
|
||||
for child in convert_nodes(&self.children, gc) {
|
||||
outline_node.push_child(child);
|
||||
}
|
||||
|
||||
outline.finish();
|
||||
|
||||
let mut prev_ref = None;
|
||||
for (i, child) in node.children.iter().enumerate() {
|
||||
prev_ref = Some(write_outline_item(
|
||||
ctx,
|
||||
chunk,
|
||||
alloc,
|
||||
child,
|
||||
id,
|
||||
prev_ref,
|
||||
i + 1 == node.children.len(),
|
||||
));
|
||||
return Some(outline_node);
|
||||
}
|
||||
|
||||
id
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn convert_nodes(nodes: &[HeadingNode], gc: &GlobalContext) -> Vec<OutlineNode> {
|
||||
nodes.iter().flat_map(|node| node.to_krilla(gc)).collect()
|
||||
}
|
||||
|
@ -1,248 +1,22 @@
|
||||
use std::collections::HashMap;
|
||||
use std::num::NonZeroU64;
|
||||
use std::num::NonZeroUsize;
|
||||
|
||||
use ecow::EcoString;
|
||||
use pdf_writer::types::{ActionType, AnnotationFlags, AnnotationType, NumberingStyle};
|
||||
use pdf_writer::{Filter, Finish, Name, Rect, Ref, Str};
|
||||
use typst_library::diag::SourceResult;
|
||||
use typst_library::foundations::Label;
|
||||
use typst_library::introspection::Location;
|
||||
use typst_library::layout::{Abs, Page};
|
||||
use typst_library::model::{Destination, Numbering};
|
||||
use krilla::page::{NumberingStyle, PageLabel};
|
||||
use typst_library::model::Numbering;
|
||||
|
||||
use crate::{
|
||||
content, AbsExt, PdfChunk, PdfOptions, Resources, WithDocument, WithRefs,
|
||||
WithResources,
|
||||
};
|
||||
|
||||
/// Construct page objects.
|
||||
#[typst_macros::time(name = "construct pages")]
|
||||
#[allow(clippy::type_complexity)]
|
||||
pub fn traverse_pages(
|
||||
state: &WithDocument,
|
||||
) -> SourceResult<(PdfChunk, (Vec<Option<EncodedPage>>, Resources<()>))> {
|
||||
let mut resources = Resources::default();
|
||||
let mut pages = Vec::with_capacity(state.document.pages.len());
|
||||
let mut skipped_pages = 0;
|
||||
for (i, page) in state.document.pages.iter().enumerate() {
|
||||
if state
|
||||
.options
|
||||
.page_ranges
|
||||
.as_ref()
|
||||
.is_some_and(|ranges| !ranges.includes_page_index(i))
|
||||
{
|
||||
// Don't export this page.
|
||||
pages.push(None);
|
||||
skipped_pages += 1;
|
||||
} else {
|
||||
let mut encoded = construct_page(state.options, &mut resources, page)?;
|
||||
encoded.label = page
|
||||
.numbering
|
||||
.as_ref()
|
||||
.and_then(|num| PdfPageLabel::generate(num, page.number))
|
||||
.or_else(|| {
|
||||
// When some pages were ignored from export, we show a page label with
|
||||
// the correct real (not logical) page number.
|
||||
// This is for consistency with normal output when pages have no numbering
|
||||
// and all are exported: the final PDF page numbers always correspond to
|
||||
// the real (not logical) page numbers. Here, the final PDF page number
|
||||
// will differ, but we can at least use labels to indicate what was
|
||||
// the corresponding real page number in the Typst document.
|
||||
(skipped_pages > 0).then(|| PdfPageLabel::arabic((i + 1) as u64))
|
||||
});
|
||||
pages.push(Some(encoded));
|
||||
}
|
||||
}
|
||||
|
||||
Ok((PdfChunk::new(), (pages, resources)))
|
||||
}
|
||||
|
||||
/// Construct a page object.
|
||||
#[typst_macros::time(name = "construct page")]
|
||||
fn construct_page(
|
||||
options: &PdfOptions,
|
||||
out: &mut Resources<()>,
|
||||
page: &Page,
|
||||
) -> SourceResult<EncodedPage> {
|
||||
Ok(EncodedPage {
|
||||
content: content::build(
|
||||
options,
|
||||
out,
|
||||
&page.frame,
|
||||
page.fill_or_transparent(),
|
||||
None,
|
||||
)?,
|
||||
label: None,
|
||||
})
|
||||
}
|
||||
|
||||
/// Allocate a reference for each exported page.
|
||||
pub fn alloc_page_refs(
|
||||
context: &WithResources,
|
||||
) -> SourceResult<(PdfChunk, Vec<Option<Ref>>)> {
|
||||
let mut chunk = PdfChunk::new();
|
||||
let page_refs = context
|
||||
.pages
|
||||
.iter()
|
||||
.map(|p| p.as_ref().map(|_| chunk.alloc()))
|
||||
.collect();
|
||||
Ok((chunk, page_refs))
|
||||
}
|
||||
|
||||
/// Write the page tree.
|
||||
pub fn write_page_tree(ctx: &WithRefs) -> SourceResult<(PdfChunk, Ref)> {
|
||||
let mut chunk = PdfChunk::new();
|
||||
let page_tree_ref = chunk.alloc.bump();
|
||||
|
||||
for i in 0..ctx.pages.len() {
|
||||
let content_id = chunk.alloc.bump();
|
||||
write_page(
|
||||
&mut chunk,
|
||||
ctx,
|
||||
content_id,
|
||||
page_tree_ref,
|
||||
&ctx.references.named_destinations.loc_to_dest,
|
||||
i,
|
||||
);
|
||||
}
|
||||
|
||||
let page_kids = ctx.globals.pages.iter().filter_map(Option::as_ref).copied();
|
||||
|
||||
chunk
|
||||
.pages(page_tree_ref)
|
||||
.count(page_kids.clone().count() as i32)
|
||||
.kids(page_kids);
|
||||
|
||||
Ok((chunk, page_tree_ref))
|
||||
}
|
||||
|
||||
/// Write a page tree node.
|
||||
fn write_page(
|
||||
chunk: &mut PdfChunk,
|
||||
ctx: &WithRefs,
|
||||
content_id: Ref,
|
||||
page_tree_ref: Ref,
|
||||
loc_to_dest: &HashMap<Location, Label>,
|
||||
i: usize,
|
||||
) {
|
||||
let Some((page, page_ref)) = ctx.pages[i].as_ref().zip(ctx.globals.pages[i]) else {
|
||||
// Page excluded from export.
|
||||
return;
|
||||
};
|
||||
|
||||
let mut annotations = Vec::with_capacity(page.content.links.len());
|
||||
for (dest, rect) in &page.content.links {
|
||||
let id = chunk.alloc();
|
||||
annotations.push(id);
|
||||
|
||||
let mut annotation = chunk.annotation(id);
|
||||
annotation.subtype(AnnotationType::Link).rect(*rect);
|
||||
annotation.border(0.0, 0.0, 0.0, None).flags(AnnotationFlags::PRINT);
|
||||
|
||||
let pos = match dest {
|
||||
Destination::Url(uri) => {
|
||||
annotation
|
||||
.action()
|
||||
.action_type(ActionType::Uri)
|
||||
.uri(Str(uri.as_bytes()));
|
||||
continue;
|
||||
}
|
||||
Destination::Position(pos) => *pos,
|
||||
Destination::Location(loc) => {
|
||||
if let Some(key) = loc_to_dest.get(loc) {
|
||||
annotation
|
||||
.action()
|
||||
.action_type(ActionType::GoTo)
|
||||
// `key` must be a `Str`, not a `Name`.
|
||||
.pair(Name(b"D"), Str(key.resolve().as_bytes()));
|
||||
continue;
|
||||
} else {
|
||||
ctx.document.introspector.position(*loc)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let index = pos.page.get() - 1;
|
||||
let y = (pos.point.y - Abs::pt(10.0)).max(Abs::zero());
|
||||
|
||||
// Don't add links to non-exported pages.
|
||||
if let Some((Some(page), Some(page_ref))) =
|
||||
ctx.pages.get(index).zip(ctx.globals.pages.get(index))
|
||||
{
|
||||
annotation
|
||||
.action()
|
||||
.action_type(ActionType::GoTo)
|
||||
.destination()
|
||||
.page(*page_ref)
|
||||
.xyz(pos.point.x.to_f32(), (page.content.size.y - y).to_f32(), None);
|
||||
}
|
||||
}
|
||||
|
||||
let mut page_writer = chunk.page(page_ref);
|
||||
page_writer.parent(page_tree_ref);
|
||||
|
||||
let w = page.content.size.x.to_f32();
|
||||
let h = page.content.size.y.to_f32();
|
||||
page_writer.media_box(Rect::new(0.0, 0.0, w, h));
|
||||
page_writer.contents(content_id);
|
||||
page_writer.pair(Name(b"Resources"), ctx.resources.reference);
|
||||
|
||||
if page.content.uses_opacities {
|
||||
page_writer
|
||||
.group()
|
||||
.transparency()
|
||||
.isolated(false)
|
||||
.knockout(false)
|
||||
.color_space()
|
||||
.srgb();
|
||||
}
|
||||
|
||||
page_writer.annotations(annotations);
|
||||
|
||||
page_writer.finish();
|
||||
|
||||
chunk
|
||||
.stream(content_id, page.content.content.wait())
|
||||
.filter(Filter::FlateDecode);
|
||||
}
|
||||
|
||||
/// Specification for a PDF page label.
|
||||
#[derive(Debug, Clone, PartialEq, Hash, Default)]
|
||||
pub(crate) struct PdfPageLabel {
|
||||
/// Can be any string or none. Will always be prepended to the numbering style.
|
||||
pub prefix: Option<EcoString>,
|
||||
/// Based on the numbering pattern.
|
||||
///
|
||||
/// If `None` or numbering is a function, the field will be empty.
|
||||
pub style: Option<PdfPageLabelStyle>,
|
||||
/// Offset for the page label start.
|
||||
///
|
||||
/// Describes where to start counting from when setting a style.
|
||||
/// (Has to be greater than or equal to 1.)
|
||||
pub offset: Option<NonZeroU64>,
|
||||
}
|
||||
|
||||
/// A PDF page label number style.
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
|
||||
pub enum PdfPageLabelStyle {
|
||||
/// Decimal arabic numerals (1, 2, 3).
|
||||
Arabic,
|
||||
/// Lowercase roman numerals (i, ii, iii).
|
||||
LowerRoman,
|
||||
/// Uppercase roman numerals (I, II, III).
|
||||
UpperRoman,
|
||||
/// Lowercase letters (`a` to `z` for the first 26 pages,
|
||||
/// `aa` to `zz` and so on for the next).
|
||||
LowerAlpha,
|
||||
/// Uppercase letters (`A` to `Z` for the first 26 pages,
|
||||
/// `AA` to `ZZ` and so on for the next).
|
||||
UpperAlpha,
|
||||
}
|
||||
|
||||
impl PdfPageLabel {
|
||||
/// Create a new `PdfNumbering` from a `Numbering` applied to a page
|
||||
pub(crate) trait PageLabelExt {
|
||||
/// Create a new `PageLabel` from a `Numbering` applied to a page
|
||||
/// number.
|
||||
fn generate(numbering: &Numbering, number: u64) -> Option<PdfPageLabel> {
|
||||
fn generate(numbering: &Numbering, number: u64) -> Option<PageLabel>;
|
||||
|
||||
/// Creates an arabic page label with the specified page number.
|
||||
/// For example, this will display page label `11` when given the page
|
||||
/// number 11.
|
||||
fn arabic(number: u64) -> PageLabel;
|
||||
}
|
||||
|
||||
impl PageLabelExt for PageLabel {
|
||||
fn generate(numbering: &Numbering, number: u64) -> Option<PageLabel> {
|
||||
{
|
||||
let Numbering::Pattern(pat) = numbering else {
|
||||
return None;
|
||||
};
|
||||
@ -252,8 +26,8 @@ impl PdfPageLabel {
|
||||
// If there is a suffix, we cannot use the common style optimisation,
|
||||
// since PDF does not provide a suffix field.
|
||||
let style = if pat.suffix.is_empty() {
|
||||
use krilla::page::NumberingStyle as Style;
|
||||
use typst_library::model::NumberingKind as Kind;
|
||||
use PdfPageLabelStyle as Style;
|
||||
match kind {
|
||||
Kind::Arabic => Some(Style::Arabic),
|
||||
Kind::LowerRoman => Some(Style::LowerRoman),
|
||||
@ -275,36 +49,16 @@ impl PdfPageLabel {
|
||||
(!prefix.is_empty()).then(|| prefix.clone())
|
||||
};
|
||||
|
||||
let offset = style.and(NonZeroU64::new(number));
|
||||
Some(PdfPageLabel { prefix, style, offset })
|
||||
}
|
||||
|
||||
/// Creates an arabic page label with the specified page number.
|
||||
/// For example, this will display page label `11` when given the page
|
||||
/// number 11.
|
||||
fn arabic(number: u64) -> PdfPageLabel {
|
||||
PdfPageLabel {
|
||||
prefix: None,
|
||||
style: Some(PdfPageLabelStyle::Arabic),
|
||||
offset: NonZeroU64::new(number),
|
||||
}
|
||||
let offset = style.and(number.try_into().ok().and_then(NonZeroUsize::new));
|
||||
Some(PageLabel::new(style, prefix.map(|s| s.to_string()), offset))
|
||||
}
|
||||
}
|
||||
|
||||
impl PdfPageLabelStyle {
|
||||
pub fn to_pdf_numbering_style(self) -> NumberingStyle {
|
||||
match self {
|
||||
PdfPageLabelStyle::Arabic => NumberingStyle::Arabic,
|
||||
PdfPageLabelStyle::LowerRoman => NumberingStyle::LowerRoman,
|
||||
PdfPageLabelStyle::UpperRoman => NumberingStyle::UpperRoman,
|
||||
PdfPageLabelStyle::LowerAlpha => NumberingStyle::LowerAlpha,
|
||||
PdfPageLabelStyle::UpperAlpha => NumberingStyle::UpperAlpha,
|
||||
fn arabic(number: u64) -> PageLabel {
|
||||
PageLabel::new(
|
||||
Some(NumberingStyle::Arabic),
|
||||
None,
|
||||
number.try_into().ok().and_then(NonZeroUsize::new),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Data for an exported page.
|
||||
pub struct EncodedPage {
|
||||
pub content: content::Encoded,
|
||||
pub label: Option<PdfPageLabel>,
|
||||
}
|
||||
|
379
crates/typst-pdf/src/paint.rs
Normal file
@ -0,0 +1,379 @@
//! Convert paint types from typst to krilla.

use krilla::color::{self, cmyk, luma, rgb};
use krilla::num::NormalizedF32;
use krilla::paint::{
    Fill, LinearGradient, Pattern, RadialGradient, SpreadMethod, Stop, Stroke,
    StrokeDash, SweepGradient,
};
use krilla::surface::Surface;
use typst_library::diag::SourceResult;
use typst_library::layout::{Abs, Angle, Quadrant, Ratio, Size, Transform};
use typst_library::visualize::{
    Color, ColorSpace, DashPattern, FillRule, FixedStroke, Gradient, Paint, RatioOrAngle,
    RelativeTo, Tiling, WeightedColor,
};
use typst_utils::Numeric;

use crate::convert::{handle_frame, FrameContext, GlobalContext, State};
use crate::util::{AbsExt, FillRuleExt, LineCapExt, LineJoinExt, TransformExt};

pub(crate) fn convert_fill(
    gc: &mut GlobalContext,
    paint_: &Paint,
    fill_rule_: FillRule,
    on_text: bool,
    surface: &mut Surface,
    state: &State,
    size: Size,
) -> SourceResult<Fill> {
    let (paint, opacity) = convert_paint(gc, paint_, on_text, surface, state, size)?;

    Ok(Fill {
        paint,
        rule: fill_rule_.to_krilla(),
        opacity: NormalizedF32::new(opacity as f32 / 255.0).unwrap(),
    })
}

pub(crate) fn convert_stroke(
    fc: &mut GlobalContext,
    stroke: &FixedStroke,
    on_text: bool,
    surface: &mut Surface,
    state: &State,
    size: Size,
) -> SourceResult<Stroke> {
    let (paint, opacity) =
        convert_paint(fc, &stroke.paint, on_text, surface, state, size)?;

    Ok(Stroke {
        paint,
        width: stroke.thickness.to_f32(),
        miter_limit: stroke.miter_limit.get() as f32,
        line_join: stroke.join.to_krilla(),
        line_cap: stroke.cap.to_krilla(),
        opacity: NormalizedF32::new(opacity as f32 / 255.0).unwrap(),
        dash: stroke.dash.as_ref().map(convert_dash),
    })
}

fn convert_paint(
    gc: &mut GlobalContext,
    paint: &Paint,
    on_text: bool,
    surface: &mut Surface,
    state: &State,
    mut size: Size,
) -> SourceResult<(krilla::paint::Paint, u8)> {
    // Edge cases for strokes.
    if size.x.is_zero() {
        size.x = Abs::pt(1.0);
    }

    if size.y.is_zero() {
        size.y = Abs::pt(1.0);
    }

    match paint {
        Paint::Solid(c) => {
            let (c, a) = convert_solid(c);
            Ok((c.into(), a))
        }
        Paint::Gradient(g) => Ok(convert_gradient(g, on_text, state, size)),
        Paint::Tiling(p) => convert_pattern(gc, p, on_text, surface, state),
    }
}

fn convert_solid(color: &Color) -> (color::Color, u8) {
    match color.space() {
        ColorSpace::D65Gray => {
            let (c, a) = convert_luma(color);
            (c.into(), a)
        }
        ColorSpace::Cmyk => (convert_cmyk(color).into(), 255),
        // Convert colors in all other color spaces into RGB.
        _ => {
            let (c, a) = convert_rgb(color);
            (c.into(), a)
        }
    }
}

fn convert_cmyk(color: &Color) -> cmyk::Color {
    let components = color.to_space(ColorSpace::Cmyk).to_vec4_u8();

    cmyk::Color::new(components[0], components[1], components[2], components[3])
}

fn convert_rgb(color: &Color) -> (rgb::Color, u8) {
    let components = color.to_space(ColorSpace::Srgb).to_vec4_u8();
    (rgb::Color::new(components[0], components[1], components[2]), components[3])
}

fn convert_luma(color: &Color) -> (luma::Color, u8) {
    let components = color.to_space(ColorSpace::D65Gray).to_vec4_u8();
    (luma::Color::new(components[0]), components[3])
}

fn convert_pattern(
    gc: &mut GlobalContext,
    pattern: &Tiling,
    on_text: bool,
    surface: &mut Surface,
    state: &State,
) -> SourceResult<(krilla::paint::Paint, u8)> {
    let transform = correct_transform(state, pattern.unwrap_relative(on_text));

    let mut stream_builder = surface.stream_builder();
    let mut surface = stream_builder.surface();
    let mut fc = FrameContext::new(pattern.frame().size());
    handle_frame(&mut fc, pattern.frame(), None, &mut surface, gc)?;
    surface.finish();
    let stream = stream_builder.finish();
    let pattern = Pattern {
        stream,
        transform: transform.to_krilla(),
        width: (pattern.size().x + pattern.spacing().x).to_pt() as _,
        height: (pattern.size().y + pattern.spacing().y).to_pt() as _,
    };

    Ok((pattern.into(), 255))
}

fn convert_gradient(
    gradient: &Gradient,
    on_text: bool,
    state: &State,
    size: Size,
) -> (krilla::paint::Paint, u8) {
    let size = match gradient.unwrap_relative(on_text) {
        RelativeTo::Self_ => size,
        RelativeTo::Parent => state.container_size(),
    };

    let angle = gradient.angle().unwrap_or_else(Angle::zero);
    let base_transform = correct_transform(state, gradient.unwrap_relative(on_text));
    let stops = convert_gradient_stops(gradient);
    match &gradient {
        Gradient::Linear(_) => {
            let (x1, y1, x2, y2) = {
                let (mut sin, mut cos) = (angle.sin(), angle.cos());

                // Scale to edges of unit square.
                let factor = cos.abs() + sin.abs();
                sin *= factor;
                cos *= factor;

                match angle.quadrant() {
                    Quadrant::First => (0.0, 0.0, cos as f32, sin as f32),
                    Quadrant::Second => (1.0, 0.0, cos as f32 + 1.0, sin as f32),
                    Quadrant::Third => (1.0, 1.0, cos as f32 + 1.0, sin as f32 + 1.0),
                    Quadrant::Fourth => (0.0, 1.0, cos as f32, sin as f32 + 1.0),
                }
            };

            let linear = LinearGradient {
                x1,
                y1,
                x2,
                y2,
                // The x and y coordinates are normalized, so we need to scale by the size.
                transform: base_transform
                    .pre_concat(Transform::scale(
                        Ratio::new(size.x.to_f32() as f64),
                        Ratio::new(size.y.to_f32() as f64),
                    ))
                    .to_krilla(),
                spread_method: SpreadMethod::Pad,
                stops,
                anti_alias: gradient.anti_alias(),
            };

            (linear.into(), 255)
        }
        Gradient::Radial(radial) => {
            let radial = RadialGradient {
                fx: radial.focal_center.x.get() as f32,
                fy: radial.focal_center.y.get() as f32,
                fr: radial.focal_radius.get() as f32,
                cx: radial.center.x.get() as f32,
                cy: radial.center.y.get() as f32,
                cr: radial.radius.get() as f32,
                transform: base_transform
                    .pre_concat(Transform::scale(
                        Ratio::new(size.x.to_f32() as f64),
                        Ratio::new(size.y.to_f32() as f64),
                    ))
                    .to_krilla(),
                spread_method: SpreadMethod::Pad,
                stops,
                anti_alias: gradient.anti_alias(),
            };

            (radial.into(), 255)
        }
        Gradient::Conic(conic) => {
            // Correct the gradient's angle.
            let cx = size.x.to_f32() * conic.center.x.get() as f32;
            let cy = size.y.to_f32() * conic.center.y.get() as f32;
            let actual_transform = base_transform
                // Adjust for the angle.
                .pre_concat(Transform::rotate_at(
                    angle,
                    Abs::pt(cx as f64),
                    Abs::pt(cy as f64),
                ))
                // The default start points in krilla and typst are on opposite
                // sides, so we need to flip it horizontally.
                .pre_concat(Transform::scale_at(
                    -Ratio::one(),
                    Ratio::one(),
                    Abs::pt(cx as f64),
                    Abs::pt(cy as f64),
                ));

            let sweep = SweepGradient {
                cx,
                cy,
                start_angle: 0.0,
                end_angle: 360.0,
                transform: actual_transform.to_krilla(),
                spread_method: SpreadMethod::Pad,
                stops,
                anti_alias: gradient.anti_alias(),
            };

            (sweep.into(), 255)
        }
    }
}

fn convert_gradient_stops(gradient: &Gradient) -> Vec<Stop> {
    let mut stops = vec![];

    let use_cmyk = gradient.stops().iter().all(|s| s.color.space() == ColorSpace::Cmyk);

    let mut add_single = |color: &Color, offset: Ratio| {
        let (color, opacity) = if use_cmyk {
            (convert_cmyk(color).into(), 255)
        } else {
            let (c, a) = convert_rgb(color);
            (c.into(), a)
        };

        let opacity = NormalizedF32::new((opacity as f32) / 255.0).unwrap();
        let offset = NormalizedF32::new(offset.get() as f32).unwrap();
        let stop = Stop { offset, color, opacity };
        stops.push(stop);
    };

    // Convert stops.
    match &gradient {
        Gradient::Linear(_) | Gradient::Radial(_) => {
            if let Some(s) = gradient.stops().first() {
                add_single(&s.color, s.offset.unwrap());
            }

            // Create the individual gradient functions for each pair of stops.
            for window in gradient.stops().windows(2) {
                let (first, second) = (window[0], window[1]);

                // If we have a hue index or are using Oklab, we will create several
                // stops in-between to make the gradient smoother without interpolation
                // issues with native color spaces.
                if gradient.space().hue_index().is_some() {
                    for i in 0..=32 {
                        let t = i as f64 / 32.0;
                        let real_t = Ratio::new(
                            first.offset.unwrap().get() * (1.0 - t)
                                + second.offset.unwrap().get() * t,
                        );

                        let c = gradient.sample(RatioOrAngle::Ratio(real_t));
                        add_single(&c, real_t);
                    }
                }

                add_single(&second.color, second.offset.unwrap());
            }
        }
        Gradient::Conic(conic) => {
            if let Some((c, t)) = conic.stops.first() {
                add_single(c, *t);
            }

            for window in conic.stops.windows(2) {
                let ((c0, t0), (c1, t1)) = (window[0], window[1]);

                // Precision:
                // - When both stops have the same color, insert a stop every 90deg.
                // - For a hue-based color space, insert 200 stops minimum.
                // - For any other color space, insert 20 stops minimum.
                let max_dt = if c0 == c1 {
                    0.25
                } else if conic.space.hue_index().is_some() {
                    0.005
                } else {
                    0.05
                };

                let mut t_x = t0.get();
                let dt = (t1.get() - t0.get()).min(max_dt);

                // Special casing for sharp gradients.
                if t0 == t1 {
                    add_single(&c1, t1);
                    continue;
                }

                while t_x < t1.get() {
                    let t_next = (t_x + dt).min(t1.get());

                    // The current progress in the current window.
                    let t = |t| (t - t0.get()) / (t1.get() - t0.get());

                    let c_next = Color::mix_iter(
                        [
                            WeightedColor::new(c0, 1.0 - t(t_next)),
                            WeightedColor::new(c1, t(t_next)),
                        ],
                        conic.space,
                    )
                    .unwrap();

                    add_single(&c_next, Ratio::new(t_next));
                    t_x = t_next;
                }

                add_single(&c1, t1);
            }
        }
    }

    stops
}

fn convert_dash(dash: &DashPattern<Abs, Abs>) -> StrokeDash {
    StrokeDash {
        array: dash.array.iter().map(|e| e.to_f32()).collect(),
        offset: dash.phase.to_f32(),
    }
}

fn correct_transform(state: &State, relative: RelativeTo) -> Transform {
    // In krilla, if we have a shape with a transform and a complex paint,
    // then the paint will inherit the transform of the shape.
    match relative {
        // Because of the above, we don't need to apply an additional transform here.
        RelativeTo::Self_ => Transform::identity(),
        // Because of the above, we need to first reverse the transform that will be
        // applied from the shape, and then re-apply the transform that is used for
        // the next parent container.
        RelativeTo::Parent => state
            .transform()
            .invert()
            .unwrap()
            .pre_concat(state.container_transform()),
    }
}
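
The comments in `correct_transform` carry the key invariant: krilla applies a shape's transform to its paint, so a parent-relative paint must first undo the local transform and then re-apply the container's. A self-contained sketch of why `invert` followed by `pre_concat` achieves this, using pure translations as a stand-in for full affine transforms (all types and names here are hypothetical):

    // Toy 2D translation standing in for a full affine transform.
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Translate { x: f64, y: f64 }

    impl Translate {
        fn pre_concat(self, other: Translate) -> Translate {
            // For pure translations, composition is just addition.
            Translate { x: self.x + other.x, y: self.y + other.y }
        }

        fn invert(self) -> Translate {
            Translate { x: -self.x, y: -self.y }
        }
    }

    fn main() {
        let local = Translate { x: 30.0, y: 10.0 };   // the shape's transform
        let container = Translate { x: 5.0, y: 5.0 }; // the parent container's

        // What the RelativeTo::Parent arm computes:
        let correction = local.invert().pre_concat(container);

        // krilla later applies the shape's transform on top; the net effect
        // is the container transform, as intended for parent-relative paints.
        assert_eq!(local.pre_concat(correction), container);
    }
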
@ -1,349 +0,0 @@
//! PDF resources.
//!
//! Resources are defined in dictionaries. They map identifiers such as `Im0` to
//! a PDF reference. Each [content stream] is associated with a resource dictionary.
//! The identifiers defined in the resources can then be used in content streams.
//!
//! [content stream]: `crate::content`

use std::collections::{BTreeMap, HashMap};
use std::hash::Hash;

use ecow::{eco_format, EcoString};
use pdf_writer::{Dict, Finish, Name, Ref};
use subsetter::GlyphRemapper;
use typst_library::diag::{SourceResult, StrResult};
use typst_library::text::{Font, Lang};
use typst_library::visualize::Image;
use typst_syntax::Span;
use typst_utils::Deferred;

use crate::color::ColorSpaces;
use crate::color_font::ColorFontMap;
use crate::extg::ExtGState;
use crate::gradient::PdfGradient;
use crate::image::EncodedImage;
use crate::tiling::TilingRemapper;
use crate::{PdfChunk, Renumber, WithEverything, WithResources};

/// All the resources that have been collected when traversing the document.
///
/// This does not allocate references to resources, only track what was used
/// and deduplicate what can be deduplicated.
///
/// You may notice that this structure is a tree: [`TilingRemapper`] and
/// [`ColorFontMap`] (that are present in the fields of [`Resources`]),
/// themselves contain [`Resources`] (that will be called "sub-resources" from
/// now on). Because color glyphs and tilings are defined using content
/// streams, just like pages, they can refer to resources too, which are tracked
/// by the respective sub-resources.
///
/// Each instance of this structure will become a `/Resources` dictionary in
/// the final PDF. It is not possible to use a single shared dictionary for all
/// pages, tilings and color fonts, because if a resource is listed in its own
/// `/Resources` dictionary, some PDF readers will fail to open the document.
///
/// Because we need to lazily initialize sub-resources (we don't know how deep
/// the tree will be before reading the document), and because this is done in
/// a context where no PDF reference allocator is available, `Resources` are
/// originally created with the type parameter `R = ()`. The reference for each
/// dictionary will only be allocated in the next phase, once we know the shape
/// of the tree, at which point `R` becomes `Ref`. No other value of `R` should
/// ever exist.
pub struct Resources<R = Ref> {
    /// The global reference to this resource dictionary, or `()` if it has not
    /// been allocated yet.
    pub reference: R,

    /// Handles color space writing.
    pub colors: ColorSpaces,

    /// Deduplicates fonts used across the document.
    pub fonts: Remapper<Font>,
    /// Deduplicates images used across the document.
    pub images: Remapper<Image>,
    /// Handles to deferred image conversions.
    pub deferred_images: HashMap<usize, (Deferred<StrResult<EncodedImage>>, Span)>,
    /// Deduplicates gradients used across the document.
    pub gradients: Remapper<PdfGradient>,
    /// Deduplicates tilings used across the document.
    pub tilings: Option<Box<TilingRemapper<R>>>,
    /// Deduplicates external graphics states used across the document.
    pub ext_gs: Remapper<ExtGState>,
    /// Deduplicates color glyphs.
    pub color_fonts: Option<Box<ColorFontMap<R>>>,

    // The fields below do not correspond to actual resources that will be
    // written in a dictionary, but are more meta-data about resources that
    // can't really live somewhere else.
    /// The number of glyphs for all referenced languages in the content stream.
    /// We keep track of this to determine the main document language.
    /// A BTreeMap is used to write a sorted list of languages to the metadata.
    pub languages: BTreeMap<Lang, usize>,

    /// For each font a mapping from used glyphs to their text representation.
    /// This is used for the PDF's /ToUnicode map, and important for copy-paste
    /// and searching.
    ///
    /// Note that the text representation may contain multiple chars in case of
    /// ligatures or similar things, and it may have no entry in the font's cmap
    /// (or only a private-use codepoint), like the “Th” in Linux Libertine.
    ///
    /// A glyph may have multiple entries in the font's cmap, and even the same
    /// glyph can have a different text representation within one document.
    /// But /ToUnicode does not support that, so we just save the first occurrence.
    pub glyph_sets: HashMap<Font, BTreeMap<u16, EcoString>>,
    /// Same as `glyph_sets`, but for color fonts.
    pub color_glyph_sets: HashMap<Font, BTreeMap<u16, EcoString>>,
    /// Stores the glyph remapper for each font for the subsetter.
    pub glyph_remappers: HashMap<Font, GlyphRemapper>,
}

impl<R: Renumber> Renumber for Resources<R> {
    fn renumber(&mut self, offset: i32) {
        self.reference.renumber(offset);

        if let Some(color_fonts) = &mut self.color_fonts {
            color_fonts.resources.renumber(offset);
        }

        if let Some(tilings) = &mut self.tilings {
            tilings.resources.renumber(offset);
        }
    }
}

impl Default for Resources<()> {
    fn default() -> Self {
        Resources {
            reference: (),
            colors: ColorSpaces::default(),
            fonts: Remapper::new("F"),
            images: Remapper::new("Im"),
            deferred_images: HashMap::new(),
            gradients: Remapper::new("Gr"),
            tilings: None,
            ext_gs: Remapper::new("Gs"),
            color_fonts: None,
            languages: BTreeMap::new(),
            glyph_sets: HashMap::new(),
            color_glyph_sets: HashMap::new(),
            glyph_remappers: HashMap::new(),
        }
    }
}

impl Resources<()> {
    /// Associate a reference with this resource dictionary (and do so
    /// recursively for sub-resources).
    pub fn with_refs(self, refs: &ResourcesRefs) -> Resources<Ref> {
        Resources {
            reference: refs.reference,
            colors: self.colors,
            fonts: self.fonts,
            images: self.images,
            deferred_images: self.deferred_images,
            gradients: self.gradients,
            tilings: self
                .tilings
                .zip(refs.tilings.as_ref())
                .map(|(p, r)| Box::new(p.with_refs(r))),
            ext_gs: self.ext_gs,
            color_fonts: self
                .color_fonts
                .zip(refs.color_fonts.as_ref())
                .map(|(c, r)| Box::new(c.with_refs(r))),
            languages: self.languages,
            glyph_sets: self.glyph_sets,
            color_glyph_sets: self.color_glyph_sets,
            glyph_remappers: self.glyph_remappers,
        }
    }
}

impl<R> Resources<R> {
    /// Run a function on this resource dictionary and all
    /// of its sub-resources.
    pub fn traverse<P>(&self, process: &mut P) -> SourceResult<()>
    where
        P: FnMut(&Self) -> SourceResult<()>,
    {
        process(self)?;
        if let Some(color_fonts) = &self.color_fonts {
            color_fonts.resources.traverse(process)?;
        }
        if let Some(tilings) = &self.tilings {
            tilings.resources.traverse(process)?;
        }
        Ok(())
    }
}

/// References for a resource tree.
///
/// This structure is a tree too, that should have the same structure as the
/// corresponding `Resources`.
pub struct ResourcesRefs {
    pub reference: Ref,
    pub color_fonts: Option<Box<ResourcesRefs>>,
    pub tilings: Option<Box<ResourcesRefs>>,
}

impl Renumber for ResourcesRefs {
    fn renumber(&mut self, offset: i32) {
        self.reference.renumber(offset);
        if let Some(color_fonts) = &mut self.color_fonts {
            color_fonts.renumber(offset);
        }
        if let Some(tilings) = &mut self.tilings {
            tilings.renumber(offset);
        }
    }
}

/// Allocate references for all resource dictionaries.
pub fn alloc_resources_refs(
    context: &WithResources,
) -> SourceResult<(PdfChunk, ResourcesRefs)> {
    let mut chunk = PdfChunk::new();
    /// Recursively explore resource dictionaries and assign them references.
    fn refs_for(resources: &Resources<()>, chunk: &mut PdfChunk) -> ResourcesRefs {
        ResourcesRefs {
            reference: chunk.alloc(),
            color_fonts: resources
                .color_fonts
                .as_ref()
                .map(|c| Box::new(refs_for(&c.resources, chunk))),
            tilings: resources
                .tilings
                .as_ref()
                .map(|p| Box::new(refs_for(&p.resources, chunk))),
        }
    }

    let refs = refs_for(&context.resources, &mut chunk);
    Ok((chunk, refs))
}

/// Write the resource dictionaries that will be referenced by all pages.
///
/// We add a reference to this dictionary to each page individually instead of
/// to the root node of the page tree because using the resource inheritance
/// feature breaks PDF merging with Apple Preview.
///
/// Also write resource dictionaries for Type3 fonts and PDF patterns.
pub fn write_resource_dictionaries(ctx: &WithEverything) -> SourceResult<(PdfChunk, ())> {
    let mut chunk = PdfChunk::new();
    let mut used_color_spaces = ColorSpaces::default();

    ctx.resources.traverse(&mut |resources| {
        used_color_spaces.merge(&resources.colors);

        let images_ref = chunk.alloc.bump();
        let patterns_ref = chunk.alloc.bump();
        let ext_gs_states_ref = chunk.alloc.bump();
        let color_spaces_ref = chunk.alloc.bump();

        let mut color_font_slices = Vec::new();
        let mut color_font_numbers = HashMap::new();
        if let Some(color_fonts) = &resources.color_fonts {
            for (_, font_slice) in color_fonts.iter() {
                color_font_numbers.insert(font_slice.clone(), color_font_slices.len());
                color_font_slices.push(font_slice);
            }
        }
        let color_font_remapper = Remapper {
            prefix: "Cf",
            to_pdf: color_font_numbers,
            to_items: color_font_slices,
        };

        resources
            .images
            .write(&ctx.references.images, &mut chunk.indirect(images_ref).dict());

        let mut patterns_dict = chunk.indirect(patterns_ref).dict();
        resources
            .gradients
            .write(&ctx.references.gradients, &mut patterns_dict);
        if let Some(p) = &resources.tilings {
            p.remapper.write(&ctx.references.tilings, &mut patterns_dict);
        }
        patterns_dict.finish();

        resources
            .ext_gs
            .write(&ctx.references.ext_gs, &mut chunk.indirect(ext_gs_states_ref).dict());

        let mut res_dict = chunk
            .indirect(resources.reference)
            .start::<pdf_writer::writers::Resources>();
        res_dict.pair(Name(b"XObject"), images_ref);
        res_dict.pair(Name(b"Pattern"), patterns_ref);
        res_dict.pair(Name(b"ExtGState"), ext_gs_states_ref);
        res_dict.pair(Name(b"ColorSpace"), color_spaces_ref);

        // TODO: can't this be an indirect reference too?
        let mut fonts_dict = res_dict.fonts();
        resources.fonts.write(&ctx.references.fonts, &mut fonts_dict);
        color_font_remapper.write(&ctx.references.color_fonts, &mut fonts_dict);
        fonts_dict.finish();

        res_dict.finish();

        let color_spaces = chunk.indirect(color_spaces_ref).dict();
        resources
            .colors
            .write_color_spaces(color_spaces, &ctx.globals.color_functions);

        Ok(())
    })?;

    used_color_spaces.write_functions(&mut chunk, &ctx.globals.color_functions);

    Ok((chunk, ()))
}

/// Assigns new, consecutive PDF-internal indices to items.
pub struct Remapper<T> {
    /// The prefix to use when naming these resources.
    prefix: &'static str,
    /// Forwards from the items to the pdf indices.
    to_pdf: HashMap<T, usize>,
    /// Backwards from the pdf indices to the items.
    to_items: Vec<T>,
}

impl<T> Remapper<T>
where
    T: Eq + Hash + Clone,
{
    /// Create an empty mapping.
    pub fn new(prefix: &'static str) -> Self {
        Self { prefix, to_pdf: HashMap::new(), to_items: vec![] }
    }

    /// Insert an item in the mapping if it was not already present.
    pub fn insert(&mut self, item: T) -> usize {
        let to_layout = &mut self.to_items;
        *self.to_pdf.entry(item.clone()).or_insert_with(|| {
            let pdf_index = to_layout.len();
            to_layout.push(item);
            pdf_index
        })
    }

    /// All items in this mapping.
    pub fn items(&self) -> impl Iterator<Item = &T> + '_ {
        self.to_items.iter()
    }

    /// Write this list of items in a Resource dictionary.
    fn write(&self, mapping: &HashMap<T, Ref>, dict: &mut Dict) {
        for (number, item) in self.items().enumerate() {
            let name = eco_format!("{}{}", self.prefix, number);
            let reference = mapping[item];
            dict.pair(Name(name.as_bytes()), reference);
        }
    }
}
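
The (now removed) `Remapper` above captures the resource-naming scheme: insertion order fixes the PDF-internal index, and `write` later pairs names like `F0` or `Im1` with references. A standalone re-implementation sketch of the dedup-and-name idea (simplified toy code, not the crate's actual types):

    use std::collections::HashMap;

    // Assign stable, consecutive indices to deduplicated items.
    struct Remapper {
        prefix: &'static str,
        to_index: HashMap<String, usize>,
        items: Vec<String>,
    }

    impl Remapper {
        fn new(prefix: &'static str) -> Self {
            Self { prefix, to_index: HashMap::new(), items: Vec::new() }
        }

        fn insert(&mut self, item: &str) -> usize {
            let items = &mut self.items;
            *self.to_index.entry(item.to_string()).or_insert_with(|| {
                items.push(item.to_string());
                items.len() - 1
            })
        }

        fn names(&self) -> impl Iterator<Item = String> + '_ {
            self.items
                .iter()
                .enumerate()
                .map(|(i, _)| format!("{}{}", self.prefix, i))
        }
    }

    fn main() {
        let mut fonts = Remapper::new("F");
        assert_eq!(fonts.insert("Libertinus Serif"), 0);
        assert_eq!(fonts.insert("New Computer Modern"), 1);
        // Re-inserting is a no-op and returns the existing index.
        assert_eq!(fonts.insert("Libertinus Serif"), 0);
        assert_eq!(fonts.names().collect::<Vec<_>>(), vec!["F0", "F1"]);
    }
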
106
crates/typst-pdf/src/shape.rs
Normal file
@ -0,0 +1,106 @@
use krilla::geom::{Path, PathBuilder, Rect};
use krilla::surface::Surface;
use typst_library::diag::SourceResult;
use typst_library::visualize::{Geometry, Shape};
use typst_syntax::Span;

use crate::convert::{FrameContext, GlobalContext};
use crate::paint;
use crate::util::{convert_path, AbsExt, TransformExt};

#[typst_macros::time(name = "handle shape")]
pub(crate) fn handle_shape(
    fc: &mut FrameContext,
    shape: &Shape,
    surface: &mut Surface,
    gc: &mut GlobalContext,
    span: Span,
) -> SourceResult<()> {
    surface.set_location(span.into_raw().get());
    surface.push_transform(&fc.state().transform().to_krilla());

    if let Some(path) = convert_geometry(&shape.geometry) {
        let fill = if let Some(paint) = &shape.fill {
            Some(paint::convert_fill(
                gc,
                paint,
                shape.fill_rule,
                false,
                surface,
                fc.state(),
                shape.geometry.bbox_size(),
            )?)
        } else {
            None
        };

        let stroke = shape.stroke.as_ref().and_then(|stroke| {
            if stroke.thickness.to_f32() > 0.0 {
                Some(stroke)
            } else {
                None
            }
        });

        let stroke = if let Some(stroke) = &stroke {
            let stroke = paint::convert_stroke(
                gc,
                stroke,
                false,
                surface,
                fc.state(),
                shape.geometry.bbox_size(),
            )?;

            Some(stroke)
        } else {
            None
        };

        // Otherwise, krilla will by default fill with a black paint.
        if fill.is_some() || stroke.is_some() {
            surface.set_fill(fill);
            surface.set_stroke(stroke);
            surface.draw_path(&path);
        }
    }

    surface.pop();
    surface.reset_location();

    Ok(())
}

fn convert_geometry(geometry: &Geometry) -> Option<Path> {
    let mut path_builder = PathBuilder::new();

    match geometry {
        Geometry::Line(l) => {
            path_builder.move_to(0.0, 0.0);
            path_builder.line_to(l.x.to_f32(), l.y.to_f32());
        }
        Geometry::Rect(size) => {
            let w = size.x.to_f32();
            let h = size.y.to_f32();
            let rect = if w < 0.0 || h < 0.0 {
                // krilla doesn't normally allow for negative dimensions, but
                // Typst supports them, so we apply a transform if needed.
                let transform =
                    krilla::geom::Transform::from_scale(w.signum(), h.signum());
                Rect::from_xywh(0.0, 0.0, w.abs(), h.abs())
                    .and_then(|rect| rect.transform(transform))
            } else {
                Rect::from_xywh(0.0, 0.0, w, h)
            };

            if let Some(rect) = rect {
                path_builder.push_rect(rect);
            }
        }
        Geometry::Curve(c) => {
            convert_path(c, &mut path_builder);
        }
    }

    path_builder.finish()
}
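
`convert_geometry` handles Typst's negative rectangle dimensions by taking absolute values and mirroring with a signum scale. A tiny standalone check of that normalization (hypothetical `normalize` helper, plain floats instead of krilla's geometry types):

    // Split a possibly-negative w/h pair into absolute dimensions plus a
    // mirroring scale, as convert_geometry does for Typst's negative rects.
    fn normalize(w: f32, h: f32) -> ((f32, f32), (f32, f32)) {
        ((w.abs(), h.abs()), (w.signum(), h.signum()))
    }

    fn main() {
        let ((w, h), (sx, sy)) = normalize(-10.0, 20.0);
        assert_eq!((w, h), (10.0, 20.0));
        // Scaling the far corner by the signs recovers the original corner.
        assert_eq!((w * sx, h * sy), (-10.0, 20.0));
    }
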
135
crates/typst-pdf/src/text.rs
Normal file
@ -0,0 +1,135 @@
use std::ops::Range;
use std::sync::Arc;

use bytemuck::TransparentWrapper;
use krilla::surface::{Location, Surface};
use krilla::text::GlyphId;
use typst_library::diag::{bail, SourceResult};
use typst_library::layout::Size;
use typst_library::text::{Font, Glyph, TextItem};
use typst_library::visualize::FillRule;
use typst_syntax::Span;

use crate::convert::{FrameContext, GlobalContext};
use crate::paint;
use crate::util::{display_font, AbsExt, TransformExt};

#[typst_macros::time(name = "handle text")]
pub(crate) fn handle_text(
    fc: &mut FrameContext,
    t: &TextItem,
    surface: &mut Surface,
    gc: &mut GlobalContext,
) -> SourceResult<()> {
    *gc.languages.entry(t.lang).or_insert(0) += t.glyphs.len();

    let font = convert_font(gc, t.font.clone())?;
    let fill = paint::convert_fill(
        gc,
        &t.fill,
        FillRule::NonZero,
        true,
        surface,
        fc.state(),
        Size::zero(),
    )?;
    let stroke =
        if let Some(stroke) = t.stroke.as_ref().map(|s| {
            paint::convert_stroke(gc, s, true, surface, fc.state(), Size::zero())
        }) {
            Some(stroke?)
        } else {
            None
        };
    let text = t.text.as_str();
    let size = t.size;
    let glyphs: &[PdfGlyph] = TransparentWrapper::wrap_slice(t.glyphs.as_slice());

    surface.push_transform(&fc.state().transform().to_krilla());
    surface.set_fill(Some(fill));
    surface.set_stroke(stroke);
    surface.draw_glyphs(
        krilla::geom::Point::from_xy(0.0, 0.0),
        glyphs,
        font.clone(),
        text,
        size.to_f32(),
        false,
    );

    surface.pop();

    Ok(())
}

fn convert_font(
    gc: &mut GlobalContext,
    typst_font: Font,
) -> SourceResult<krilla::text::Font> {
    if let Some(font) = gc.fonts_forward.get(&typst_font) {
        Ok(font.clone())
    } else {
        let font = build_font(typst_font.clone())?;

        gc.fonts_forward.insert(typst_font.clone(), font.clone());
        gc.fonts_backward.insert(font.clone(), typst_font.clone());

        Ok(font)
    }
}

#[comemo::memoize]
fn build_font(typst_font: Font) -> SourceResult<krilla::text::Font> {
    let font_data: Arc<dyn AsRef<[u8]> + Send + Sync> =
        Arc::new(typst_font.data().clone());

    match krilla::text::Font::new(font_data.into(), typst_font.index()) {
        None => {
            let font_str = display_font(&typst_font);
            bail!(Span::detached(), "failed to process font {font_str}");
        }
        Some(f) => Ok(f),
    }
}

#[derive(TransparentWrapper, Debug)]
#[repr(transparent)]
struct PdfGlyph(Glyph);

impl krilla::text::Glyph for PdfGlyph {
    #[inline(always)]
    fn glyph_id(&self) -> GlyphId {
        GlyphId::new(self.0.id as u32)
    }

    #[inline(always)]
    fn text_range(&self) -> Range<usize> {
        self.0.range.start as usize..self.0.range.end as usize
    }

    #[inline(always)]
    fn x_advance(&self, size: f32) -> f32 {
        // Don't use `Em::at`, because it contains an expensive check whether the result is finite.
        self.0.x_advance.get() as f32 * size
    }

    #[inline(always)]
    fn x_offset(&self, size: f32) -> f32 {
        // Don't use `Em::at`, because it contains an expensive check whether the result is finite.
        self.0.x_offset.get() as f32 * size
    }

    #[inline(always)]
    fn y_offset(&self, _: f32) -> f32 {
        0.0
    }

    #[inline(always)]
    fn y_advance(&self, _: f32) -> f32 {
        0.0
    }

    fn location(&self) -> Option<Location> {
        Some(self.0.span.0.into_raw().get())
    }
}
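
`PdfGlyph` uses `bytemuck::TransparentWrapper` so a borrowed `&[Glyph]` can be viewed as `&[PdfGlyph]` without copying, which is what lets `handle_text` pass Typst's glyph buffer straight to `draw_glyphs`. A minimal standalone illustration of the pattern with toy types (requires bytemuck's `derive` feature):

    use bytemuck::TransparentWrapper;

    #[derive(Debug)]
    struct Glyph {
        id: u16,
    }

    // #[repr(transparent)] guarantees identical layout, which is what makes
    // the zero-copy slice reinterpretation below sound.
    #[derive(TransparentWrapper)]
    #[repr(transparent)]
    struct Wrapper(Glyph);

    impl Wrapper {
        fn id(&self) -> u16 {
            self.0.id
        }
    }

    fn main() {
        let glyphs = vec![Glyph { id: 3 }, Glyph { id: 7 }];
        let wrapped: &[Wrapper] = TransparentWrapper::wrap_slice(glyphs.as_slice());
        assert_eq!(wrapped[1].id(), 7);
    }
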
@ -1,184 +0,0 @@
use std::collections::HashMap;

use ecow::eco_format;
use pdf_writer::types::{ColorSpaceOperand, PaintType, TilingType};
use pdf_writer::{Filter, Name, Rect, Ref};
use typst_library::diag::SourceResult;
use typst_library::layout::{Abs, Ratio, Transform};
use typst_library::visualize::{RelativeTo, Tiling};
use typst_utils::Numeric;

use crate::color::PaintEncode;
use crate::resources::{Remapper, ResourcesRefs};
use crate::{content, transform_to_array, PdfChunk, Resources, WithGlobalRefs};

/// Writes the actual patterns (tiling patterns) to the PDF.
/// This is performed once after writing all pages.
pub fn write_tilings(
    context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<PdfTiling, Ref>)> {
    let mut chunk = PdfChunk::new();
    let mut out = HashMap::new();
    context.resources.traverse(&mut |resources| {
        let Some(patterns) = &resources.tilings else {
            return Ok(());
        };

        for pdf_pattern in patterns.remapper.items() {
            let PdfTiling { transform, pattern, content, .. } = pdf_pattern;
            if out.contains_key(pdf_pattern) {
                continue;
            }

            let tiling = chunk.alloc();
            out.insert(pdf_pattern.clone(), tiling);

            let mut tiling_pattern = chunk.tiling_pattern(tiling, content);
            tiling_pattern
                .tiling_type(TilingType::ConstantSpacing)
                .paint_type(PaintType::Colored)
                .bbox(Rect::new(
                    0.0,
                    0.0,
                    pattern.size().x.to_pt() as _,
                    pattern.size().y.to_pt() as _,
                ))
                .x_step((pattern.size().x + pattern.spacing().x).to_pt() as _)
                .y_step((pattern.size().y + pattern.spacing().y).to_pt() as _);

            // The actual resource dict will be written in a later step.
            tiling_pattern.pair(Name(b"Resources"), patterns.resources.reference);

            tiling_pattern
                .matrix(transform_to_array(
                    transform
                        .pre_concat(Transform::scale(Ratio::one(), -Ratio::one()))
                        .post_concat(Transform::translate(
                            Abs::zero(),
                            pattern.spacing().y,
                        )),
                ))
                .filter(Filter::FlateDecode);
        }

        Ok(())
    })?;

    Ok((chunk, out))
}

/// A pattern and its transform.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct PdfTiling {
    /// The transform to apply to the pattern.
    pub transform: Transform,
    /// The pattern to paint.
    pub pattern: Tiling,
    /// The rendered pattern.
    pub content: Vec<u8>,
}

/// Registers a pattern with the PDF.
fn register_pattern(
    ctx: &mut content::Builder,
    pattern: &Tiling,
    on_text: bool,
    mut transforms: content::Transforms,
) -> SourceResult<usize> {
    let patterns = ctx
        .resources
        .tilings
        .get_or_insert_with(|| Box::new(TilingRemapper::new()));

    // Edge cases for strokes.
    if transforms.size.x.is_zero() {
        transforms.size.x = Abs::pt(1.0);
    }

    if transforms.size.y.is_zero() {
        transforms.size.y = Abs::pt(1.0);
    }

    let transform = match pattern.unwrap_relative(on_text) {
        RelativeTo::Self_ => transforms.transform,
        RelativeTo::Parent => transforms.container_transform,
    };

    // Render the body.
    let content = content::build(
        ctx.options,
        &mut patterns.resources,
        pattern.frame(),
        None,
        None,
    )?;

    let pdf_pattern = PdfTiling {
        transform,
        pattern: pattern.clone(),
        content: content.content.wait().clone(),
    };

    Ok(patterns.remapper.insert(pdf_pattern))
}

impl PaintEncode for Tiling {
    fn set_as_fill(
        &self,
        ctx: &mut content::Builder,
        on_text: bool,
        transforms: content::Transforms,
    ) -> SourceResult<()> {
        ctx.reset_fill_color_space();

        let index = register_pattern(ctx, self, on_text, transforms)?;
        let id = eco_format!("P{index}");
        let name = Name(id.as_bytes());

        ctx.content.set_fill_color_space(ColorSpaceOperand::Pattern);
        ctx.content.set_fill_pattern(None, name);
        Ok(())
    }

    fn set_as_stroke(
        &self,
        ctx: &mut content::Builder,
        on_text: bool,
        transforms: content::Transforms,
    ) -> SourceResult<()> {
        ctx.reset_stroke_color_space();

        let index = register_pattern(ctx, self, on_text, transforms)?;
        let id = eco_format!("P{index}");
        let name = Name(id.as_bytes());

        ctx.content.set_stroke_color_space(ColorSpaceOperand::Pattern);
        ctx.content.set_stroke_pattern(None, name);
        Ok(())
    }
}

/// De-duplicate patterns and the resources they require to be drawn.
pub struct TilingRemapper<R> {
    /// Pattern de-duplicator.
    pub remapper: Remapper<PdfTiling>,
    /// PDF resources that are used by these patterns.
    pub resources: Resources<R>,
}

impl TilingRemapper<()> {
    pub fn new() -> Self {
        Self {
            remapper: Remapper::new("P"),
            resources: Resources::default(),
        }
    }

    /// Allocate a reference to the resource dictionary of these patterns.
    pub fn with_refs(self, refs: &ResourcesRefs) -> TilingRemapper<Ref> {
        TilingRemapper {
            remapper: self.remapper,
            resources: self.resources.with_refs(refs),
        }
    }
}
120
crates/typst-pdf/src/util.rs
Normal file
@ -0,0 +1,120 @@
//! Basic utilities for converting typst types to krilla.

use krilla::geom as kg;
use krilla::geom::PathBuilder;
use krilla::paint as kp;
use typst_library::layout::{Abs, Point, Size, Transform};
use typst_library::text::Font;
use typst_library::visualize::{Curve, CurveItem, FillRule, LineCap, LineJoin};

pub(crate) trait SizeExt {
    fn to_krilla(&self) -> kg::Size;
}

impl SizeExt for Size {
    fn to_krilla(&self) -> kg::Size {
        kg::Size::from_wh(self.x.to_f32(), self.y.to_f32()).unwrap()
    }
}

pub(crate) trait PointExt {
    fn to_krilla(&self) -> kg::Point;
}

impl PointExt for Point {
    fn to_krilla(&self) -> kg::Point {
        kg::Point::from_xy(self.x.to_f32(), self.y.to_f32())
    }
}

pub(crate) trait LineCapExt {
    fn to_krilla(&self) -> kp::LineCap;
}

impl LineCapExt for LineCap {
    fn to_krilla(&self) -> kp::LineCap {
        match self {
            LineCap::Butt => kp::LineCap::Butt,
            LineCap::Round => kp::LineCap::Round,
            LineCap::Square => kp::LineCap::Square,
        }
    }
}

pub(crate) trait LineJoinExt {
    fn to_krilla(&self) -> kp::LineJoin;
}

impl LineJoinExt for LineJoin {
    fn to_krilla(&self) -> kp::LineJoin {
        match self {
            LineJoin::Miter => kp::LineJoin::Miter,
            LineJoin::Round => kp::LineJoin::Round,
            LineJoin::Bevel => kp::LineJoin::Bevel,
        }
    }
}

pub(crate) trait TransformExt {
    fn to_krilla(&self) -> kg::Transform;
}

impl TransformExt for Transform {
    fn to_krilla(&self) -> kg::Transform {
        kg::Transform::from_row(
            self.sx.get() as f32,
            self.ky.get() as f32,
            self.kx.get() as f32,
            self.sy.get() as f32,
            self.tx.to_f32(),
            self.ty.to_f32(),
        )
    }
}

pub(crate) trait FillRuleExt {
    fn to_krilla(&self) -> kp::FillRule;
}

impl FillRuleExt for FillRule {
    fn to_krilla(&self) -> kp::FillRule {
        match self {
            FillRule::NonZero => kp::FillRule::NonZero,
            FillRule::EvenOdd => kp::FillRule::EvenOdd,
        }
    }
}

pub(crate) trait AbsExt {
    fn to_f32(self) -> f32;
}

impl AbsExt for Abs {
    fn to_f32(self) -> f32 {
        self.to_pt() as f32
    }
}

/// Display the font family of a font.
pub(crate) fn display_font(font: &Font) -> &str {
    &font.info().family
}

/// Convert a typst path to a krilla path.
pub(crate) fn convert_path(path: &Curve, builder: &mut PathBuilder) {
    for item in &path.0 {
        match item {
            CurveItem::Move(p) => builder.move_to(p.x.to_f32(), p.y.to_f32()),
            CurveItem::Line(p) => builder.line_to(p.x.to_f32(), p.y.to_f32()),
            CurveItem::Cubic(p1, p2, p3) => builder.cubic_to(
                p1.x.to_f32(),
                p1.y.to_f32(),
                p2.x.to_f32(),
                p2.y.to_f32(),
                p3.x.to_f32(),
                p3.y.to_f32(),
            ),
            CurveItem::Close => builder.close(),
        }
    }
}
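
The `*Ext` traits in util.rs follow the standard extension-trait workaround for Rust's orphan rule: this crate owns neither the typst types nor the krilla types, so it cannot add `From` impls between them, but it can define and implement its own conversion trait. A standalone sketch of the pattern with stand-in modules (toy types, not the real crates):

    // Pretend these come from two foreign crates.
    mod typst_like {
        pub struct LineCap(pub u8);
    }
    mod krilla_like {
        #[derive(Debug, PartialEq)]
        pub enum LineCap { Butt, Round, Square }
    }

    // The local extension trait sidesteps the orphan rule: we own the trait,
    // so we may implement it for the foreign type.
    trait LineCapExt {
        fn to_krilla(&self) -> krilla_like::LineCap;
    }

    impl LineCapExt for typst_like::LineCap {
        fn to_krilla(&self) -> krilla_like::LineCap {
            match self.0 {
                0 => krilla_like::LineCap::Butt,
                1 => krilla_like::LineCap::Round,
                _ => krilla_like::LineCap::Square,
            }
        }
    }

    fn main() {
        assert_eq!(typst_like::LineCap(1).to_krilla(), krilla_like::LineCap::Round);
    }
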
@ -271,11 +271,9 @@ fn math_expr_prec(p: &mut Parser, min_prec: usize, stop: SyntaxKind) {
        }

        SyntaxKind::Text | SyntaxKind::MathText | SyntaxKind::MathShorthand => {
            continuable = !p.at(SyntaxKind::MathShorthand)
                && matches!(
                    math_class(p.current_text()),
                    None | Some(MathClass::Alphabetic)
                );
            // `a(b)/c` parses as `(a(b))/c` if `a` is continuable.
            continuable = math_class(p.current_text()) == Some(MathClass::Alphabetic)
                || p.current_text().chars().all(char::is_alphabetic);
            if !maybe_delimited(p) {
                p.eat();
            }
@ -498,7 +498,7 @@ impl World for DocWorld {
    }

    fn font(&self, index: usize) -> Option<Font> {
        Some(FONTS.1[index].clone())
        FONTS.1.get(index).cloned()
    }

    fn today(&self, _: Option<i64>) -> Option<Datetime> {
BIN
tests/ref/grid-rtl-counter.png
Normal file
After: 272 B
BIN
tests/ref/grid-rtl-rowspan-counter-equal.png
Normal file
After: 272 B
BIN
tests/ref/grid-rtl-rowspan-counter-mixed-1.png
Normal file
After: 360 B
BIN
tests/ref/grid-rtl-rowspan-counter-mixed-2.png
Normal file
After: 361 B
BIN
tests/ref/grid-rtl-rowspan-counter-unequal-1.png
Normal file
After: 361 B
BIN
tests/ref/grid-rtl-rowspan-counter-unequal-2.png
Normal file
After: 360 B
BIN
tests/ref/image-exif-rotation.png
Normal file
After: 1.4 KiB
BIN
tests/ref/issue-4828-math-number-multi-char.png
Normal file
After: 465 B
BIN
tests/ref/issue-6242-tight-list-attach-spacing.png
Normal file
After: 410 B
Before: 200 B
BIN
tests/ref/math-accent-dotless-disabled.png
Normal file
After: 311 B
BIN
tests/ref/math-accent-dotless-set-rule.png
Normal file
After: 147 B
BIN
tests/ref/math-accent-flattened.png
Normal file
After: 464 B
Before: 570 B, After: 506 B
Before: 984 B, After: 1.0 KiB
Before: 5.4 KiB, After: 3.5 KiB
BIN
tests/ref/math-mat-vec-cases-unity.png
Normal file
After: 1.2 KiB
Before: 856 B, After: 651 B
Before: 2.7 KiB, After: 2.7 KiB
Before: 2.3 KiB, After: 2.3 KiB
Before: 573 B, After: 568 B
@ -67,7 +67,7 @@ impl World for TestWorld {
    }

    fn font(&self, index: usize) -> Option<Font> {
        Some(self.base.fonts[index].clone())
        self.base.fonts.get(index).cloned()
    }

    fn today(&self, _: Option<i64>) -> Option<Datetime> {
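
Both `World` implementations in this diff receive the same hardening: `fonts[index]` panics when the index is out of range, while `fonts.get(index).cloned()` returns `None`. A standalone illustration:

    fn main() {
        let fonts = vec!["Libertinus Serif".to_string()];

        // Checked lookup: out-of-range indices yield None instead of panicking.
        assert_eq!(fonts.get(0).cloned(), Some("Libertinus Serif".to_string()));
        assert_eq!(fonts.get(5).cloned(), None);
    }
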
@ -179,6 +179,10 @@
#test((2,).last(), 2)
#test((1, 2, 3).first(), 1)
#test((1, 2, 3).last(), 3)
#test((1, 2).first(default: 99), 1)
#test(().first(default: 99), 99)
#test((1, 2).last(default: 99), 2)
#test(().last(default: 99), 99)

--- array-first-empty ---
// Error: 2-12 array is empty
@ -355,6 +359,12 @@
#test((2, 1, 3, 10, 5, 8, 6, -7, 2).sorted(), (-7, 1, 2, 2, 3, 5, 6, 8, 10))
#test((2, 1, 3, -10, -5, 8, 6, -7, 2).sorted(key: x => x), (-10, -7, -5, 1, 2, 2, 3, 6, 8))
#test((2, 1, 3, -10, -5, 8, 6, -7, 2).sorted(key: x => x * x), (1, 2, 2, 3, -5, 6, -7, 8, -10))
#test(("I", "the", "hi", "text").sorted(by: (x, y) => x.len() < y.len()), ("I", "hi", "the", "text"))
#test(("I", "the", "hi", "text").sorted(key: x => x.len(), by: (x, y) => y < x), ("text", "the", "hi", "I"))

--- array-sorted-invalid-by-function ---
// Error: 2-39 expected boolean from `by` function, got string
#(1, 2, 3).sorted(by: (_, _) => "hmm")

--- array-sorted-key-function-positional-1 ---
// Error: 12-18 unexpected argument
@ -52,3 +52,9 @@ _Tiger!_
#eval(mode: "math", "f(a) = cases(a + b\, space space x >= 3,a + b\, space space x = 5)")

$f(a) = cases(a + b\, space space x >= 3,a + b\, space space x = 5)$

--- issue-6067-eval-warnings ---
// Test that eval shows warnings from the executed code.
// Warning: 7-11 no text within stars
// Hint: 7-11 using multiple consecutive stars (e.g. **) has no additional effect
#eval("**", mode: "markup")
@ -193,3 +193,143 @@
  ),
  ..range(0, 10).map(i => ([\##i], table.cell(stroke: green)[123], table.cell(stroke: blue)[456], [789], [?], table.hline(start: 4, end: 5, stroke: red))).flatten()
)

--- grid-rtl-counter ---
// Test interaction between RTL and counters
#set text(dir: rtl)
#let test = counter("test")
#grid(
  columns: (1fr, 1fr),
  inset: 5pt,
  align: center,
  [
    a: // should produce 1
    #test.step()
    #context test.get().first()
  ],
  [
    b: // should produce 2
    #test.step()
    #context test.get().first()
  ],
)

--- grid-rtl-rowspan-counter-equal ---
// Test interaction between RTL and counters
#set text(dir: rtl)
#let test = counter("test")
#grid(
  columns: (1fr, 1fr),
  inset: 5pt,
  align: center,
  grid.cell(rowspan: 2, [
    a: // should produce 1
    #test.step()
    #context test.get().first()
  ]),
  grid.cell(rowspan: 2, [
    b: // should produce 2
    #test.step()
    #context test.get().first()
  ]),
)

--- grid-rtl-rowspan-counter-unequal-1 ---
// Test interaction between RTL and counters
#set text(dir: rtl)
#let test = counter("test")
#grid(
  columns: (1fr, 1fr),
  inset: 5pt,
  align: center,
  grid.cell(rowspan: 5, [
    b: // will produce 2
    #test.step()
    #context test.get().first()
  ]),
  grid.cell(rowspan: 2, [
    a: // will produce 1
    #test.step()
    #context test.get().first()
  ]),
  grid.cell(rowspan: 3, [
    c: // will produce 3
    #test.step()
    #context test.get().first()
  ]),
)

--- grid-rtl-rowspan-counter-unequal-2 ---
// Test interaction between RTL and counters
#set text(dir: rtl)
#let test = counter("test")
#grid(
  columns: (1fr, 1fr),
  inset: 5pt,
  align: center,
  grid.cell(rowspan: 2, [
    a: // will produce 1
    #test.step()
    #context test.get().first()
  ]),
  grid.cell(rowspan: 5, [
    b: // will produce 2
    #test.step()
    #context test.get().first()
  ]),
  grid.cell(rowspan: 3, [
    c: // will produce 3
    #test.step()
    #context test.get().first()
  ]),
)

--- grid-rtl-rowspan-counter-mixed-1 ---
// Test interaction between RTL and counters
#set text(dir: rtl)
#let test = counter("test")
#grid(
  columns: (1fr, 1fr),
  inset: 5pt,
  align: center,
  [
    a: // will produce 1
    #test.step()
    #context test.get().first()
  ],
  grid.cell(rowspan: 2, [
    b: // will produce 2
    #test.step()
    #context test.get().first()
  ]),
  [
    c: // will produce 3
    #test.step()
    #context test.get().first()
  ],
)

--- grid-rtl-rowspan-counter-mixed-2 ---
// Test interaction between RTL and counters
#set text(dir: rtl)
#let test = counter("test")
#grid(
  columns: (1fr, 1fr),
  inset: 5pt,
  align: center,
  grid.cell(rowspan: 2, [
    b: // will produce 2
    #test.step()
    #context test.get().first()
  ]),
  [
    a: // will produce 1
    #test.step()
    #context test.get().first()
  ],
  [
    c: // will produce 3
    #test.step()
    #context test.get().first()
  ]
)
@ -42,3 +42,19 @@ $tilde(U, size: #1.1em), x^tilde(U, size: #1.1em), sscript(tilde(U, size: #1.1em
  macron(bb(#c)), dot(cal(#c)), diaer(upright(#c)), breve(bold(#c)),
  circle(bold(upright(#c))), caron(upright(sans(#c))), arrow(bold(frak(#c)))$
$test(i) \ test(j)$

--- math-accent-dotless-disabled ---
// Test disabling the dotless glyph variants.
$hat(i), hat(i, dotless: #false), accent(j, tilde), accent(j, tilde, dotless: #false)$

--- math-accent-dotless-set-rule ---
#set math.accent(dotless: false)
$ hat(i) $

--- math-accent-flattened ---
// Test flattened accent glyph variants.
#show math.equation: set text(font: "STIX Two Math")
$hat(a) hat(A)$
$tilde(w) tilde(W)$
$grave(i) grave(j)$
$grave(I) grave(J)$
@ -17,6 +17,6 @@ $ x = cases(1, 2) $
$ cases(a, b, c) $

--- math-cases-linebreaks ---
// Currently linebreaks are equivalent to commas, though this behaviour may
// change in the future.
// Warning: 40-49 linebreaks are ignored in branches
// Hint: 40-49 use commas instead to separate each line
$ cases(a, b, c) cases(reverse: #true, a \ b \ c) $
@ -37,8 +37,8 @@ $ 1/2/3 = (1/2)/3 = 1/(2/3) $
// Test precedence.
$ a_1/b_2, 1/f(x), zeta(x)/2, "foo"[|x|]/2 \
  1.2/3.7, 2.3^3.4 \
  🏳️🌈[x]/2, f [x]/2, phi [x]/2, 🏳️🌈 [x]/2 \
  +[x]/2, 1(x)/2, 2[x]/2 \
  f [x]/2, phi [x]/2 \
  +[x]/2, 1(x)/2, 2[x]/2, 🏳️🌈[x]/2 \
  (a)b/2, b(a)[b]/2 \
  n!/2, 5!/2, n !/2, 1/n!, 1/5! $
@ -256,10 +256,17 @@ $ mat(delim: #(none, "["), 1, 2; 3, 4) $
$ mat(delim: #(sym.angle.r, sym.bracket.double.r), 1, 2; 3, 4) $

--- math-mat-linebreaks ---
// Unlike cases and vectors, linebreaks are discarded in matrices. This
// behaviour may change in the future.
// Warning: 20-29 linebreaks are ignored in cells
// Hint: 20-29 use commas instead to separate each line
$ mat(a; b; c) mat(a \ b \ c) $

--- math-mat-vec-cases-unity ---
// Test that matrices, vectors, and cases are all laid out the same.
$ mat(z_(n_p); a^2)
  vec(z_(n_p), a^2)
  cases(reverse: #true, delim: \(, z_(n_p), a^2)
  cases(delim: \(, z_(n_p), a^2) $

--- issue-1617-mat-align ---
#set page(width: auto)
$ mat(a, b; c, d) mat(x; y) $
@ -28,6 +28,10 @@ $ dot \ dots \ ast \ tilde \ star $
$floor(phi.alt.)$
$floor(phi.alt. )$

--- issue-4828-math-number-multi-char ---
// Numbers should parse the same regardless of number of characters.
$1/2(x)$ vs. $1/10(x)$

--- math-unclosed ---
// Error: 1-2 unclosed delimiter
$a