Merge branch 'main' into warn-last-show

Author: T0mstone
Date: 2025-06-11 14:16:42 +02:00
Commit: ea3ca30a65
432 changed files with 10261 additions and 8476 deletions


@@ -40,7 +40,7 @@ jobs:
  sudo dpkg --add-architecture i386
  sudo apt update
  sudo apt install -y gcc-multilib libssl-dev:i386 pkg-config:i386
- - uses: dtolnay/rust-toolchain@1.85.0
+ - uses: dtolnay/rust-toolchain@1.87.0
  with:
    targets: ${{ matrix.bits == 32 && 'i686-unknown-linux-gnu' || '' }}
  - uses: Swatinem/rust-cache@v2
@@ -73,7 +73,7 @@ jobs:
  runs-on: ubuntu-latest
  steps:
  - uses: actions/checkout@v4
- - uses: dtolnay/rust-toolchain@1.85.0
+ - uses: dtolnay/rust-toolchain@1.87.0
  with:
    components: clippy, rustfmt
  - uses: Swatinem/rust-cache@v2


@@ -44,7 +44,7 @@ jobs:
  steps:
  - uses: actions/checkout@v4
- - uses: dtolnay/rust-toolchain@1.85.0
+ - uses: dtolnay/rust-toolchain@1.87.0
  with:
    target: ${{ matrix.target }}

Cargo.lock (generated)

@ -217,6 +217,20 @@ name = "bytemuck"
version = "1.21.0" version = "1.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef657dfab802224e671f5818e9a4935f9b1957ed18e58292690cc39e7a4092a3" checksum = "ef657dfab802224e671f5818e9a4935f9b1957ed18e58292690cc39e7a4092a3"
dependencies = [
"bytemuck_derive",
]
[[package]]
name = "bytemuck_derive"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fa76293b4f7bb636ab88fd78228235b5248b4d05cc589aed610f954af5d7c7a"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "byteorder" name = "byteorder"
@ -494,9 +508,9 @@ dependencies = [
[[package]] [[package]]
name = "crossbeam-channel" name = "crossbeam-channel"
version = "0.5.14" version = "0.5.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06ba6d68e24814cb8de6bb986db8222d3a027d15872cabc0d18817bc3c0e4471" checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2"
dependencies = [ dependencies = [
"crossbeam-utils", "crossbeam-utils",
] ]
@ -735,11 +749,12 @@ dependencies = [
[[package]] [[package]]
name = "flate2" name = "flate2"
version = "1.0.35" version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc"
dependencies = [ dependencies = [
"crc32fast", "crc32fast",
"libz-rs-sys",
"miniz_oxide", "miniz_oxide",
] ]
@ -749,6 +764,15 @@ version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4" checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4"
[[package]]
name = "float-cmp"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b09cf3155332e944990140d967ff5eceb70df778b34f77d8075db46e4704e6d8"
dependencies = [
"num-traits",
]
[[package]] [[package]]
name = "fnv" name = "fnv"
version = "1.0.7" version = "1.0.7"
@ -761,6 +785,15 @@ version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f" checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f"
[[package]]
name = "font-types"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fa6a5e5a77b5f3f7f9e32879f484aa5b3632ddfbe568a16266c904a6f32cdaf"
dependencies = [
"bytemuck",
]
[[package]] [[package]]
name = "fontconfig-parser" name = "fontconfig-parser"
version = "0.5.7" version = "0.5.7"
@ -829,6 +862,15 @@ version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
[[package]]
name = "fxhash"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
dependencies = [
"byteorder",
]
[[package]] [[package]]
name = "getopts" name = "getopts"
version = "0.2.21" version = "0.2.21"
@ -871,6 +913,12 @@ dependencies = [
"weezl", "weezl",
] ]
[[package]]
name = "glidesort"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2e102e6eb644d3e0b186fc161e4460417880a0a0b87d235f2e5b8fb30f2e9e0"
[[package]] [[package]]
name = "half" name = "half"
version = "2.4.1" version = "2.4.1"
@ -966,7 +1014,7 @@ checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526"
dependencies = [ dependencies = [
"displaydoc", "displaydoc",
"serde", "serde",
"yoke", "yoke 0.7.5",
"zerofrom", "zerofrom",
"zerovec", "zerovec",
] ]
@ -1064,7 +1112,7 @@ dependencies = [
"stable_deref_trait", "stable_deref_trait",
"tinystr", "tinystr",
"writeable", "writeable",
"yoke", "yoke 0.7.5",
"zerofrom", "zerofrom",
"zerovec", "zerovec",
] ]
@ -1167,6 +1215,7 @@ dependencies = [
"byteorder-lite", "byteorder-lite",
"color_quant", "color_quant",
"gif", "gif",
"image-webp",
"num-traits", "num-traits",
"png", "png",
"zune-core", "zune-core",
@ -1211,6 +1260,12 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "infer"
version = "0.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a588916bfdfd92e71cacef98a63d9b1f0d74d6599980d11894290e7ddefffcf7"
[[package]] [[package]]
name = "inotify" name = "inotify"
version = "0.11.0" version = "0.11.0"
@ -1310,6 +1365,50 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "krilla"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69ee6128ebf52d7ce684613b6431ead2959f2be9ff8cf776eeaaad0427c953e9"
dependencies = [
"base64",
"bumpalo",
"comemo",
"flate2",
"float-cmp 0.10.0",
"fxhash",
"gif",
"image-webp",
"imagesize",
"once_cell",
"pdf-writer",
"png",
"rayon",
"rustybuzz",
"siphasher",
"skrifa",
"subsetter",
"tiny-skia-path",
"xmp-writer",
"yoke 0.8.0",
"zune-jpeg",
]
[[package]]
name = "krilla-svg"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3462989578155cf620ef8035f8921533cc95c28e2a0c75de172f7219e6aba84e"
dependencies = [
"flate2",
"fontdb",
"krilla",
"png",
"resvg",
"tiny-skia",
"usvg",
]
[[package]] [[package]]
name = "kurbo" name = "kurbo"
version = "0.11.1" version = "0.11.1"
@ -1371,6 +1470,15 @@ dependencies = [
"redox_syscall", "redox_syscall",
] ]
[[package]]
name = "libz-rs-sys"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "902bc563b5d65ad9bba616b490842ef0651066a1a1dc3ce1087113ffcb873c8d"
dependencies = [
"zlib-rs",
]
[[package]] [[package]]
name = "linked-hash-map" name = "linked-hash-map"
version = "0.5.6" version = "0.5.6"
@ -1458,9 +1566,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]] [[package]]
name = "miniz_oxide" name = "miniz_oxide"
version = "0.8.3" version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8402cab7aefae129c6977bb0ff1b8fd9a04eb5b51efc50a70bea51cda0c7924" checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5"
dependencies = [ dependencies = [
"adler2", "adler2",
"simd-adler32", "simd-adler32",
@ -1601,9 +1709,9 @@ dependencies = [
[[package]] [[package]]
name = "openssl" name = "openssl"
version = "0.10.70" version = "0.10.72"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61cfb4e166a8bb8c9b55c500bc2308550148ece889be90f609377e58140f42c6" checksum = "fedfea7d58a1f73118430a55da6a286e7b044961736ce96a16a17068ea25e5da"
dependencies = [ dependencies = [
"bitflags 2.8.0", "bitflags 2.8.0",
"cfg-if", "cfg-if",
@ -1642,9 +1750,9 @@ dependencies = [
[[package]] [[package]]
name = "openssl-sys" name = "openssl-sys"
version = "0.9.105" version = "0.9.107"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b22d5b84be05a8d6947c7cb71f7c849aa0f112acd4bf51c2a7c1c988ac0a9dc" checksum = "8288979acd84749c744a9014b4382d42b8f7b2592847b5afb2ed29e5d16ede07"
dependencies = [ dependencies = [
"cc", "cc",
"libc", "libc",
@ -1738,9 +1846,9 @@ checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3"
[[package]] [[package]]
name = "pdf-writer" name = "pdf-writer"
version = "0.12.1" version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5df03c7d216de06f93f398ef06f1385a60f2c597bb96f8195c8d98e08a26b1d5" checksum = "3ea27c5015ab81753fc61e49f8cde74999346605ee148bb20008ef3d3150e0dc"
dependencies = [ dependencies = [
"bitflags 2.8.0", "bitflags 2.8.0",
"itoa", "itoa",
@ -1997,6 +2105,16 @@ dependencies = [
"crossbeam-utils", "crossbeam-utils",
] ]
[[package]]
name = "read-fonts"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "600e807b48ac55bad68a8cb75cc3c7739f139b9248f7e003e01e080f589b5288"
dependencies = [
"bytemuck",
"font-types",
]
[[package]] [[package]]
name = "redox_syscall" name = "redox_syscall"
version = "0.5.8" version = "0.5.8"
@ -2315,6 +2433,16 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d"
[[package]]
name = "skrifa"
version = "0.30.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fa1e5622e4f7b98877e8a19890efddcac1230cec6198bd9de91ec0e00010dc8"
dependencies = [
"bytemuck",
"read-fonts",
]
[[package]] [[package]]
name = "slotmap" name = "slotmap"
version = "1.0.7" version = "1.0.7"
@ -2361,7 +2489,7 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6637bab7722d379c8b41ba849228d680cc12d0a45ba1fa2b48f2a30577a06731" checksum = "6637bab7722d379c8b41ba849228d680cc12d0a45ba1fa2b48f2a30577a06731"
dependencies = [ dependencies = [
"float-cmp", "float-cmp 0.9.0",
] ]
[[package]] [[package]]
@ -2404,28 +2532,11 @@ dependencies = [
[[package]] [[package]]
name = "subsetter" name = "subsetter"
version = "0.2.0" version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74f98178f34057d4d4de93d68104007c6dea4dfac930204a69ab4622daefa648" checksum = "35539e8de3dcce8dd0c01f3575f85db1e5ac1aea1b996d2d09d89f148bc91497"
[[package]]
name = "svg2pdf"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e50dc062439cc1a396181059c80932a6e6bd731b130e674c597c0c8874b6df22"
dependencies = [ dependencies = [
"fontdb", "fxhash",
"image",
"log",
"miniz_oxide",
"once_cell",
"pdf-writer",
"resvg",
"siphasher",
"subsetter",
"tiny-skia",
"ttf-parser",
"usvg",
] ]
[[package]] [[package]]
@ -2753,7 +2864,7 @@ dependencies = [
[[package]] [[package]]
name = "typst-assets" name = "typst-assets"
version = "0.13.1" version = "0.13.1"
source = "git+https://github.com/typst/typst-assets?rev=ab1295f#ab1295ff896444e51902e03c2669955e1d73604a" source = "git+https://github.com/typst/typst-assets?rev=c74e539#c74e539b090070a0c66fd007c550f5b6d3b724bd"
[[package]] [[package]]
name = "typst-cli" name = "typst-cli"
@ -2803,7 +2914,7 @@ dependencies = [
[[package]] [[package]]
name = "typst-dev-assets" name = "typst-dev-assets"
version = "0.13.1" version = "0.13.1"
source = "git+https://github.com/typst/typst-dev-assets?rev=9879589#9879589f4b3247b12c5e694d0d7fa86d4d8a198e" source = "git+https://github.com/typst/typst-dev-assets?rev=fddbf8b#fddbf8b99506bc370ac0edcd4959add603a7fc92"
[[package]] [[package]]
name = "typst-docs" name = "typst-docs"
@ -2928,6 +3039,7 @@ dependencies = [
"icu_provider_blob", "icu_provider_blob",
"icu_segmenter", "icu_segmenter",
"kurbo", "kurbo",
"memchr",
"rustybuzz", "rustybuzz",
"smallvec", "smallvec",
"ttf-parser", "ttf-parser",
@ -2958,6 +3070,7 @@ dependencies = [
"ecow", "ecow",
"flate2", "flate2",
"fontdb", "fontdb",
"glidesort",
"hayagriva", "hayagriva",
"icu_properties", "icu_properties",
"icu_provider", "icu_provider",
@ -3000,6 +3113,7 @@ dependencies = [
"unicode-segmentation", "unicode-segmentation",
"unscanny", "unscanny",
"usvg", "usvg",
"utf8_iter",
"wasmi", "wasmi",
"xmlwriter", "xmlwriter",
] ]
@ -3018,26 +3132,20 @@ dependencies = [
name = "typst-pdf" name = "typst-pdf"
version = "0.13.1" version = "0.13.1"
dependencies = [ dependencies = [
"arrayvec",
"base64",
"bytemuck", "bytemuck",
"comemo", "comemo",
"ecow", "ecow",
"image", "image",
"indexmap 2.7.1", "infer",
"miniz_oxide", "krilla",
"pdf-writer", "krilla-svg",
"serde", "serde",
"subsetter",
"svg2pdf",
"ttf-parser",
"typst-assets", "typst-assets",
"typst-library", "typst-library",
"typst-macros", "typst-macros",
"typst-syntax", "typst-syntax",
"typst-timing", "typst-timing",
"typst-utils", "typst-utils",
"xmp-writer",
] ]
[[package]] [[package]]
@ -3094,6 +3202,7 @@ dependencies = [
name = "typst-syntax" name = "typst-syntax"
version = "0.13.1" version = "0.13.1"
dependencies = [ dependencies = [
"comemo",
"ecow", "ecow",
"serde", "serde",
"toml", "toml",
@ -3661,9 +3770,9 @@ checksum = "ec7a2a501ed189703dba8b08142f057e887dfc4b2cc4db2d343ac6376ba3e0b9"
[[package]] [[package]]
name = "xmp-writer" name = "xmp-writer"
version = "0.3.1" version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7eb5954c9ca6dcc869e98d3e42760ed9dab08f3e70212b31d7ab8ae7f3b7a487" checksum = "ce9e2f4a404d9ebffc0a9832cf4f50907220ba3d7fffa9099261a5cab52f2dd7"
[[package]] [[package]]
name = "xz2" name = "xz2"
@ -3701,7 +3810,19 @@ checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40"
dependencies = [ dependencies = [
"serde", "serde",
"stable_deref_trait", "stable_deref_trait",
"yoke-derive", "yoke-derive 0.7.5",
"zerofrom",
]
[[package]]
name = "yoke"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc"
dependencies = [
"serde",
"stable_deref_trait",
"yoke-derive 0.8.0",
"zerofrom", "zerofrom",
] ]
@ -3717,6 +3838,18 @@ dependencies = [
"synstructure", "synstructure",
] ]
[[package]]
name = "yoke-derive"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6"
dependencies = [
"proc-macro2",
"quote",
"syn",
"synstructure",
]
[[package]] [[package]]
name = "zerocopy" name = "zerocopy"
version = "0.7.35" version = "0.7.35"
@ -3778,7 +3911,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079"
dependencies = [ dependencies = [
"serde", "serde",
"yoke", "yoke 0.7.5",
"zerofrom", "zerofrom",
"zerovec-derive", "zerovec-derive",
] ]
@ -3796,21 +3929,25 @@ dependencies = [
[[package]] [[package]]
name = "zip" name = "zip"
version = "2.2.2" version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae9c1ea7b3a5e1f4b922ff856a129881167511563dc219869afe3787fc0c1a45" checksum = "27c03817464f64e23f6f37574b4fdc8cf65925b5bfd2b0f2aedf959791941f88"
dependencies = [ dependencies = [
"arbitrary", "arbitrary",
"crc32fast", "crc32fast",
"crossbeam-utils", "crossbeam-utils",
"displaydoc",
"flate2", "flate2",
"indexmap 2.7.1", "indexmap 2.7.1",
"memchr", "memchr",
"thiserror 2.0.11",
"zopfli", "zopfli",
] ]
[[package]]
name = "zlib-rs"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b20717f0917c908dc63de2e44e97f1e6b126ca58d0e391cee86d504eb8fbd05"
[[package]] [[package]]
name = "zopfli" name = "zopfli"
version = "0.8.1" version = "0.8.1"


@@ -32,8 +32,8 @@ typst-svg = { path = "crates/typst-svg", version = "0.13.1" }
  typst-syntax = { path = "crates/typst-syntax", version = "0.13.1" }
  typst-timing = { path = "crates/typst-timing", version = "0.13.1" }
  typst-utils = { path = "crates/typst-utils", version = "0.13.1" }
- typst-assets = { git = "https://github.com/typst/typst-assets", rev = "ab1295f" }
+ typst-assets = { git = "https://github.com/typst/typst-assets", rev = "c74e539" }
- typst-dev-assets = { git = "https://github.com/typst/typst-dev-assets", rev = "9879589" }
+ typst-dev-assets = { git = "https://github.com/typst/typst-dev-assets", rev = "fddbf8b" }
  arrayvec = "0.7.4"
  az = "1.2"
  base64 = "0.22"

@@ -59,6 +59,7 @@ fastrand = "2.3"
  flate2 = "1"
  fontdb = { version = "0.23", default-features = false }
  fs_extra = "1.3"
+ glidesort = "0.1.2"
  hayagriva = "0.8.1"
  heck = "0.5"
  hypher = "0.1.4"

@@ -68,24 +69,25 @@ icu_provider_adapters = "1.4"
  icu_provider_blob = "1.4"
  icu_segmenter = { version = "1.4", features = ["serde"] }
  if_chain = "1"
- image = { version = "0.25.5", default-features = false, features = ["png", "jpeg", "gif"] }
+ image = { version = "0.25.5", default-features = false, features = ["png", "jpeg", "gif", "webp"] }
  indexmap = { version = "2", features = ["serde"] }
+ infer = { version = "0.19.0", default-features = false }
  kamadak-exif = "0.6"
+ krilla = { version = "0.4.0", default-features = false, features = ["raster-images", "comemo", "rayon"] }
+ krilla-svg = "0.1.0"
  kurbo = "0.11"
  libfuzzer-sys = "0.4"
  lipsum = "0.9"
  memchr = "2"
- miniz_oxide = "0.8"
  native-tls = "0.2"
  notify = "8"
  once_cell = "1"
  open = "5.0.1"
- openssl = "0.10"
+ openssl = "0.10.72"
  oxipng = { version = "9.0", default-features = false, features = ["filetime", "parallel", "zopfli"] }
  palette = { version = "0.7.3", default-features = false, features = ["approx", "libm"] }
  parking_lot = "0.12.1"
  pathdiff = "0.2"
- pdf-writer = "0.12.1"
  phf = { version = "0.11", features = ["macros"] }
  pixglyph = "0.6"
  png = "0.17"

@@ -112,8 +114,6 @@ sigpipe = "0.1"
  siphasher = "1"
  smallvec = { version = "1.11.1", features = ["union", "const_generics", "const_new"] }
  stacker = "0.1.15"
- subsetter = "0.2"
- svg2pdf = "0.13"
  syn = { version = "2", features = ["full", "extra-traits"] }
  syntect = { version = "5", default-features = false, features = ["parsing", "regex-fancy", "plist-load", "yaml-load"] }
  tar = "0.4"

@@ -135,15 +135,15 @@ unicode-segmentation = "1"
  unscanny = "0.1"
  ureq = { version = "2", default-features = false, features = ["native-tls", "gzip", "json"] }
  usvg = { version = "0.45", default-features = false, features = ["text"] }
+ utf8_iter = "1.0.4"
  walkdir = "2"
  wasmi = "0.40.0"
  web-sys = "0.3"
  xmlparser = "0.13.5"
  xmlwriter = "0.1.0"
- xmp-writer = "0.3.1"
  xz2 = { version = "0.1", features = ["static"] }
  yaml-front-matter = "0.1"
- zip = { version = "2", default-features = false, features = ["deflate"] }
+ zip = { version = "2.5", default-features = false, features = ["deflate"] }

  [profile.dev.package."*"]
  opt-level = 2


@@ -177,22 +177,22 @@ If you prefer an integrated IDE-like experience with autocompletion and instant
  preview, you can also check out [Typst's free web app][app].

  ## Community

- The main place where the community gathers is our [Discord server][discord].
- Feel free to join there to ask questions, help out others, share cool things
- you created with Typst, or just to chat.
+ The main places where the community gathers are our [Forum][forum] and our
+ [Discord server][discord]. The Forum is a great place to ask questions, help
+ others, and share cool things you created with Typst. The Discord server is more
+ suitable for quicker questions, discussions about contributing, or just to chat.
+ We'd be happy to see you there!

- Aside from that there are a few places where you can find things built by
- the community:
-
- - The official [package list](https://typst.app/docs/packages)
- - The [Awesome Typst](https://github.com/qjcg/awesome-typst) repository
+ [Typst Universe][universe] is where the community shares templates and packages.
+ If you want to share your own creations, you can submit them to our
+ [package repository][packages].

  If you had a bad experience in our community, please [reach out to us][contact].

  ## Contributing

- We would love to see contributions from the community. If you experience bugs,
- feel free to open an issue. If you would like to implement a new feature or bug
- fix, please follow the steps outlined in the [contribution guide][contributing].
+ We love to see contributions from the community. If you experience bugs, feel
+ free to open an issue. If you would like to implement a new feature or bug fix,
+ please follow the steps outlined in the [contribution guide][contributing].

  To build Typst yourself, first ensure that you have the
  [latest stable Rust][rust] installed. Then, clone this repository and build the

@@ -243,6 +243,8 @@ instant preview. To achieve these goals, we follow three core design principles:
  [docs]: https://typst.app/docs/
  [app]: https://typst.app/
  [discord]: https://discord.gg/2uDybryKPe
+ [forum]: https://forum.typst.app/
+ [universe]: https://typst.app/universe/
  [tutorial]: https://typst.app/docs/tutorial/
  [show]: https://typst.app/docs/reference/styling/#show-rules
  [math]: https://typst.app/docs/reference/math/


@@ -361,7 +361,7 @@ pub struct FontArgs {
      /// Ensures system fonts won't be searched, unless explicitly included via
      /// `--font-path`.
-     #[arg(long)]
+     #[arg(long, env = "TYPST_IGNORE_SYSTEM_FONTS")]
      pub ignore_system_fonts: bool,
  }
@@ -467,15 +467,45 @@ display_possible_values!(Feature);
  #[derive(Debug, Copy, Clone, Eq, PartialEq, ValueEnum)]
  #[allow(non_camel_case_types)]
  pub enum PdfStandard {
+     /// PDF 1.4.
+     #[value(name = "1.4")]
+     V_1_4,
+     /// PDF 1.5.
+     #[value(name = "1.5")]
+     V_1_5,
+     /// PDF 1.6.
+     #[value(name = "1.6")]
+     V_1_6,
      /// PDF 1.7.
      #[value(name = "1.7")]
      V_1_7,
+     /// PDF 2.0.
+     #[value(name = "2.0")]
+     V_2_0,
+     /// PDF/A-1b.
+     #[value(name = "a-1b")]
+     A_1b,
      /// PDF/A-2b.
      #[value(name = "a-2b")]
      A_2b,
+     /// PDF/A-2u.
+     #[value(name = "a-2u")]
+     A_2u,
      /// PDF/A-3b.
      #[value(name = "a-3b")]
      A_3b,
+     /// PDF/A-3u.
+     #[value(name = "a-3u")]
+     A_3u,
+     /// PDF/A-4.
+     #[value(name = "a-4")]
+     A_4,
+     /// PDF/A-4f.
+     #[value(name = "a-4f")]
+     A_4f,
+     /// PDF/A-4e.
+     #[value(name = "a-4e")]
+     A_4e,
  }

  display_possible_values!(PdfStandard);


@@ -16,7 +16,7 @@ use typst::diag::{
  use typst::foundations::{Datetime, Smart};
  use typst::html::HtmlDocument;
  use typst::layout::{Frame, Page, PageRanges, PagedDocument};
- use typst::syntax::{FileId, Source, Span};
+ use typst::syntax::{FileId, Lines, Span};
  use typst::WorldExt;
  use typst_pdf::{PdfOptions, PdfStandards, Timestamp};

@@ -63,8 +63,7 @@ pub struct CompileConfig {
      /// Opens the output file with the default viewer or a specific program after
      /// compilation.
      pub open: Option<Option<String>>,
-     /// One (or multiple comma-separated) PDF standards that Typst will enforce
-     /// conformance with.
+     /// A list of standards the PDF should conform to.
      pub pdf_standards: PdfStandards,
      /// A path to write a Makefile rule describing the current compilation.
      pub make_deps: Option<PathBuf>,

@@ -130,18 +129,9 @@ impl CompileConfig {
          PageRanges::new(export_ranges.iter().map(|r| r.0.clone()).collect())
      });

-     let pdf_standards = {
-         let list = args
-             .pdf_standard
-             .iter()
-             .map(|standard| match standard {
-                 PdfStandard::V_1_7 => typst_pdf::PdfStandard::V_1_7,
-                 PdfStandard::A_2b => typst_pdf::PdfStandard::A_2b,
-                 PdfStandard::A_3b => typst_pdf::PdfStandard::A_3b,
-             })
-             .collect::<Vec<_>>();
-         PdfStandards::new(&list)?
-     };
+     let pdf_standards = PdfStandards::new(
+         &args.pdf_standard.iter().copied().map(Into::into).collect::<Vec<_>>(),
+     )?;

      #[cfg(feature = "http-server")]
      let server = match watch {

@@ -295,6 +285,7 @@ fn export_pdf(document: &PagedDocument, config: &CompileConfig) -> SourceResult<
          })
      }
  };
+
  let options = PdfOptions {
      ident: Smart::Auto,
      timestamp,

@@ -705,7 +696,7 @@ fn label(world: &SystemWorld, span: Span) -> Option<Label<FileId>> {
  impl<'a> codespan_reporting::files::Files<'a> for SystemWorld {
      type FileId = FileId;
      type Name = String;
-     type Source = Source;
+     type Source = Lines<String>;

      fn name(&'a self, id: FileId) -> CodespanResult<Self::Name> {
          let vpath = id.vpath();

@@ -765,3 +756,23 @@ impl<'a> codespan_reporting::files::Files<'a> for SystemWorld {
          })
      }
  }
+
+ impl From<PdfStandard> for typst_pdf::PdfStandard {
+     fn from(standard: PdfStandard) -> Self {
+         match standard {
+             PdfStandard::V_1_4 => typst_pdf::PdfStandard::V_1_4,
+             PdfStandard::V_1_5 => typst_pdf::PdfStandard::V_1_5,
+             PdfStandard::V_1_6 => typst_pdf::PdfStandard::V_1_6,
+             PdfStandard::V_1_7 => typst_pdf::PdfStandard::V_1_7,
+             PdfStandard::V_2_0 => typst_pdf::PdfStandard::V_2_0,
+             PdfStandard::A_1b => typst_pdf::PdfStandard::A_1b,
+             PdfStandard::A_2b => typst_pdf::PdfStandard::A_2b,
+             PdfStandard::A_2u => typst_pdf::PdfStandard::A_2u,
+             PdfStandard::A_3b => typst_pdf::PdfStandard::A_3b,
+             PdfStandard::A_3u => typst_pdf::PdfStandard::A_3u,
+             PdfStandard::A_4 => typst_pdf::PdfStandard::A_4,
+             PdfStandard::A_4f => typst_pdf::PdfStandard::A_4f,
+             PdfStandard::A_4e => typst_pdf::PdfStandard::A_4e,
+         }
+     }
+ }
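This `From` impl is what lets the CLI build the standards list with a plain `.map(Into::into)` instead of the inline `match` that was removed earlier in this file. A self-contained sketch of the same pattern with stand-in enums (not the real typst types):

```rust
// A `From` impl on the CLI-facing enum lets call sites convert with
// `.map(Into::into)` instead of spelling out a `match` at each use.
#[derive(Clone, Copy)]
enum CliStandard {
    Pdf17,
    PdfA2b,
}

#[derive(Debug)]
enum LibStandard {
    Pdf17,
    PdfA2b,
}

impl From<CliStandard> for LibStandard {
    fn from(standard: CliStandard) -> Self {
        match standard {
            CliStandard::Pdf17 => LibStandard::Pdf17,
            CliStandard::PdfA2b => LibStandard::PdfA2b,
        }
    }
}

fn main() {
    let cli = [CliStandard::Pdf17, CliStandard::PdfA2b];
    // Mirrors `args.pdf_standard.iter().copied().map(Into::into).collect()`.
    let lib: Vec<LibStandard> = cli.iter().copied().map(Into::into).collect();
    println!("{lib:?}");
}
```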


@@ -2,6 +2,7 @@ use comemo::Track;
  use ecow::{eco_format, EcoString};
  use serde::Serialize;
  use typst::diag::{bail, HintedStrResult, StrResult, Warned};
+ use typst::engine::Sink;
  use typst::foundations::{Content, IntoValue, LocatableSelector, Scope};
  use typst::layout::PagedDocument;
  use typst::syntax::Span;

@@ -58,6 +59,8 @@ fn retrieve(
      let selector = eval_string(
          &typst::ROUTINES,
          world.track(),
+         // TODO: propagate warnings
+         Sink::new().track_mut(),
          &command.selector,
          Span::detached(),
          EvalMode::Code,


@@ -85,6 +85,6 @@ fn resolve_span(world: &SystemWorld, span: Span) -> Option<(String, u32)> {
      let id = span.id()?;
      let source = world.source(id).ok()?;
      let range = source.range(span)?;
-     let line = source.byte_to_line(range.start)?;
+     let line = source.lines().byte_to_line(range.start)?;
      Some((format!("{id:?}"), line as u32 + 1))
  }


@@ -9,7 +9,7 @@ use ecow::{eco_format, EcoString};
  use parking_lot::Mutex;
  use typst::diag::{FileError, FileResult};
  use typst::foundations::{Bytes, Datetime, Dict, IntoValue};
- use typst::syntax::{FileId, Source, VirtualPath};
+ use typst::syntax::{FileId, Lines, Source, VirtualPath};
  use typst::text::{Font, FontBook};
  use typst::utils::LazyHash;
  use typst::{Library, World};

@@ -181,10 +181,20 @@ impl SystemWorld {
          }
      }

-     /// Lookup a source file by id.
+     /// Lookup line metadata for a file by id.
      #[track_caller]
-     pub fn lookup(&self, id: FileId) -> Source {
-         self.source(id).expect("file id does not point to any source file")
+     pub fn lookup(&self, id: FileId) -> Lines<String> {
+         self.slot(id, |slot| {
+             if let Some(source) = slot.source.get() {
+                 let source = source.as_ref().expect("file is not valid");
+                 source.lines().clone()
+             } else if let Some(bytes) = slot.file.get() {
+                 let bytes = bytes.as_ref().expect("file is not valid");
+                 Lines::try_from(bytes).expect("file is not valid utf-8")
+             } else {
+                 panic!("file id does not point to any source file");
+             }
+         })
      }
  }

@@ -210,7 +220,9 @@ impl World for SystemWorld {
      }

      fn font(&self, index: usize) -> Option<Font> {
-         self.fonts[index].get()
+         // comemo's validation may invoke this function with an invalid index. This is
+         // impossible in typst-cli but possible if a custom tool mutates the fonts.
+         self.fonts.get(index)?.get()
      }

      fn today(&self, offset: Option<i64>) -> Option<Datetime> {
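The `font` change swaps a panicking index expression (`self.fonts[index]`) for `slice::get`, which returns `None` when the index is out of range. A tiny stand-alone illustration of the difference, using a plain `Vec<String>` rather than the real font slots:

```rust
fn font_at(fonts: &[String], index: usize) -> Option<&String> {
    // `get` returns `None` for an out-of-range index instead of panicking
    // like `fonts[index]` would.
    fonts.get(index)
}

fn main() {
    let fonts = vec!["Libertinus Serif".to_string()];
    assert!(font_at(&fonts, 0).is_some());
    assert!(font_at(&fonts, 5).is_none()); // out of range, but no panic
}
```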
@@ -337,6 +349,11 @@ impl<T: Clone> SlotCell<T> {
          self.accessed = false;
      }

+     /// Gets the contents of the cell.
+     fn get(&self) -> Option<&FileResult<T>> {
+         self.data.as_ref()
+     }
+
      /// Gets the contents of the cell or initialize them.
      fn get_or_init(
          &mut self,


@@ -25,19 +25,22 @@ impl Eval for ast::FuncCall<'_> {
      fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
          let span = self.span();
          let callee = self.callee();
-         let in_math = in_math(callee);
          let callee_span = callee.span();
          let args = self.args();
-         let trailing_comma = args.trailing_comma();

          vm.engine.route.check_call_depth().at(span)?;

          // Try to evaluate as a call to an associated function or field.
-         let (callee, args) = if let ast::Expr::FieldAccess(access) = callee {
+         let (callee_value, args_value) = if let ast::Expr::FieldAccess(access) = callee {
              let target = access.target();
              let field = access.field();
              match eval_field_call(target, field, args, span, vm)? {
-                 FieldCall::Normal(callee, args) => (callee, args),
+                 FieldCall::Normal(callee, args) => {
+                     if vm.inspected == Some(callee_span) {
+                         vm.trace(callee.clone());
+                     }
+                     (callee, args)
+                 }
                  FieldCall::Resolved(value) => return Ok(value),
              }
          } else {

@@ -45,9 +48,15 @@ impl Eval for ast::FuncCall<'_> {
              (callee.eval(vm)?, args.eval(vm)?.spanned(span))
          };

-         let func_result = callee.clone().cast::<Func>();
-         if in_math && func_result.is_err() {
-             return wrap_args_in_math(callee, callee_span, args, trailing_comma);
+         let func_result = callee_value.clone().cast::<Func>();
+
+         if func_result.is_err() && in_math(callee) {
+             return wrap_args_in_math(
+                 callee_value,
+                 callee_span,
+                 args_value,
+                 args.trailing_comma(),
+             );
          }

          let func = func_result

@@ -56,8 +65,11 @@ impl Eval for ast::FuncCall<'_> {
          let point = || Tracepoint::Call(func.name().map(Into::into));
          let f = || {
-             func.call(&mut vm.engine, vm.context, args)
-                 .trace(vm.world(), point, span)
+             func.call(&mut vm.engine, vm.context, args_value).trace(
+                 vm.world(),
+                 point,
+                 span,
+             )
          };

          // Stacker is broken on WASM.

@@ -404,12 +416,14 @@ fn wrap_args_in_math(
      if trailing_comma {
          body += SymbolElem::packed(',');
      }
-     Ok(Value::Content(
-         callee.display().spanned(callee_span)
-             + LrElem::new(SymbolElem::packed('(') + body + SymbolElem::packed(')'))
-                 .pack()
-                 .spanned(args.span),
-     ))
+
+     let formatted = callee.display().spanned(callee_span)
+         + LrElem::new(SymbolElem::packed('(') + body + SymbolElem::packed(')'))
+             .pack()
+             .spanned(args.span);
+
+     args.finish()?;
+     Ok(Value::Content(formatted))
  }

  /// Provide a hint if the callee is a shadowed standard library function.


@@ -101,6 +101,7 @@ pub fn eval(
  pub fn eval_string(
      routines: &Routines,
      world: Tracked<dyn World + '_>,
+     sink: TrackedMut<Sink>,
      string: &str,
      span: Span,
      mode: EvalMode,

@@ -121,7 +122,6 @@
      }

      // Prepare the engine.
-     let mut sink = Sink::new();
      let introspector = Introspector::default();
      let traced = Traced::default();
      let engine = Engine {

@@ -129,7 +129,7 @@ pub fn eval_string(
          world,
          introspector: introspector.track(),
          traced: traced.track(),
-         sink: sink.track_mut(),
+         sink,
          route: Route::default(),
      };


@@ -263,13 +263,13 @@ fn handle(
  /// Wrap the nodes in `<html>` and `<body>` if they are not yet rooted,
  /// supplying a suitable `<head>`.
  fn root_element(output: Vec<HtmlNode>, info: &DocumentInfo) -> SourceResult<HtmlElement> {
+     let head = head_element(info);
      let body = match classify_output(output)? {
          OutputKind::Html(element) => return Ok(element),
          OutputKind::Body(body) => body,
          OutputKind::Leafs(leafs) => HtmlElement::new(tag::body).with_children(leafs),
      };
-     Ok(HtmlElement::new(tag::html)
-         .with_children(vec![head_element(info).into(), body.into()]))
+     Ok(HtmlElement::new(tag::html).with_children(vec![head.into(), body.into()]))
  }

  /// Generate a `<head>` element.

@@ -302,6 +302,24 @@ fn head_element(info: &DocumentInfo) -> HtmlElement {
          );
      }

+     if !info.author.is_empty() {
+         children.push(
+             HtmlElement::new(tag::meta)
+                 .with_attr(attr::name, "authors")
+                 .with_attr(attr::content, info.author.join(", "))
+                 .into(),
+         )
+     }
+
+     if !info.keywords.is_empty() {
+         children.push(
+             HtmlElement::new(tag::meta)
+                 .with_attr(attr::name, "keywords")
+                 .with_attr(attr::content, info.keywords.join(", "))
+                 .into(),
+         )
+     }
+
      HtmlElement::new(tag::head).with_children(children)
  }


@@ -15,7 +15,7 @@ use typst::syntax::{
      ast, is_id_continue, is_id_start, is_ident, FileId, LinkedNode, Side, Source,
      SyntaxKind,
  };
- use typst::text::RawElem;
+ use typst::text::{FontFlags, RawElem};
  use typst::visualize::Color;
  use unscanny::Scanner;

@@ -298,13 +298,20 @@ fn complete_math(ctx: &mut CompletionContext) -> bool {
          return false;
      }

-     // Start of an interpolated identifier: "#|".
+     // Start of an interpolated identifier: "$#|$".
      if ctx.leaf.kind() == SyntaxKind::Hash {
          ctx.from = ctx.cursor;
          code_completions(ctx, true);
          return true;
      }

+     // Behind existing interpolated identifier: "$#pa|$".
+     if ctx.leaf.kind() == SyntaxKind::Ident {
+         ctx.from = ctx.leaf.offset();
+         code_completions(ctx, true);
+         return true;
+     }
+
      // Behind existing atom or identifier: "$a|$" or "$abc|$".
      if matches!(
          ctx.leaf.kind(),

@@ -841,7 +848,9 @@ fn param_value_completions<'a>(
  /// Returns which file extensions to complete for the given parameter if any.
  fn path_completion(func: &Func, param: &ParamInfo) -> Option<&'static [&'static str]> {
      Some(match (func.name(), param.name) {
-         (Some("image"), "source") => &["png", "jpg", "jpeg", "gif", "svg", "svgz"],
+         (Some("image"), "source") => {
+             &["png", "jpg", "jpeg", "gif", "svg", "svgz", "webp"]
+         }
          (Some("csv"), "source") => &["csv"],
          (Some("plugin"), "source") => &["wasm"],
          (Some("cbor"), "source") => &["cbor"],

@@ -1081,6 +1090,24 @@ fn code_completions(ctx: &mut CompletionContext, hash: bool) {
      }
  }

+ /// See if the AST node is somewhere within a show rule applying to equations.
+ fn is_in_equation_show_rule(leaf: &LinkedNode<'_>) -> bool {
+     let mut node = leaf;
+     while let Some(parent) = node.parent() {
+         if_chain! {
+             if let Some(expr) = parent.get().cast::<ast::Expr>();
+             if let ast::Expr::ShowRule(show) = expr;
+             if let Some(ast::Expr::FieldAccess(field)) = show.selector();
+             if field.field().as_str() == "equation";
+             then {
+                 return true;
+             }
+         }
+         node = parent;
+     }
+     false
+ }
+
  /// Context for autocompletion.
  struct CompletionContext<'a> {
      world: &'a (dyn IdeWorld + 'a),

@@ -1152,10 +1179,12 @@ impl<'a> CompletionContext<'a> {
      /// Add completions for all font families.
      fn font_completions(&mut self) {
-         let equation = self.before_window(25).contains("equation");
+         let equation = is_in_equation_show_rule(self.leaf);
          for (family, iter) in self.world.book().families() {
-             let detail = summarize_font_family(iter);
-             if !equation || family.contains("Math") {
+             let variants: Vec<_> = iter.collect();
+             let is_math = variants.iter().any(|f| f.flags.contains(FontFlags::MATH));
+             let detail = summarize_font_family(variants);
+             if !equation || is_math {
                  self.str_completion(
                      family,
                      Some(CompletionKind::Font),

@@ -1644,6 +1673,13 @@ mod tests {
          test("#{() .a}", -2).must_include(["at", "any", "all"]);
      }

+     /// Test that autocomplete in math uses the correct global scope.
+     #[test]
+     fn test_autocomplete_math_scope() {
+         test("$#col$", -2).must_include(["colbreak"]).must_exclude(["colon"]);
+         test("$col$", -2).must_include(["colon"]).must_exclude(["colbreak"]);
+     }
+
      /// Test that the `before_window` doesn't slice into invalid byte
      /// boundaries.
      #[test]

@@ -1662,7 +1698,7 @@
          // Then, add the invalid `#cite` call. Had the document been invalid
          // initially, we would have no populated document to autocomplete with.
-         let end = world.main.len_bytes();
+         let end = world.main.text().len();
          world.main.edit(end..end, " #cite()");

          test_with_doc(&world, -2, doc.as_ref())

@@ -1790,4 +1826,21 @@
              .must_include(["r", "dashed"])
              .must_exclude(["cases"]);
      }
+
+     #[test]
+     fn test_autocomplete_fonts() {
+         test("#text(font:)", -1)
+             .must_include(["\"Libertinus Serif\"", "\"New Computer Modern Math\""]);
+         test("#show link: set text(font: )", -1)
+             .must_include(["\"Libertinus Serif\"", "\"New Computer Modern Math\""]);
+         test("#show math.equation: set text(font: )", -1)
+             .must_include(["\"New Computer Modern Math\""])
+             .must_exclude(["\"Libertinus Serif\""]);
+         test("#show math.equation: it => { set text(font: )\nit }", -6)
+             .must_include(["\"New Computer Modern Math\""])
+             .must_exclude(["\"Libertinus Serif\""]);
+     }
  }


@@ -3,7 +3,7 @@ use std::num::NonZeroUsize;
  use typst::layout::{Frame, FrameItem, PagedDocument, Point, Position, Size};
  use typst::model::{Destination, Url};
  use typst::syntax::{FileId, LinkedNode, Side, Source, Span, SyntaxKind};
- use typst::visualize::Geometry;
+ use typst::visualize::{Curve, CurveItem, FillRule, Geometry};
  use typst::WorldExt;

  use crate::IdeWorld;

@@ -53,10 +53,20 @@ pub fn jump_from_click(
      for (mut pos, item) in frame.items().rev() {
          match item {
              FrameItem::Group(group) => {
-                 // TODO: Handle transformation.
-                 if let Some(span) =
-                     jump_from_click(world, document, &group.frame, click - pos)
-                 {
+                 let pos = click - pos;
+                 if let Some(clip) = &group.clip {
+                     if !clip.contains(FillRule::NonZero, pos) {
+                         continue;
+                     }
+                 }
+                 // Realistic transforms should always be invertible.
+                 // An example of one that isn't is a scale of 0, which would
+                 // not be clickable anyway.
+                 let Some(inv_transform) = group.transform.invert() else {
+                     continue;
+                 };
+                 let pos = pos.transform_inf(inv_transform);
+                 if let Some(span) = jump_from_click(world, document, &group.frame, pos) {
                      return Some(span);
                  }
              }
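The idea behind the new group handling: instead of recursing with `click - pos` directly, the click point is first mapped into the group's local coordinate space with the inverse of the group's transform, and groups whose transform is not invertible (determinant zero, e.g. a scale of 0) are skipped. A rough stand-alone sketch of that mapping with plain `f64` math rather than the typst `Transform`/`Point` types:

```rust
// 2D affine transform: (x, y) -> (sx*x + kx*y + tx, ky*x + sy*y + ty).
#[derive(Clone, Copy, Debug)]
struct Transform {
    sx: f64,
    ky: f64,
    kx: f64,
    sy: f64,
    tx: f64,
    ty: f64,
}

impl Transform {
    fn apply(self, x: f64, y: f64) -> (f64, f64) {
        (self.sx * x + self.kx * y + self.tx, self.ky * x + self.sy * y + self.ty)
    }

    /// Returns `None` when the transform is not invertible (determinant 0),
    /// for example a scale of zero.
    fn invert(self) -> Option<Transform> {
        let det = self.sx * self.sy - self.kx * self.ky;
        if det.abs() < f64::EPSILON {
            return None;
        }
        let inv_det = 1.0 / det;
        Some(Transform {
            sx: self.sy * inv_det,
            ky: -self.ky * inv_det,
            kx: -self.kx * inv_det,
            sy: self.sx * inv_det,
            tx: (self.kx * self.ty - self.sy * self.tx) * inv_det,
            ty: (self.ky * self.tx - self.sx * self.ty) * inv_det,
        })
    }
}

fn main() {
    // A group scaled by 2: a click at (20, 20) on the page corresponds to
    // (10, 10) in the group's local coordinates.
    let scale2 = Transform { sx: 2.0, ky: 0.0, kx: 0.0, sy: 2.0, tx: 0.0, ty: 0.0 };
    let inv = scale2.invert().unwrap();
    assert_eq!(inv.apply(20.0, 20.0), (10.0, 10.0));
}
```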
@@ -94,12 +104,35 @@
              }

              FrameItem::Shape(shape, span) => {
-                 let Geometry::Rect(size) = shape.geometry else { continue };
-                 if is_in_rect(pos, size, click) {
+                 if shape.fill.is_some() {
+                     let within = match &shape.geometry {
+                         Geometry::Line(..) => false,
+                         Geometry::Rect(size) => is_in_rect(pos, *size, click),
+                         Geometry::Curve(curve) => {
+                             curve.contains(shape.fill_rule, click - pos)
+                         }
+                     };
+                     if within {
                          return Jump::from_span(world, *span);
                      }
                  }

+                 if let Some(stroke) = &shape.stroke {
+                     let within = !stroke.thickness.approx_empty() && {
+                         // This curve is rooted at (0, 0), not `pos`.
+                         let base_curve = match &shape.geometry {
+                             Geometry::Line(to) => &Curve(vec![CurveItem::Line(*to)]),
+                             Geometry::Rect(size) => &Curve::rect(*size),
+                             Geometry::Curve(curve) => curve,
+                         };
+                         base_curve.stroke_contains(stroke, click - pos)
+                     };
+                     if within {
+                         return Jump::from_span(world, *span);
+                     }
+                 }
+             }
+
              FrameItem::Image(_, size, span) if is_in_rect(pos, *size, click) => {
                  return Jump::from_span(world, *span);
              }

@@ -146,9 +179,8 @@ pub fn jump_from_cursor(
  fn find_in_frame(frame: &Frame, span: Span) -> Option<Point> {
      for (mut pos, item) in frame.items() {
          if let FrameItem::Group(group) = item {
-             // TODO: Handle transformation.
              if let Some(point) = find_in_frame(&group.frame, span) {
-                 return Some(point + pos);
+                 return Some(pos + point.transform(group.transform));
              }
          }

@@ -269,6 +301,97 @@
          test_click("$a + b$", point(28.0, 14.0), cursor(5));
      }

+     #[test]
+     fn test_jump_from_click_transform_clip() {
+         let margin = point(10.0, 10.0);
+
+         test_click(
+             "#rect(width: 20pt, height: 20pt, fill: black)",
+             point(10.0, 10.0) + margin,
+             cursor(1),
+         );
+         test_click(
+             "#rect(width: 60pt, height: 10pt, fill: black)",
+             point(5.0, 30.0) + margin,
+             None,
+         );
+         test_click(
+             "#rotate(90deg, origin: bottom + left, rect(width: 60pt, height: 10pt, fill: black))",
+             point(5.0, 30.0) + margin,
+             cursor(38),
+         );
+         test_click(
+             "#scale(x: 300%, y: 300%, origin: top + left, rect(width: 10pt, height: 10pt, fill: black))",
+             point(20.0, 20.0) + margin,
+             cursor(45),
+         );
+         test_click(
+             "#box(width: 10pt, height: 10pt, clip: true, scale(x: 300%, y: 300%, \
+              origin: top + left, rect(width: 10pt, height: 10pt, fill: black)))",
+             point(20.0, 20.0) + margin,
+             None,
+         );
+         test_click(
+             "#box(width: 10pt, height: 10pt, clip: false, rect(width: 30pt, height: 30pt, fill: black))",
+             point(20.0, 20.0) + margin,
+             cursor(45),
+         );
+         test_click(
+             "#box(width: 10pt, height: 10pt, clip: true, rect(width: 30pt, height: 30pt, fill: black))",
+             point(20.0, 20.0) + margin,
+             None,
+         );
+         test_click(
+             "#rotate(90deg, origin: bottom + left)[hello world]",
+             point(5.0, 15.0) + margin,
+             cursor(40),
+         );
+     }
+
+     #[test]
+     fn test_jump_from_click_shapes() {
+         let margin = point(10.0, 10.0);
+
+         test_click(
+             "#rect(width: 30pt, height: 30pt, fill: black)",
+             point(15.0, 15.0) + margin,
+             cursor(1),
+         );
+
+         let circle = "#circle(width: 30pt, height: 30pt, fill: black)";
+         test_click(circle, point(15.0, 15.0) + margin, cursor(1));
+         test_click(circle, point(1.0, 1.0) + margin, None);
+
+         let bowtie =
+             "#polygon(fill: black, (0pt, 0pt), (20pt, 20pt), (20pt, 0pt), (0pt, 20pt))";
+         test_click(bowtie, point(1.0, 2.0) + margin, cursor(1));
+         test_click(bowtie, point(2.0, 1.0) + margin, None);
+         test_click(bowtie, point(19.0, 10.0) + margin, cursor(1));
+
+         let evenodd = r#"#polygon(fill: black, fill-rule: "even-odd",
+             (0pt, 10pt), (30pt, 10pt), (30pt, 20pt), (20pt, 20pt),
+             (20pt, 0pt), (10pt, 0pt), (10pt, 30pt), (20pt, 30pt),
+             (20pt, 20pt), (0pt, 20pt))"#;
+         test_click(evenodd, point(15.0, 15.0) + margin, None);
+         test_click(evenodd, point(5.0, 15.0) + margin, cursor(1));
+         test_click(evenodd, point(15.0, 5.0) + margin, cursor(1));
+     }
+
+     #[test]
+     fn test_jump_from_click_shapes_stroke() {
+         let margin = point(10.0, 10.0);
+         let rect =
+             "#place(dx: 10pt, dy: 10pt, rect(width: 10pt, height: 10pt, stroke: 5pt))";
+         test_click(rect, point(15.0, 15.0) + margin, None);
+         test_click(rect, point(10.0, 15.0) + margin, cursor(27));
+
+         test_click(
+             "#line(angle: 45deg, length: 10pt, stroke: 2pt)",
+             point(2.0, 2.0) + margin,
+             cursor(1),
+         );
+     }
+
      #[test]
      fn test_jump_from_cursor() {
          let s = "*Hello* #box[ABC] World";

@@ -281,6 +404,15 @@
          test_cursor("$a + b$", -3, pos(1, 27.51, 16.83));
      }

+     #[test]
+     fn test_jump_from_cursor_transform() {
+         test_cursor(
+             r#"#rotate(90deg, origin: bottom + left, [hello world])"#,
+             -5,
+             pos(1, 10.0, 16.58),
+         );
+     }
+
      #[test]
      fn test_backlink() {
          let s = "#footnote[Hi]";


@@ -97,7 +97,7 @@ impl World for TestWorld {
      }

      fn font(&self, index: usize) -> Option<Font> {
-         Some(self.base.fonts[index].clone())
+         self.base.fonts.get(index).cloned()
      }

      fn today(&self, _: Option<i64>) -> Option<Datetime> {

@@ -228,7 +228,7 @@ impl FilePos for (&str, isize) {
  #[track_caller]
  fn cursor(source: &Source, cursor: isize) -> usize {
      if cursor < 0 {
-         source.len_bytes().checked_add_signed(cursor + 1).unwrap()
+         source.text().len().checked_add_signed(cursor + 1).unwrap()
      } else {
          cursor as usize
      }


@@ -86,7 +86,7 @@ fn expr_tooltip(world: &dyn IdeWorld, leaf: &LinkedNode) -> Option<Tooltip> {
              *count += 1;
              continue;
          } else if *count > 1 {
-             write!(pieces.last_mut().unwrap(), " (x{count})").unwrap();
+             write!(pieces.last_mut().unwrap(), " (×{count})").unwrap();
          }
      }
      pieces.push(value.repr());

@@ -95,7 +95,7 @@ fn expr_tooltip(world: &dyn IdeWorld, leaf: &LinkedNode) -> Option<Tooltip> {
      if let Some((_, count)) = last {
          if count > 1 {
-             write!(pieces.last_mut().unwrap(), " (x{count})").unwrap();
+             write!(pieces.last_mut().unwrap(), " (×{count})").unwrap();
          }
      }

@@ -269,7 +269,7 @@ fn font_tooltip(world: &dyn IdeWorld, leaf: &LinkedNode) -> Option<Tooltip> {
          .find(|&(family, _)| family.to_lowercase().as_str() == lower.as_str());

      then {
-         let detail = summarize_font_family(iter);
+         let detail = summarize_font_family(iter.collect());
          return Some(Tooltip::Text(detail));
      }
  };

@@ -371,4 +371,11 @@ mod tests {
          test(&world, -2, Side::Before).must_be_none();
          test(&world, -2, Side::After).must_be_text("This star imports `a`, `b`, and `c`");
      }
+
+     #[test]
+     fn test_tooltip_field_call() {
+         let world = TestWorld::new("#import \"other.typ\"\n#other.f()")
+             .with_source("other.typ", "#let f = (x) => 1");
+         test(&world, -4, Side::After).must_be_code("(..) => ..");
+     }
  }


@@ -77,23 +77,20 @@ pub fn plain_docs_sentence(docs: &str) -> EcoString {
  }

  /// Create a short description of a font family.
- pub fn summarize_font_family<'a>(
-     variants: impl Iterator<Item = &'a FontInfo>,
- ) -> EcoString {
-     let mut infos: Vec<_> = variants.collect();
-     infos.sort_by_key(|info| info.variant);
+ pub fn summarize_font_family(mut variants: Vec<&FontInfo>) -> EcoString {
+     variants.sort_by_key(|info| info.variant);

      let mut has_italic = false;
      let mut min_weight = u16::MAX;
      let mut max_weight = 0;
-     for info in &infos {
+     for info in &variants {
          let weight = info.variant.weight.to_number();
          has_italic |= info.variant.style == FontStyle::Italic;
          min_weight = min_weight.min(weight);
          max_weight = min_weight.max(weight);
      }

-     let count = infos.len();
+     let count = variants.len();
      let mut detail = eco_format!("{count} variant{}.", if count == 1 { "" } else { "s" });

      if min_weight == max_weight {

@@ -117,7 +114,9 @@ pub fn globals<'a>(world: &'a dyn IdeWorld, leaf: &LinkedNode) -> &'a Scope {
          | Some(SyntaxKind::Math)
          | Some(SyntaxKind::MathFrac)
          | Some(SyntaxKind::MathAttach)
-     );
+     ) && leaf
+         .prev_leaf()
+         .is_none_or(|prev| !matches!(prev.kind(), SyntaxKind::Hash));

      let library = world.library();
      if in_math {


@@ -128,8 +128,7 @@ impl Downloader {
      }

      // Configure native TLS.
-     let connector =
-         tls.build().map_err(|err| io::Error::new(io::ErrorKind::Other, err))?;
+     let connector = tls.build().map_err(io::Error::other)?;
      builder = builder.tls_connector(Arc::new(connector));

      builder.build().get(url).call()


@@ -30,6 +30,7 @@ icu_provider_adapters = { workspace = true }
  icu_provider_blob = { workspace = true }
  icu_segmenter = { workspace = true }
  kurbo = { workspace = true }
+ memchr = { workspace = true }
  rustybuzz = { workspace = true }
  smallvec = { workspace = true }
  ttf-parser = { workspace = true }


@@ -3,7 +3,9 @@ use std::fmt::Debug;
 use typst_library::diag::{bail, SourceResult};
 use typst_library::engine::Engine;
 use typst_library::foundations::{Resolve, StyleChain};
-use typst_library::layout::grid::resolve::{Cell, CellGrid, LinePosition, Repeatable};
+use typst_library::layout::grid::resolve::{
+    Cell, CellGrid, Header, LinePosition, Repeatable,
+};
 use typst_library::layout::{
     Abs, Axes, Dir, Fr, Fragment, Frame, FrameItem, Length, Point, Region, Regions, Rel,
     Size, Sizing,
@@ -11,7 +13,7 @@ use typst_library::layout::{
 use typst_library::text::TextElem;
 use typst_library::visualize::Geometry;
 use typst_syntax::Span;
-use typst_utils::{MaybeReverseIter, Numeric};
+use typst_utils::Numeric;

 use super::{
     generate_line_segments, hline_stroke_at_column, layout_cell, vline_stroke_at_row,
@@ -30,10 +32,8 @@ pub struct GridLayouter<'a> {
     pub(super) rcols: Vec<Abs>,
     /// The sum of `rcols`.
     pub(super) width: Abs,
-    /// Resolve row sizes, by region.
+    /// Resolved row sizes, by region.
     pub(super) rrows: Vec<Vec<RowPiece>>,
-    /// Rows in the current region.
-    pub(super) lrows: Vec<Row>,
     /// The amount of unbreakable rows remaining to be laid out in the
     /// current unbreakable row group. While this is positive, no region breaks
     /// should occur.
@@ -41,24 +41,155 @@ pub struct GridLayouter<'a> {
/// Rowspans not yet laid out because not all of their spanned rows were /// Rowspans not yet laid out because not all of their spanned rows were
/// laid out yet. /// laid out yet.
pub(super) rowspans: Vec<Rowspan>, pub(super) rowspans: Vec<Rowspan>,
/// The initial size of the current region before we started subtracting. /// Grid layout state for the current region.
pub(super) initial: Size, pub(super) current: Current,
/// Frames for finished regions. /// Frames for finished regions.
pub(super) finished: Vec<Frame>, pub(super) finished: Vec<Frame>,
/// The amount and height of header rows on each finished region.
pub(super) finished_header_rows: Vec<FinishedHeaderRowInfo>,
/// Whether this is an RTL grid. /// Whether this is an RTL grid.
pub(super) is_rtl: bool, pub(super) is_rtl: bool,
/// The simulated header height. /// Currently repeating headers, one per level. Sorted by increasing
/// This field is reset in `layout_header` and properly updated by /// levels.
///
/// Note that some levels may be absent, in particular level 0, which does
/// not exist (so all levels are >= 1).
pub(super) repeating_headers: Vec<&'a Header>,
/// Headers, repeating or not, awaiting their first successful layout.
/// Sorted by increasing levels.
pub(super) pending_headers: &'a [Repeatable<Header>],
/// Next headers to be processed.
pub(super) upcoming_headers: &'a [Repeatable<Header>],
/// State of the row being currently laid out.
///
/// This is kept as a field to avoid passing down too many parameters from
/// `layout_row` into called functions, which would then have to pass them
/// down to `push_row`, which reads these values.
pub(super) row_state: RowState,
/// The span of the grid element.
pub(super) span: Span,
}
/// Grid layout state for the current region. This should be reset or updated
/// on each region break.
pub(super) struct Current {
/// The initial size of the current region before we started subtracting.
pub(super) initial: Size,
/// The height of the region after repeated headers were placed and footers
/// prepared. This also includes pending repeating headers from the start,
/// even if they were not repeated yet, since they will be repeated in the
/// next region anyway (bar orphan prevention).
///
/// This is used to quickly tell if any additional space in the region has
/// been occupied since then, meaning that additional space will become
/// available after a region break (see
/// [`GridLayouter::may_progress_with_repeats`]).
pub(super) initial_after_repeats: Abs,
/// Whether `layouter.regions.may_progress()` was `true` at the top of the
/// region.
pub(super) could_progress_at_top: bool,
/// Rows in the current region.
pub(super) lrows: Vec<Row>,
/// The amount of repeated header rows at the start of the current region.
/// Thus, excludes rows from pending headers (which were placed for the
/// first time).
///
/// Note that `repeating_headers` and `pending_headers` can change if we
/// find a new header inside the region (not at the top), so this field
/// is required to access information from the top of the region.
///
/// This information is used on finish region to calculate the total height
/// of resolved header rows at the top of the region, which is used by
/// multi-page rowspans so they can properly skip the header rows at the
/// top of each region during layout.
pub(super) repeated_header_rows: usize,
/// The end bound of the row range of the last repeating header at the
/// start of the region.
///
/// The last row might have disappeared from layout due to being empty, so
/// this is how we can become aware of where the last header ends without
/// having to check the vector of rows. Line layout uses this to determine
/// when to prioritize the last lines under a header.
///
/// A value of zero indicates no repeated headers were placed.
pub(super) last_repeated_header_end: usize,
/// Stores the length of `lrows` before a sequence of rows equipped with
/// orphan prevention was laid out. In this case, if no more rows without
/// orphan prevention are laid out after those rows before the region ends,
/// the rows will be removed, and there may be an attempt to place them
/// again in the new region. Effectively, this is the mechanism used for
/// orphan prevention of rows.
///
/// At the moment, this is only used by repeated headers (they aren't laid
/// out if alone in the region) and by new headers, which are moved to the
/// `pending_headers` vector and so will automatically be placed again
/// until they fit and are not orphans in at least one region (or exactly
/// one, for non-repeated headers).
pub(super) lrows_orphan_snapshot: Option<usize>,
/// The height of effectively repeating headers, that is, ignoring
/// non-repeating pending headers, in the current region.
///
/// This is used by multi-page auto rows so they can inform cell layout on
/// how much space should be taken by headers if they break across regions.
/// In particular, non-repeating headers only occupy the initial region,
/// but disappear on new regions, so they can be ignored.
///
/// This field is reset on each new region and properly updated by
/// `layout_auto_row` and `layout_relative_row`, and should not be read /// `layout_auto_row` and `layout_relative_row`, and should not be read
/// before all header rows are fully laid out. It is usually fine because /// before all header rows are fully laid out. It is usually fine because
/// header rows themselves are unbreakable, and unbreakable rows do not /// header rows themselves are unbreakable, and unbreakable rows do not
/// need to read this field at all. /// need to read this field at all.
pub(super) header_height: Abs, ///
/// This height is not only computed at the beginning of the region. It is
/// updated whenever a new header is found, subtracting the height of
/// headers which stopped repeating and adding the height of all new
/// headers.
pub(super) repeating_header_height: Abs,
/// The height for each repeating header that was placed in this region.
/// Note that this includes headers not at the top of the region, before
/// their first repetition (pending headers), and excludes headers removed
/// by virtue of a new, conflicting header being found (short-lived
/// headers).
///
/// This is used to know how much to update `repeating_header_height` by
/// when finding a new header and causing existing repeating headers to
/// stop.
pub(super) repeating_header_heights: Vec<Abs>,
/// The simulated footer height for this region. /// The simulated footer height for this region.
///
/// The simulation occurs before any rows are laid out for a region. /// The simulation occurs before any rows are laid out for a region.
pub(super) footer_height: Abs, pub(super) footer_height: Abs,
/// The span of the grid element. }
pub(super) span: Span,
/// Data about the row being laid out right now.
#[derive(Debug, Default)]
pub(super) struct RowState {
/// If this is `Some`, this will be updated by the currently laid out row's
/// height if it is auto or relative. This is used for header height
/// calculation.
pub(super) current_row_height: Option<Abs>,
/// This is `true` when laying out non-short lived headers and footers.
/// That is, headers and footers which are not immediately followed or
/// preceded (respectively) by conflicting headers and footers of same or
/// lower level, or the end or start of the table (respectively), which
/// would cause them to never repeat, even once.
///
/// If this is `false`, the next row to be laid out will remove an active
/// orphan snapshot and will flush pending headers, as there is no risk
/// that they will be orphans anymore.
pub(super) in_active_repeatable: bool,
}
/// Data about laid out repeated header rows for a specific finished region.
#[derive(Debug, Default)]
pub(super) struct FinishedHeaderRowInfo {
/// The amount of repeated headers at the top of the region.
pub(super) repeated_amount: usize,
/// The end bound of the row range of the last repeated header at the top
/// of the region.
pub(super) last_repeated_header_end: usize,
/// The total height of repeated headers at the top of the region.
pub(super) repeated_height: Abs,
} }
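`lrows_orphan_snapshot` acts as a rollback cursor over the rows laid out so far; a simplified, self-contained sketch of that mechanism with illustrative names (not the layouter's API):

struct Rows {
    rows: Vec<u32>,
    orphan_snapshot: Option<usize>,
}

impl Rows {
    /// Remember how many rows existed before orphan-protected rows (headers).
    fn snapshot(&mut self) {
        self.orphan_snapshot = Some(self.rows.len());
    }

    /// Any ordinary row landing afterwards means the headers are not orphans.
    fn flush_orphans(&mut self) {
        self.orphan_snapshot = None;
    }

    /// At a region break, drop rows that would have been alone, unless this
    /// is the last region and there is nowhere else to place them.
    fn finish_region(&mut self, last: bool) {
        if let Some(len) = self.orphan_snapshot.take() {
            if !last {
                self.rows.truncate(len);
            }
        }
    }
}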
/// Details about a resulting row piece. /// Details about a resulting row piece.
@@ -114,14 +245,27 @@ impl<'a> GridLayouter<'a> {
rcols: vec![Abs::zero(); grid.cols.len()], rcols: vec![Abs::zero(); grid.cols.len()],
width: Abs::zero(), width: Abs::zero(),
rrows: vec![], rrows: vec![],
lrows: vec![],
unbreakable_rows_left: 0, unbreakable_rows_left: 0,
rowspans: vec![], rowspans: vec![],
initial: regions.size,
finished: vec![], finished: vec![],
finished_header_rows: vec![],
is_rtl: TextElem::dir_in(styles) == Dir::RTL, is_rtl: TextElem::dir_in(styles) == Dir::RTL,
header_height: Abs::zero(), repeating_headers: vec![],
upcoming_headers: &grid.headers,
pending_headers: Default::default(),
row_state: RowState::default(),
current: Current {
initial: regions.size,
initial_after_repeats: regions.size.y,
could_progress_at_top: regions.may_progress(),
lrows: vec![],
repeated_header_rows: 0,
last_repeated_header_end: 0,
lrows_orphan_snapshot: None,
repeating_header_height: Abs::zero(),
repeating_header_heights: vec![],
footer_height: Abs::zero(), footer_height: Abs::zero(),
},
span, span,
} }
} }
@@ -130,38 +274,57 @@ impl<'a> GridLayouter<'a> {
pub fn layout(mut self, engine: &mut Engine) -> SourceResult<Fragment> { pub fn layout(mut self, engine: &mut Engine) -> SourceResult<Fragment> {
self.measure_columns(engine)?; self.measure_columns(engine)?;
if let Some(Repeatable::Repeated(footer)) = &self.grid.footer { if let Some(footer) = &self.grid.footer {
// Ensure rows in the first region will be aware of the possible if footer.repeated {
// presence of the footer. // Ensure rows in the first region will be aware of the
// possible presence of the footer.
self.prepare_footer(footer, engine, 0)?; self.prepare_footer(footer, engine, 0)?;
if matches!(self.grid.header, None | Some(Repeatable::NotRepeated(_))) { self.regions.size.y -= self.current.footer_height;
// No repeatable header, so we won't subtract it later. self.current.initial_after_repeats = self.regions.size.y;
self.regions.size.y -= self.footer_height;
} }
} }
for y in 0..self.grid.rows.len() { let mut y = 0;
if let Some(Repeatable::Repeated(header)) = &self.grid.header { let mut consecutive_header_count = 0;
if y < header.end { while y < self.grid.rows.len() {
if y == 0 { if let Some(next_header) = self.upcoming_headers.get(consecutive_header_count)
self.layout_header(header, engine, 0)?; {
self.regions.size.y -= self.footer_height; if next_header.range.contains(&y) {
} self.place_new_headers(&mut consecutive_header_count, engine)?;
y = next_header.range.end;
// Skip header rows during normal layout. // Skip header rows during normal layout.
continue; continue;
} }
} }
if let Some(Repeatable::Repeated(footer)) = &self.grid.footer { if let Some(footer) = &self.grid.footer {
if y >= footer.start { if footer.repeated && y >= footer.start {
if y == footer.start { if y == footer.start {
self.layout_footer(footer, engine, self.finished.len())?; self.layout_footer(footer, engine, self.finished.len())?;
self.flush_orphans();
} }
y = footer.end;
continue; continue;
} }
} }
self.layout_row(y, engine, 0)?; self.layout_row(y, engine, 0)?;
// After the first non-header row is placed, pending headers are no
// longer orphans and can repeat, so we move them to repeating
// headers.
//
// Note that this is usually done in `push_row`, since the call to
// `layout_row` above might trigger region breaks (for multi-page
// auto rows), whereas this needs to be called as soon as any part
// of a row is laid out. However, it's possible a row has no
// visible output and thus does not push any rows even though it
// was successfully laid out, in which case we additionally flush
// here just in case.
self.flush_orphans();
y += 1;
} }
self.finish_region(engine, true)?; self.finish_region(engine, true)?;
@@ -184,12 +347,46 @@ impl<'a> GridLayouter<'a> {
self.render_fills_strokes() self.render_fills_strokes()
} }
/// Layout the given row. /// Layout a row with a certain initial state, returning the final state.
#[inline]
pub(super) fn layout_row_with_state(
&mut self,
y: usize,
engine: &mut Engine,
disambiguator: usize,
initial_state: RowState,
) -> SourceResult<RowState> {
// Keep a copy of the previous value in the stack, as this function can
// call itself recursively (e.g. if a region break is triggered and a
// header is placed), so we shouldn't outright overwrite it, but rather
// save and later restore the state when back to this call.
let previous = std::mem::replace(&mut self.row_state, initial_state);
// Keep it as a separate function to allow inlining the return below,
// as it's usually not needed.
self.layout_row_internal(y, engine, disambiguator)?;
Ok(std::mem::replace(&mut self.row_state, previous))
}
/// Layout the given row with the default row state.
#[inline]
pub(super) fn layout_row( pub(super) fn layout_row(
&mut self, &mut self,
y: usize, y: usize,
engine: &mut Engine, engine: &mut Engine,
disambiguator: usize, disambiguator: usize,
) -> SourceResult<()> {
self.layout_row_with_state(y, engine, disambiguator, RowState::default())?;
Ok(())
}
/// Layout the given row using the current state.
pub(super) fn layout_row_internal(
&mut self,
y: usize,
engine: &mut Engine,
disambiguator: usize,
) -> SourceResult<()> { ) -> SourceResult<()> {
// Skip to next region if current one is full, but only for content // Skip to next region if current one is full, but only for content
// rows, not for gutter rows, and only if we aren't laying out an // rows, not for gutter rows, and only if we aren't laying out an
@@ -206,13 +403,18 @@ impl<'a> GridLayouter<'a> {
} }
// Don't layout gutter rows at the top of a region. // Don't layout gutter rows at the top of a region.
if is_content_row || !self.lrows.is_empty() { if is_content_row || !self.current.lrows.is_empty() {
match self.grid.rows[y] { match self.grid.rows[y] {
Sizing::Auto => self.layout_auto_row(engine, disambiguator, y)?, Sizing::Auto => self.layout_auto_row(engine, disambiguator, y)?,
Sizing::Rel(v) => { Sizing::Rel(v) => {
self.layout_relative_row(engine, disambiguator, v, y)? self.layout_relative_row(engine, disambiguator, v, y)?
} }
Sizing::Fr(v) => self.lrows.push(Row::Fr(v, y, disambiguator)), Sizing::Fr(v) => {
if !self.row_state.in_active_repeatable {
self.flush_orphans();
}
self.current.lrows.push(Row::Fr(v, y, disambiguator))
}
} }
} }
@@ -225,8 +427,13 @@ impl<'a> GridLayouter<'a> {
     fn render_fills_strokes(mut self) -> SourceResult<Fragment> {
         let mut finished = std::mem::take(&mut self.finished);
         let frame_amount = finished.len();
-        for ((frame_index, frame), rows) in
-            finished.iter_mut().enumerate().zip(&self.rrows)
+        for (((frame_index, frame), rows), finished_header_rows) in
+            finished.iter_mut().enumerate().zip(&self.rrows).zip(
+                self.finished_header_rows
+                    .iter()
+                    .map(Some)
+                    .chain(std::iter::repeat(None)),
+            )
         {
             if self.rcols.is_empty() || rows.is_empty() {
                 continue;
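The `map(Some).chain(repeat(None))` in the loop header above pads the per-region header info so the zip never cuts the finished frames short; the idiom in isolation:

let frames = ["a", "b", "c"];
let infos = [1];
let paired: Vec<_> = frames
    .iter()
    .zip(infos.iter().map(Some).chain(std::iter::repeat(None)))
    .collect();
assert_eq!(paired, vec![(&"a", Some(&1)), (&"b", None), (&"c", None)]);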
@@ -347,7 +554,8 @@ impl<'a> GridLayouter<'a> {
         let hline_indices = rows
             .iter()
             .map(|piece| piece.y)
-            .chain(std::iter::once(self.grid.rows.len()));
+            .chain(std::iter::once(self.grid.rows.len()))
+            .enumerate();

         // Converts a row to the corresponding index in the vector of
         // hlines.
@@ -372,7 +580,7 @@ impl<'a> GridLayouter<'a> {
         };

         let mut prev_y = None;
-        for (y, dy) in hline_indices.zip(hline_offsets) {
+        for ((i, y), dy) in hline_indices.zip(hline_offsets) {
             // Position of lines below the row index in the previous iteration.
             let expected_prev_line_position = prev_y
                 .map(|prev_y| {
@@ -383,47 +591,40 @@ impl<'a> GridLayouter<'a> {
}) })
.unwrap_or(LinePosition::Before); .unwrap_or(LinePosition::Before);
// FIXME: In the future, directly specify in 'self.rrows' when // Header's lines at the bottom have priority when repeated.
// we place a repeated header rather than its original rows. // This will store the end bound of the last header if the
// That would let us remove most of those verbose checks, both // current iteration is calculating lines under it.
// in 'lines.rs' and here. Those checks also aren't fully let last_repeated_header_end_above = match finished_header_rows {
// accurate either, since they will also trigger when some rows Some(info) if prev_y.is_some() && i == info.repeated_amount => {
// have been removed between the header and what's below it. Some(info.last_repeated_header_end)
let is_under_repeated_header = self }
.grid _ => None,
.header };
.as_ref()
.and_then(Repeatable::as_repeated)
.zip(prev_y)
.is_some_and(|(header, prev_y)| {
// Note: 'y == header.end' would mean we're right below
// the NON-REPEATED header, so that case should return
// false.
prev_y < header.end && y > header.end
});
// If some grid rows were omitted between the previous resolved // If some grid rows were omitted between the previous resolved
// row and the current one, we ensure lines below the previous // row and the current one, we ensure lines below the previous
// row don't "disappear" and are considered, albeit with less // row don't "disappear" and are considered, albeit with less
// priority. However, don't do this when we're below a header, // priority. However, don't do this when we're below a header,
// as it must have more priority instead of less, so it is // as it must have more priority instead of less, so it is
// chained later instead of before. The exception is when the // chained later instead of before (stored in the
// 'header_hlines' variable below). The exception is when the
// last row in the header is removed, in which case we append // last row in the header is removed, in which case we append
// both the lines under the row above us and also (later) the // both the lines under the row above us and also (later) the
// lines under the header's (removed) last row. // lines under the header's (removed) last row.
let prev_lines = prev_y let prev_lines = match prev_y {
.filter(|prev_y| { Some(prev_y)
prev_y + 1 != y if prev_y + 1 != y
&& (!is_under_repeated_header && last_repeated_header_end_above.is_none_or(
|| self |last_repeated_header_end| {
.grid prev_y + 1 != last_repeated_header_end
.header },
.as_ref() ) =>
.and_then(Repeatable::as_repeated) {
.is_some_and(|header| prev_y + 1 != header.end)) get_hlines_at(prev_y + 1)
}) }
.map(|prev_y| get_hlines_at(prev_y + 1))
.unwrap_or(&[]); _ => &[],
};
let expected_hline_position = let expected_hline_position =
expected_line_position(y, y == self.grid.rows.len()); expected_line_position(y, y == self.grid.rows.len());
@@ -441,15 +642,13 @@ impl<'a> GridLayouter<'a> {
}; };
let mut expected_header_line_position = LinePosition::Before; let mut expected_header_line_position = LinePosition::Before;
let header_hlines = if let Some((Repeatable::Repeated(header), prev_y)) = let header_hlines = match (last_repeated_header_end_above, prev_y) {
self.grid.header.as_ref().zip(prev_y) (Some(header_end_above), Some(prev_y))
{ if !self.grid.has_gutter
if is_under_repeated_header
&& (!self.grid.has_gutter
|| matches!( || matches!(
self.grid.rows[prev_y], self.grid.rows[prev_y],
Sizing::Rel(length) if length.is_zero() Sizing::Rel(length) if length.is_zero()
)) ) =>
{ {
// For lines below a header, give priority to the // For lines below a header, give priority to the
// lines originally below the header rather than // lines originally below the header rather than
@@ -468,15 +667,13 @@ impl<'a> GridLayouter<'a> {
// column-gutter is specified, for example. In that // column-gutter is specified, for example. In that
// case, we still repeat the line under the gutter. // case, we still repeat the line under the gutter.
expected_header_line_position = expected_line_position( expected_header_line_position = expected_line_position(
header.end, header_end_above,
header.end == self.grid.rows.len(), header_end_above == self.grid.rows.len(),
); );
get_hlines_at(header.end) get_hlines_at(header_end_above)
} else {
&[]
} }
} else {
&[] _ => &[],
}; };
// The effective hlines to be considered at this row index are // The effective hlines to be considered at this row index are
@@ -529,6 +726,7 @@ impl<'a> GridLayouter<'a> {
                         grid,
                         rows,
                         local_top_y,
+                        last_repeated_header_end_above,
                         in_last_region,
                         y,
                         x,
@@ -574,7 +772,7 @@ impl<'a> GridLayouter<'a> {
// Reverse with RTL so that later columns start first. // Reverse with RTL so that later columns start first.
let mut dx = Abs::zero(); let mut dx = Abs::zero();
for (x, &col) in self.rcols.iter().enumerate().rev_if(self.is_rtl) { for (x, &col) in self.rcols.iter().enumerate() {
let mut dy = Abs::zero(); let mut dy = Abs::zero();
for row in rows { for row in rows {
// We want to only draw the fill starting at the parent // We want to only draw the fill starting at the parent
@@ -643,18 +841,13 @@ impl<'a> GridLayouter<'a> {
.sum() .sum()
}; };
let width = self.cell_spanned_width(cell, x); let width = self.cell_spanned_width(cell, x);
// In the grid, cell colspans expand to the right, let mut pos = Point::new(dx, dy);
// so we're at the leftmost (lowest 'x') column if self.is_rtl {
// spanned by the cell. However, in RTL, cells // In RTL cells expand to the left, thus the
// expand to the left. Therefore, without the // position must additionally be offset by the
// offset below, cell fills would start at the // cell's width.
// rightmost visual position of a cell and extend pos.x = self.width - (dx + width);
// over to unrelated columns to the right in RTL. }
// We avoid this by ensuring the fill starts at the
// very left of the cell, even with colspan > 1.
let offset =
if self.is_rtl { -width + col } else { Abs::zero() };
let pos = Point::new(dx + offset, dy);
let size = Size::new(width, height); let size = Size::new(width, height);
let rect = Geometry::Rect(size).filled(fill); let rect = Geometry::Rect(size).filled(fill);
fills.push((pos, FrameItem::Shape(rect, self.span))); fills.push((pos, FrameItem::Shape(rect, self.span)));
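The RTL branch mirrors the x position around the grid's full width: a cell that starts at `dx` and is `width` wide in LTR starts at `self.width - (dx + width)` in RTL. For example, in a 10pt wide grid, a 3pt wide cell at `dx = 2pt` lands at `10pt - (2pt + 3pt) = 5pt`, that is, 2pt in from the right edge, mirroring its 2pt offset from the left.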
@@ -946,15 +1139,9 @@ impl<'a> GridLayouter<'a> {
let frame = self.layout_single_row(engine, disambiguator, first, y)?; let frame = self.layout_single_row(engine, disambiguator, first, y)?;
self.push_row(frame, y, true); self.push_row(frame, y, true);
if self if let Some(row_height) = &mut self.row_state.current_row_height {
.grid // Add to header height, as we are in a header row.
.header *row_height += first;
.as_ref()
.and_then(Repeatable::as_repeated)
.is_some_and(|header| y < header.end)
{
// Add to header height.
self.header_height += first;
} }
return Ok(()); return Ok(());
@@ -963,19 +1150,21 @@ impl<'a> GridLayouter<'a> {
// Expand all but the last region. // Expand all but the last region.
// Skip the first region if the space is eaten up by an fr row. // Skip the first region if the space is eaten up by an fr row.
let len = resolved.len(); let len = resolved.len();
for ((i, region), target) in self for ((i, region), target) in
.regions self.regions
.iter() .iter()
.enumerate() .enumerate()
.zip(&mut resolved[..len - 1]) .zip(&mut resolved[..len - 1])
.skip(self.lrows.iter().any(|row| matches!(row, Row::Fr(..))) as usize) .skip(self.current.lrows.iter().any(|row| matches!(row, Row::Fr(..)))
as usize)
{ {
// Subtract header and footer heights from the region height when // Subtract header and footer heights from the region height when
// it's not the first. // it's not the first. Ignore non-repeating headers as they only
// appear on the first region by definition.
target.set_max( target.set_max(
region.y region.y
- if i > 0 { - if i > 0 {
self.header_height + self.footer_height self.current.repeating_header_height + self.current.footer_height
} else { } else {
Abs::zero() Abs::zero()
}, },
@@ -1186,25 +1375,19 @@ impl<'a> GridLayouter<'a> {
let resolved = v.resolve(self.styles).relative_to(self.regions.base().y); let resolved = v.resolve(self.styles).relative_to(self.regions.base().y);
let frame = self.layout_single_row(engine, disambiguator, resolved, y)?; let frame = self.layout_single_row(engine, disambiguator, resolved, y)?;
if self if let Some(row_height) = &mut self.row_state.current_row_height {
.grid // Add to header height, as we are in a header row.
.header *row_height += resolved;
.as_ref()
.and_then(Repeatable::as_repeated)
.is_some_and(|header| y < header.end)
{
// Add to header height.
self.header_height += resolved;
} }
// Skip to fitting region, but only if we aren't part of an unbreakable // Skip to fitting region, but only if we aren't part of an unbreakable
// row group. We use 'in_last_with_offset' so our 'in_last' call // row group. We use 'may_progress_with_repeats' to stop trying if we
// properly considers that a header and a footer would be added on each // would skip to a region with the same height and where the same
// region break. // headers would be repeated.
let height = frame.height(); let height = frame.height();
while self.unbreakable_rows_left == 0 while self.unbreakable_rows_left == 0
&& !self.regions.size.y.fits(height) && !self.regions.size.y.fits(height)
&& !in_last_with_offset(self.regions, self.header_height + self.footer_height) && self.may_progress_with_repeats()
{ {
self.finish_region(engine, false)?; self.finish_region(engine, false)?;
@@ -1236,10 +1419,9 @@ impl<'a> GridLayouter<'a> {
} }
let mut output = Frame::soft(Size::new(self.width, height)); let mut output = Frame::soft(Size::new(self.width, height));
let mut pos = Point::zero(); let mut offset = Point::zero();
// Reverse the column order when using RTL. for (x, &rcol) in self.rcols.iter().enumerate() {
for (x, &rcol) in self.rcols.iter().enumerate().rev_if(self.is_rtl) {
if let Some(cell) = self.grid.cell(x, y) { if let Some(cell) = self.grid.cell(x, y) {
// Rowspans have a separate layout step // Rowspans have a separate layout step
if cell.rowspan.get() == 1 { if cell.rowspan.get() == 1 {
@@ -1257,25 +1439,17 @@ impl<'a> GridLayouter<'a> {
let frame = let frame =
layout_cell(cell, engine, disambiguator, self.styles, pod)? layout_cell(cell, engine, disambiguator, self.styles, pod)?
.into_frame(); .into_frame();
let mut pos = pos; let mut pos = offset;
if self.is_rtl { if self.is_rtl {
// In the grid, cell colspans expand to the right, // In RTL cells expand to the left, thus the position
// so we're at the leftmost (lowest 'x') column // must additionally be offset by the cell's width.
// spanned by the cell. However, in RTL, cells pos.x = self.width - (pos.x + width);
// expand to the left. Therefore, without the
// offset below, the cell's contents would be laid out
// starting at its rightmost visual position and extend
// over to unrelated cells to its right in RTL.
// We avoid this by ensuring the rendered cell starts at
// the very left of the cell, even with colspan > 1.
let offset = -width + rcol;
pos.x += offset;
} }
output.push_frame(pos, frame); output.push_frame(pos, frame);
} }
} }
pos.x += rcol; offset.x += rcol;
} }
Ok(output) Ok(output)
@@ -1302,8 +1476,8 @@ impl<'a> GridLayouter<'a> {
pod.backlog = &heights[1..]; pod.backlog = &heights[1..];
// Layout the row. // Layout the row.
let mut pos = Point::zero(); let mut offset = Point::zero();
for (x, &rcol) in self.rcols.iter().enumerate().rev_if(self.is_rtl) { for (x, &rcol) in self.rcols.iter().enumerate() {
if let Some(cell) = self.grid.cell(x, y) { if let Some(cell) = self.grid.cell(x, y) {
// Rowspans have a separate layout step // Rowspans have a separate layout step
if cell.rowspan.get() == 1 { if cell.rowspan.get() == 1 {
@@ -1314,17 +1488,19 @@ impl<'a> GridLayouter<'a> {
let fragment = let fragment =
layout_cell(cell, engine, disambiguator, self.styles, pod)?; layout_cell(cell, engine, disambiguator, self.styles, pod)?;
for (output, frame) in outputs.iter_mut().zip(fragment) { for (output, frame) in outputs.iter_mut().zip(fragment) {
let mut pos = pos; let mut pos = offset;
if self.is_rtl { if self.is_rtl {
let offset = -width + rcol; // In RTL cells expand to the left, thus the
pos.x += offset; // position must additionally be offset by the
// cell's width.
pos.x = self.width - (offset.x + width);
} }
output.push_frame(pos, frame); output.push_frame(pos, frame);
} }
} }
} }
pos.x += rcol; offset.x += rcol;
} }
Ok(Fragment::frames(outputs)) Ok(Fragment::frames(outputs))
@@ -1335,8 +1511,13 @@ impl<'a> GridLayouter<'a> {
/// will be pushed for this particular row. It can be `false` for rows /// will be pushed for this particular row. It can be `false` for rows
/// spanning multiple regions. /// spanning multiple regions.
fn push_row(&mut self, frame: Frame, y: usize, is_last: bool) { fn push_row(&mut self, frame: Frame, y: usize, is_last: bool) {
if !self.row_state.in_active_repeatable {
// There is now a row after the rows equipped with orphan
// prevention, so no need to keep moving them anymore.
self.flush_orphans();
}
self.regions.size.y -= frame.height(); self.regions.size.y -= frame.height();
self.lrows.push(Row::Frame(frame, y, is_last)); self.current.lrows.push(Row::Frame(frame, y, is_last));
} }
/// Finish rows for one region. /// Finish rows for one region.
@@ -1345,68 +1526,73 @@ impl<'a> GridLayouter<'a> {
engine: &mut Engine, engine: &mut Engine,
last: bool, last: bool,
) -> SourceResult<()> { ) -> SourceResult<()> {
// The latest rows have orphan prevention (headers) and no other rows
// were placed, so remove those rows and try again in a new region,
// unless this is the last region.
if let Some(orphan_snapshot) = self.current.lrows_orphan_snapshot.take() {
if !last {
self.current.lrows.truncate(orphan_snapshot);
self.current.repeated_header_rows =
self.current.repeated_header_rows.min(orphan_snapshot);
if orphan_snapshot == 0 {
// Removed all repeated headers.
self.current.last_repeated_header_end = 0;
}
}
}
if self if self
.current
.lrows .lrows
.last() .last()
.is_some_and(|row| self.grid.is_gutter_track(row.index())) .is_some_and(|row| self.grid.is_gutter_track(row.index()))
{ {
// Remove the last row in the region if it is a gutter row. // Remove the last row in the region if it is a gutter row.
self.lrows.pop().unwrap(); self.current.lrows.pop().unwrap();
self.current.repeated_header_rows =
self.current.repeated_header_rows.min(self.current.lrows.len());
} }
// If no rows other than the footer have been laid out so far, and // If no rows other than the footer have been laid out so far
// there are rows beside the footer, then don't lay it out at all. // (e.g. due to header orphan prevention), and there are rows
// This check doesn't apply, and is thus overridden, when there is a // beside the footer, then don't lay it out at all.
// header. //
let mut footer_would_be_orphan = self.lrows.is_empty() // It is worth noting that the footer is made non-repeatable at
&& !in_last_with_offset( // the grid resolving stage if it is short-lived, that is, if
self.regions, // it is at the start of the table (or right after headers at
self.header_height + self.footer_height, // the start of the table).
) //
&& self // TODO(subfooters): explicitly check for short-lived footers.
.grid // TODO(subfooters): widow prevention for non-repeated footers with a
.footer // similar mechanism / when implementing multiple footers.
.as_ref() let footer_would_be_widow = matches!(&self.grid.footer, Some(footer) if footer.repeated)
.and_then(Repeatable::as_repeated) && self.current.lrows.is_empty()
.is_some_and(|footer| footer.start != 0); && self.current.could_progress_at_top;
if let Some(Repeatable::Repeated(header)) = &self.grid.header {
if self.grid.rows.len() > header.end
&& self
.grid
.footer
.as_ref()
.and_then(Repeatable::as_repeated)
.is_none_or(|footer| footer.start != header.end)
&& self.lrows.last().is_some_and(|row| row.index() < header.end)
&& !in_last_with_offset(
self.regions,
self.header_height + self.footer_height,
)
{
// Header and footer would be alone in this region, but there are more
// rows beyond the header and the footer. Push an empty region.
self.lrows.clear();
footer_would_be_orphan = true;
}
}
let mut laid_out_footer_start = None; let mut laid_out_footer_start = None;
if let Some(Repeatable::Repeated(footer)) = &self.grid.footer { if !footer_would_be_widow {
// Don't layout the footer if it would be alone with the header in if let Some(footer) = &self.grid.footer {
// the page, and don't layout it twice. // Don't layout the footer if it would be alone with the header
if !footer_would_be_orphan // in the page (hence the widow check), and don't layout it
&& self.lrows.iter().all(|row| row.index() < footer.start) // twice (check below).
//
// TODO(subfooters): this check can be replaced by a vector of
// repeating footers in the future, and/or some "pending
// footers" vector for footers we're about to place.
if footer.repeated
&& self.current.lrows.iter().all(|row| row.index() < footer.start)
{ {
laid_out_footer_start = Some(footer.start); laid_out_footer_start = Some(footer.start);
self.layout_footer(footer, engine, self.finished.len())?; self.layout_footer(footer, engine, self.finished.len())?;
} }
} }
}
// Determine the height of existing rows in the region. // Determine the height of existing rows in the region.
let mut used = Abs::zero(); let mut used = Abs::zero();
let mut fr = Fr::zero(); let mut fr = Fr::zero();
for row in &self.lrows { for row in &self.current.lrows {
match row { match row {
Row::Frame(frame, _, _) => used += frame.height(), Row::Frame(frame, _, _) => used += frame.height(),
Row::Fr(v, _, _) => fr += *v, Row::Fr(v, _, _) => fr += *v,
@@ -1415,9 +1601,9 @@ impl<'a> GridLayouter<'a> {
// Determine the size of the grid in this region, expanding fully if // Determine the size of the grid in this region, expanding fully if
// there are fr rows. // there are fr rows.
let mut size = Size::new(self.width, used).min(self.initial); let mut size = Size::new(self.width, used).min(self.current.initial);
if fr.get() > 0.0 && self.initial.y.is_finite() { if fr.get() > 0.0 && self.current.initial.y.is_finite() {
size.y = self.initial.y; size.y = self.current.initial.y;
} }
// The frame for the region. // The frame for the region.
@@ -1425,9 +1611,10 @@ impl<'a> GridLayouter<'a> {
let mut pos = Point::zero(); let mut pos = Point::zero();
let mut rrows = vec![]; let mut rrows = vec![];
let current_region = self.finished.len(); let current_region = self.finished.len();
let mut repeated_header_row_height = Abs::zero();
// Place finished rows and layout fractional rows. // Place finished rows and layout fractional rows.
for row in std::mem::take(&mut self.lrows) { for (i, row) in std::mem::take(&mut self.current.lrows).into_iter().enumerate() {
let (frame, y, is_last) = match row { let (frame, y, is_last) = match row {
Row::Frame(frame, y, is_last) => (frame, y, is_last), Row::Frame(frame, y, is_last) => (frame, y, is_last),
Row::Fr(v, y, disambiguator) => { Row::Fr(v, y, disambiguator) => {
@@ -1438,6 +1625,9 @@ impl<'a> GridLayouter<'a> {
}; };
let height = frame.height(); let height = frame.height();
if i < self.current.repeated_header_rows {
repeated_header_row_height += height;
}
// Ensure rowspans which span this row will have enough space to // Ensure rowspans which span this row will have enough space to
// be laid out over it later. // be laid out over it later.
@@ -1516,7 +1706,11 @@ impl<'a> GridLayouter<'a> {
// we have to check the same index again in the next // we have to check the same index again in the next
// iteration. // iteration.
let rowspan = self.rowspans.remove(i); let rowspan = self.rowspans.remove(i);
self.layout_rowspan(rowspan, Some((&mut output, &rrows)), engine)?; self.layout_rowspan(
rowspan,
Some((&mut output, repeated_header_row_height)),
engine,
)?;
} else { } else {
i += 1; i += 1;
} }
@@ -1527,21 +1721,40 @@ impl<'a> GridLayouter<'a> {
pos.y += height; pos.y += height;
} }
self.finish_region_internal(output, rrows); self.finish_region_internal(
output,
rrows,
FinishedHeaderRowInfo {
repeated_amount: self.current.repeated_header_rows,
last_repeated_header_end: self.current.last_repeated_header_end,
repeated_height: repeated_header_row_height,
},
);
if !last { if !last {
self.current.repeated_header_rows = 0;
self.current.last_repeated_header_end = 0;
self.current.repeating_header_height = Abs::zero();
self.current.repeating_header_heights.clear();
let disambiguator = self.finished.len(); let disambiguator = self.finished.len();
if let Some(Repeatable::Repeated(footer)) = &self.grid.footer { if let Some(footer) =
self.grid.footer.as_ref().and_then(Repeatable::as_repeated)
{
self.prepare_footer(footer, engine, disambiguator)?; self.prepare_footer(footer, engine, disambiguator)?;
} }
if let Some(Repeatable::Repeated(header)) = &self.grid.header {
// Add a header to the new region.
self.layout_header(header, engine, disambiguator)?;
}
// Ensure rows don't try to overrun the footer. // Ensure rows don't try to overrun the footer.
self.regions.size.y -= self.footer_height; // Note that header layout will only subtract this again if it has
// to skip regions to fit headers, so there is no risk of
// subtracting this twice.
self.regions.size.y -= self.current.footer_height;
self.current.initial_after_repeats = self.regions.size.y;
if !self.repeating_headers.is_empty() || !self.pending_headers.is_empty() {
// Add headers to the new region.
self.layout_active_headers(engine)?;
}
} }
Ok(()) Ok(())
@@ -1553,11 +1766,26 @@ impl<'a> GridLayouter<'a> {
&mut self, &mut self,
output: Frame, output: Frame,
resolved_rows: Vec<RowPiece>, resolved_rows: Vec<RowPiece>,
header_row_info: FinishedHeaderRowInfo,
) { ) {
self.finished.push(output); self.finished.push(output);
self.rrows.push(resolved_rows); self.rrows.push(resolved_rows);
self.regions.next(); self.regions.next();
self.initial = self.regions.size; self.current.initial = self.regions.size;
// Repeats haven't been laid out yet, so in the meantime, this will
// represent the initial height after repeats laid out so far, and will
// be gradually updated when preparing footers and repeating headers.
self.current.initial_after_repeats = self.current.initial.y;
self.current.could_progress_at_top = self.regions.may_progress();
if !self.grid.headers.is_empty() {
self.finished_header_rows.push(header_row_info);
}
// Ensure orphan prevention is handled before resolving rows.
debug_assert!(self.current.lrows_orphan_snapshot.is_none());
} }
} }
@@ -1572,13 +1800,3 @@ pub(super) fn points(
         offset
     })
 }
-
-/// Checks if the first region of a sequence of regions is the last usable
-/// region, assuming that the last region will always be occupied by some
-/// specific offset height, even after calling `.next()`, due to some
-/// additional logic which adds content automatically on each region turn (in
-/// our case, headers).
-pub(super) fn in_last_with_offset(regions: Regions<'_>, offset: Abs) -> bool {
-    regions.backlog.is_empty()
-        && regions.last.is_none_or(|height| regions.size.y + offset == height)
-}
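Judging by its signature and the tail kept above, `points` folds a sequence of extents into running end offsets; the same idea on plain numbers (a sketch, not the actual helper):

let widths = [2.0_f64, 3.0, 5.0];
let ends: Vec<f64> = widths
    .iter()
    .scan(0.0, |offset, w| {
        *offset += w;
        Some(*offset)
    })
    .collect();
assert_eq!(ends, vec![2.0, 5.0, 10.0]);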
View File
@@ -391,10 +391,12 @@ pub fn vline_stroke_at_row(
 ///
 /// This function assumes columns are sorted by increasing `x`, and rows are
 /// sorted by increasing `y`.
+#[allow(clippy::too_many_arguments)]
 pub fn hline_stroke_at_column(
     grid: &CellGrid,
     rows: &[RowPiece],
     local_top_y: Option<usize>,
+    header_end_above: Option<usize>,
     in_last_region: bool,
     y: usize,
     x: usize,
@@ -499,17 +501,15 @@ pub fn hline_stroke_at_column(
     // Top border stroke and header stroke are generally prioritized, unless
     // they don't have explicit hline overrides and one or more user-provided
     // hlines would appear at the same position, which then are prioritized.
-    let top_stroke_comes_from_header = grid
-        .header
-        .as_ref()
-        .and_then(Repeatable::as_repeated)
-        .zip(local_top_y)
-        .is_some_and(|(header, local_top_y)| {
-            // Ensure the row above us is a repeated header.
-            // FIXME: Make this check more robust when headers at arbitrary
-            // positions are added.
-            local_top_y < header.end && y > header.end
-        });
+    let top_stroke_comes_from_header = header_end_above.zip(local_top_y).is_some_and(
+        |(last_repeated_header_end, local_top_y)| {
+            // Check if the last repeated header row is above this line.
+            //
+            // Note that `y == last_repeated_header_end` is impossible for a
+            // strictly repeated header (not in its original position).
+            local_top_y < last_repeated_header_end && y > last_repeated_header_end
+        },
+    );

     // Prioritize the footer's top stroke as well where applicable.
     let bottom_stroke_comes_from_footer = grid
@@ -637,7 +637,7 @@ mod test {
             },
             vec![],
             vec![],
-            None,
+            vec![],
             None,
             entries,
         )
@@ -1175,7 +1175,7 @@ mod test {
             },
             vec![],
             vec![],
-            None,
+            vec![],
             None,
             entries,
         )
@@ -1268,6 +1268,7 @@ mod test {
             grid,
             &rows,
             y.checked_sub(1),
+            None,
             true,
             y,
             x,
@@ -1461,6 +1462,7 @@ mod test {
             grid,
             &rows,
             y.checked_sub(1),
+            None,
             true,
             y,
             x,
@@ -1506,6 +1508,7 @@ mod test {
             grid,
             &rows,
             if y == 4 { Some(2) } else { y.checked_sub(1) },
+            None,
             true,
             y,
             x,
View File
@@ -1,57 +1,446 @@
use std::ops::Deref;
use typst_library::diag::SourceResult; use typst_library::diag::SourceResult;
use typst_library::engine::Engine; use typst_library::engine::Engine;
use typst_library::layout::grid::resolve::{Footer, Header, Repeatable}; use typst_library::layout::grid::resolve::{Footer, Header, Repeatable};
use typst_library::layout::{Abs, Axes, Frame, Regions}; use typst_library::layout::{Abs, Axes, Frame, Regions};
use super::layouter::GridLayouter; use super::layouter::{GridLayouter, RowState};
use super::rowspans::UnbreakableRowGroup; use super::rowspans::UnbreakableRowGroup;
impl GridLayouter<'_> { impl<'a> GridLayouter<'a> {
/// Layouts the header's rows. /// Checks whether a region break could help a situation where we're out of
/// Skips regions as necessary. /// space for the next row. The criteria are:
pub fn layout_header( ///
/// 1. If we could progress at the top of the region, that indicates the
/// region has a backlog, or (if we're at the first region) a region break
/// is at all possible (`regions.last` is `Some()`), so that's sufficient.
///
/// 2. Otherwise, we may progress if another region break is possible
/// (`regions.last` is still `Some()`) and non-repeating rows have been
/// placed, since that means the space they occupy will be available in the
/// next region.
#[inline]
pub fn may_progress_with_repeats(&self) -> bool {
// TODO(subfooters): check below isn't enough to detect non-repeating
// footers... we can also change 'initial_after_repeats' to stop being
// calculated if there were any non-repeating footers.
self.current.could_progress_at_top
|| self.regions.last.is_some()
&& self.regions.size.y != self.current.initial_after_repeats
}
pub fn place_new_headers(
&mut self,
consecutive_header_count: &mut usize,
engine: &mut Engine,
) -> SourceResult<()> {
*consecutive_header_count += 1;
let (consecutive_headers, new_upcoming_headers) =
self.upcoming_headers.split_at(*consecutive_header_count);
if new_upcoming_headers.first().is_some_and(|next_header| {
consecutive_headers.last().is_none_or(|latest_header| {
!latest_header.short_lived
&& next_header.range.start == latest_header.range.end
}) && !next_header.short_lived
}) {
// More headers coming, so wait until we reach them.
return Ok(());
}
self.upcoming_headers = new_upcoming_headers;
*consecutive_header_count = 0;
let [first_header, ..] = consecutive_headers else {
self.flush_orphans();
return Ok(());
};
// Assuming non-conflicting headers sorted by increasing y, this must
// be the header with the lowest level (sorted by increasing levels).
let first_level = first_header.level;
// Stop repeating conflicting headers, even if the new headers are
// short-lived or won't repeat.
//
// If we go to a new region before the new headers fit alongside their
// children (or in general, for short-lived), the old headers should
// not be displayed anymore.
let first_conflicting_pos =
self.repeating_headers.partition_point(|h| h.level < first_level);
self.repeating_headers.truncate(first_conflicting_pos);
// Ensure upcoming rows won't see that these headers will occupy any
// space in future regions anymore.
for removed_height in
self.current.repeating_header_heights.drain(first_conflicting_pos..)
{
self.current.repeating_header_height -= removed_height;
}
// Layout short-lived headers immediately.
if consecutive_headers.last().is_some_and(|h| h.short_lived) {
// No chance of orphans as we're immediately placing conflicting
// headers afterwards, which basically are not headers, for all intents
// and purposes. It is therefore guaranteed that all new headers have
// been placed at least once.
self.flush_orphans();
// Layout each conflicting header independently, without orphan
// prevention (as they don't go into 'pending_headers').
// These headers are short-lived as they are immediately followed by a
// header of the same or lower level, such that they never actually get
// to repeat.
self.layout_new_headers(consecutive_headers, true, engine)?;
} else {
// Let's try to place pending headers at least once.
// This might be a waste as we could generate an orphan and thus have
// to try to place old and new headers all over again, but that happens
// for every new region anyway, so it's rather unavoidable.
let snapshot_created =
self.layout_new_headers(consecutive_headers, false, engine)?;
// Queue the new headers for layout. They will remain in this
// vector due to orphan prevention.
//
// After the first subsequent row is laid out, move to repeating, as
// it's then confirmed the headers won't be moved due to orphan
// prevention anymore.
self.pending_headers = consecutive_headers;
if !snapshot_created {
// Region probably couldn't progress.
//
// Mark new pending headers as final and ensure there isn't a
// snapshot.
self.flush_orphans();
}
}
Ok(())
}
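`partition_point` returns the index of the first repeating header whose level is not below the new first level, so truncating there drops exactly the conflicting ones; the same call on plain numbers, assuming the ascending-level invariant documented above:

let mut levels = vec![1, 2, 4];
// A new header of level 3 conflicts with every repeating header of level >= 3.
let first_conflicting = levels.partition_point(|&level| level < 3);
levels.truncate(first_conflicting);
assert_eq!(levels, vec![1, 2]);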
/// Lays out rows belonging to a header, returning the calculated header
/// height only for that header. Indicates to the laid out rows that they
/// should inform their laid out heights if appropriate (auto or fixed
/// size rows only).
#[inline]
fn layout_header_rows(
&mut self, &mut self,
header: &Header, header: &Header,
engine: &mut Engine, engine: &mut Engine,
disambiguator: usize, disambiguator: usize,
) -> SourceResult<()> { as_short_lived: bool,
let header_rows = ) -> SourceResult<Abs> {
self.simulate_header(header, &self.regions, engine, disambiguator)?; let mut header_height = Abs::zero();
for y in header.range.clone() {
header_height += self
.layout_row_with_state(
y,
engine,
disambiguator,
RowState {
current_row_height: Some(Abs::zero()),
in_active_repeatable: !as_short_lived,
},
)?
.current_row_height
.unwrap_or_default();
}
Ok(header_height)
}
/// This function should be called each time an additional row has been
/// laid out in a region to indicate that orphan prevention has succeeded.
///
/// It removes the current orphan snapshot and flushes pending headers,
/// such that a non-repeating header won't try to be laid out again
/// anymore, and a repeating header will begin to be part of
/// `repeating_headers`.
pub fn flush_orphans(&mut self) {
self.current.lrows_orphan_snapshot = None;
self.flush_pending_headers();
}
/// Indicates all currently pending headers have been successfully placed
/// once, since another row has been placed after them, so they are
/// certainly not orphans.
pub fn flush_pending_headers(&mut self) {
if self.pending_headers.is_empty() {
return;
}
for header in self.pending_headers {
if header.repeated {
// Vector remains sorted by increasing levels:
// - 'pending_headers' themselves are sorted, since we only
// push non-mutually-conflicting headers at a time.
// - Before pushing new pending headers in
// 'layout_new_pending_headers', we truncate repeating headers
// to remove anything with the same or higher levels as the
// first pending header.
// - Assuming it was sorted before, that truncation only keeps
// elements with a lower level.
// - Therefore, by pushing this header to the end, it will have
// a level larger than all the previous headers, and is thus
// in its 'correct' position.
self.repeating_headers.push(header);
}
}
self.pending_headers = Default::default();
}
/// Lays out the rows of repeating and pending headers at the top of the
/// region.
///
/// Assumes the footer height for the current region has already been
/// calculated. Skips regions as necessary to fit all headers and all
/// footers.
pub fn layout_active_headers(&mut self, engine: &mut Engine) -> SourceResult<()> {
// Generate different locations for content in headers across its
// repetitions by assigning a unique number for each one.
let disambiguator = self.finished.len();
let header_height = self.simulate_header_height(
self.repeating_headers
.iter()
.copied()
.chain(self.pending_headers.iter().map(Repeatable::deref)),
&self.regions,
engine,
disambiguator,
)?;
// We already take the footer into account below.
// While skipping regions, footer height won't be automatically
// re-calculated until the end.
let mut skipped_region = false; let mut skipped_region = false;
while self.unbreakable_rows_left == 0 while self.unbreakable_rows_left == 0
&& !self.regions.size.y.fits(header_rows.height + self.footer_height) && !self.regions.size.y.fits(header_height)
&& self.regions.may_progress() && self.may_progress_with_repeats()
{ {
// Advance regions without any output until we can place the // Advance regions without any output until we can place the
// header and the footer. // header and the footer.
self.finish_region_internal(Frame::soft(Axes::splat(Abs::zero())), vec![]); self.finish_region_internal(
Frame::soft(Axes::splat(Abs::zero())),
vec![],
Default::default(),
);
// TODO(layout model): re-calculate heights of headers and footers
// on each region if 'full' changes? (Assuming height doesn't
// change for now...)
//
// Would remove the footer height update below (move it here).
skipped_region = true; skipped_region = true;
self.regions.size.y -= self.current.footer_height;
self.current.initial_after_repeats = self.regions.size.y;
} }
if let Some(footer) = &self.grid.footer {
if footer.repeated && skipped_region {
// Simulate the footer again; the region's 'full' might have
// changed.
self.regions.size.y += self.current.footer_height;
self.current.footer_height = self
.simulate_footer(footer, &self.regions, engine, disambiguator)?
.height;
self.regions.size.y -= self.current.footer_height;
}
}
let repeating_header_rows =
total_header_row_count(self.repeating_headers.iter().copied());
let pending_header_rows =
total_header_row_count(self.pending_headers.iter().map(Repeatable::deref));
// Group of headers is unbreakable.
// Thus, no risk of 'finish_region' being recursively called from
// within 'layout_row'.
self.unbreakable_rows_left += repeating_header_rows + pending_header_rows;
self.current.last_repeated_header_end =
self.repeating_headers.last().map(|h| h.range.end).unwrap_or_default();
// Reset the header height for this region. // Reset the header height for this region.
// It will be re-calculated when laying out each header row. // It will be re-calculated when laying out each header row.
self.header_height = Abs::zero(); self.current.repeating_header_height = Abs::zero();
self.current.repeating_header_heights.clear();
if let Some(Repeatable::Repeated(footer)) = &self.grid.footer { debug_assert!(self.current.lrows.is_empty());
if skipped_region { debug_assert!(self.current.lrows_orphan_snapshot.is_none());
// Simulate the footer again; the region's 'full' might have let may_progress = self.may_progress_with_repeats();
// changed.
self.footer_height = self if may_progress {
.simulate_footer(footer, &self.regions, engine, disambiguator)? // Enable orphan prevention for headers at the top of the region.
.height; // Otherwise, we will flush pending headers below, after laying
// them out.
//
// It is very rare for this to make a difference as we're usually
// at the 'last' region after the first skip, at which the snapshot
// is handled by 'layout_new_headers'. Either way, we keep this
// here for correctness.
self.current.lrows_orphan_snapshot = Some(self.current.lrows.len());
}
// Use indices to avoid double borrow. We don't mutate headers in
// 'layout_row' so this is fine.
let mut i = 0;
while let Some(&header) = self.repeating_headers.get(i) {
let header_height =
self.layout_header_rows(header, engine, disambiguator, false)?;
self.current.repeating_header_height += header_height;
// We assume that this vector will be sorted according
// to increasing levels like 'repeating_headers' and
// 'pending_headers' - and, in particular, their union, as this
// vector is pushed repeating heights from both.
//
// This is guaranteed by:
// 1. We always push pending headers after repeating headers,
// as we assume they don't conflict because we remove
// conflicting repeating headers when pushing a new pending
// header.
//
// 2. We push in the same order as each.
//
// 3. This vector is also modified when pushing a new pending
// header, where we remove heights for conflicting repeating
// headers which have now stopped repeating. They are always at
// the end and new pending headers respect the existing sort,
// so the vector will remain sorted.
self.current.repeating_header_heights.push(header_height);
i += 1;
}
self.current.repeated_header_rows = self.current.lrows.len();
self.current.initial_after_repeats = self.regions.size.y;
let mut has_non_repeated_pending_header = false;
for header in self.pending_headers {
if !header.repeated {
self.current.initial_after_repeats = self.regions.size.y;
has_non_repeated_pending_header = true;
}
let header_height =
self.layout_header_rows(header, engine, disambiguator, false)?;
if header.repeated {
self.current.repeating_header_height += header_height;
self.current.repeating_header_heights.push(header_height);
} }
} }
// Header is unbreakable. if !has_non_repeated_pending_header {
// Thus, no risk of 'finish_region' being recursively called from self.current.initial_after_repeats = self.regions.size.y;
// within 'layout_row'.
self.unbreakable_rows_left += header.end;
for y in 0..header.end {
self.layout_row(y, engine, disambiguator)?;
} }
if !may_progress {
// Flush pending headers immediately, as placing them again later
// won't help.
self.flush_orphans();
}
Ok(()) Ok(())
} }
/// Lays out headers found for the first time during row layout.
///
/// If 'short_lived' is true, these headers are immediately followed by
/// a conflicting header, so it is assumed they will not be pushed to
/// pending headers.
///
/// Returns whether orphan prevention was successfully setup, or couldn't
/// due to short-lived headers or the region couldn't progress.
pub fn layout_new_headers(
&mut self,
headers: &'a [Repeatable<Header>],
short_lived: bool,
engine: &mut Engine,
) -> SourceResult<bool> {
// At first, only consider the height of the given headers. However,
// for upcoming regions, we will have to consider repeating headers as
// well.
let header_height = self.simulate_header_height(
headers.iter().map(Repeatable::deref),
&self.regions,
engine,
0,
)?;
while self.unbreakable_rows_left == 0
&& !self.regions.size.y.fits(header_height)
&& self.may_progress_with_repeats()
{
// Note that, after the first region skip, the new headers will go
// at the top of the region, but after the repeating headers that
// remained (which will be automatically placed in 'finish_region').
self.finish_region(engine, false)?;
}
// Remove new headers at the end of the region if the upcoming row
// doesn't fit.
// TODO(subfooters): what if there is a footer right after it?
let should_snapshot = !short_lived
&& self.current.lrows_orphan_snapshot.is_none()
&& self.may_progress_with_repeats();
if should_snapshot {
        // If we don't enter this branch while laying out non-short-lived
        // headers, that means we will have to immediately flush pending
        // headers and mark them as final, since trying to place them on
        // the next page won't help get more space.
self.current.lrows_orphan_snapshot = Some(self.current.lrows.len());
}
let mut at_top = self.regions.size.y == self.current.initial_after_repeats;
self.unbreakable_rows_left +=
total_header_row_count(headers.iter().map(Repeatable::deref));
for header in headers {
let header_height = self.layout_header_rows(header, engine, 0, false)?;
// Only store this header height if it is actually going to
// become a pending header. Otherwise, pretend it's not a
// header... This is fine for consumers of 'header_height' as
// it is guaranteed this header won't appear in a future
// region, so multi-page rows and cells can effectively ignore
// this header.
if !short_lived && header.repeated {
self.current.repeating_header_height += header_height;
self.current.repeating_header_heights.push(header_height);
if at_top {
self.current.initial_after_repeats = self.regions.size.y;
}
} else {
at_top = false;
}
}
Ok(should_snapshot)
}
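The snapshot-and-rollback idea behind `lrows_orphan_snapshot` above can be shown in isolation. The following is a minimal, self-contained sketch under a toy `Region`/`Row` model; all names are illustrative stand-ins, not the grid layouter's real API.

/// Illustrative stand-in for a laid-out row: just its height.
#[derive(Clone, Copy)]
struct Row(f64);

/// A toy region that collects rows and can roll orphaned headers back.
struct Region {
    rows: Vec<Row>,
    /// Row count recorded just before headers were placed, while they are
    /// still at risk of being orphaned (no content row after them yet).
    orphan_snapshot: Option<usize>,
}

impl Region {
    fn new() -> Self {
        Self { rows: Vec::new(), orphan_snapshot: None }
    }

    /// Place header rows and remember where they started.
    fn place_headers(&mut self, headers: &[Row]) {
        self.orphan_snapshot = Some(self.rows.len());
        self.rows.extend_from_slice(headers);
    }

    /// Place a normal row; the headers are no longer orphans afterwards.
    fn place_row(&mut self, row: Row) {
        self.rows.push(row);
        self.orphan_snapshot = None;
    }

    /// Finish the region: headers with no content after them are removed,
    /// since they can simply repeat at the top of the next region.
    fn finish(&mut self) -> Vec<Row> {
        if let Some(snapshot) = self.orphan_snapshot.take() {
            self.rows.truncate(snapshot);
        }
        std::mem::take(&mut self.rows)
    }
}

fn main() {
    let mut region = Region::new();
    region.place_headers(&[Row(14.0)]);
    // No content row fit below the header, so finishing rolls it back.
    assert!(region.finish().is_empty());

    let mut region = Region::new();
    region.place_headers(&[Row(14.0)]);
    region.place_row(Row(20.0));
    assert_eq!(region.finish().len(), 2);
}

The layouter applies the same principle: the snapshot is taken before headers are placed and discarded as soon as a regular row follows, so only truly orphaned headers are rolled back.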
/// Calculates the total expected height of several headers.
pub fn simulate_header_height<'h: 'a>(
&self,
headers: impl IntoIterator<Item = &'h Header>,
regions: &Regions<'_>,
engine: &mut Engine,
disambiguator: usize,
) -> SourceResult<Abs> {
let mut height = Abs::zero();
for header in headers {
height +=
self.simulate_header(header, regions, engine, disambiguator)?.height;
}
Ok(height)
}
/// Simulate the header's group of rows. /// Simulate the header's group of rows.
pub fn simulate_header( pub fn simulate_header(
&self, &self,
@ -66,8 +455,8 @@ impl GridLayouter<'_> {
// assume that the amount of unbreakable rows following the first row // assume that the amount of unbreakable rows following the first row
// in the header will be precisely the rows in the header. // in the header will be precisely the rows in the header.
self.simulate_unbreakable_row_group( self.simulate_unbreakable_row_group(
0, header.range.start,
Some(header.end), Some(header.range.end - header.range.start),
regions, regions,
engine, engine,
disambiguator, disambiguator,
@ -91,11 +480,22 @@ impl GridLayouter<'_> {
{ {
// Advance regions without any output until we can place the // Advance regions without any output until we can place the
// footer. // footer.
self.finish_region_internal(Frame::soft(Axes::splat(Abs::zero())), vec![]); self.finish_region_internal(
Frame::soft(Axes::splat(Abs::zero())),
vec![],
Default::default(),
);
skipped_region = true; skipped_region = true;
} }
self.footer_height = if skipped_region { // TODO(subfooters): Consider resetting header height etc. if we skip
// region. (Maybe move that step to `finish_region_internal`.)
//
// That is unnecessary at the moment as 'prepare_footers' is only
// called at the start of the region, so header height is always zero
// and no headers were placed so far, but what about when we can have
// footers in the middle of the region? Let's think about this then.
self.current.footer_height = if skipped_region {
// Simulate the footer again; the region's 'full' might have // Simulate the footer again; the region's 'full' might have
// changed. // changed.
self.simulate_footer(footer, &self.regions, engine, disambiguator)? self.simulate_footer(footer, &self.regions, engine, disambiguator)?
@ -118,12 +518,22 @@ impl GridLayouter<'_> {
// Ensure footer rows have their own height available. // Ensure footer rows have their own height available.
// Won't change much as we're creating an unbreakable row group // Won't change much as we're creating an unbreakable row group
// anyway, so this is mostly for correctness. // anyway, so this is mostly for correctness.
self.regions.size.y += self.footer_height; self.regions.size.y += self.current.footer_height;
let repeats = self.grid.footer.as_ref().is_some_and(|f| f.repeated);
let footer_len = self.grid.rows.len() - footer.start; let footer_len = self.grid.rows.len() - footer.start;
self.unbreakable_rows_left += footer_len; self.unbreakable_rows_left += footer_len;
for y in footer.start..self.grid.rows.len() { for y in footer.start..self.grid.rows.len() {
self.layout_row(y, engine, disambiguator)?; self.layout_row_with_state(
y,
engine,
disambiguator,
RowState {
in_active_repeatable: repeats,
..Default::default()
},
)?;
} }
Ok(()) Ok(())
@ -144,10 +554,18 @@ impl GridLayouter<'_> {
// in the footer will be precisely the rows in the footer. // in the footer will be precisely the rows in the footer.
self.simulate_unbreakable_row_group( self.simulate_unbreakable_row_group(
footer.start, footer.start,
Some(self.grid.rows.len() - footer.start), Some(footer.end - footer.start),
regions, regions,
engine, engine,
disambiguator, disambiguator,
) )
} }
} }
/// The total number of rows in the given list of headers.
#[inline]
pub fn total_header_row_count<'h>(
headers: impl IntoIterator<Item = &'h Header>,
) -> usize {
headers.into_iter().map(|h| h.range.end - h.range.start).sum()
}
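As a standalone illustration of the range-based counting above, here is a small sketch assuming a hypothetical minimal `Header` that carries only its row `range`:

use std::ops::Range;

/// Hypothetical minimal header: the half-open range of grid rows it owns.
struct Header {
    range: Range<usize>,
}

/// Same counting idea as `total_header_row_count`: sum the lengths of the
/// headers' row ranges.
fn total_rows<'h>(headers: impl IntoIterator<Item = &'h Header>) -> usize {
    headers.into_iter().map(|h| h.range.end - h.range.start).sum()
}

fn main() {
    let headers = [Header { range: 0..2 }, Header { range: 2..3 }];
    assert_eq!(total_rows(&headers), 3);
}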
@ -3,9 +3,8 @@ use typst_library::engine::Engine;
use typst_library::foundations::Resolve; use typst_library::foundations::Resolve;
use typst_library::layout::grid::resolve::Repeatable; use typst_library::layout::grid::resolve::Repeatable;
use typst_library::layout::{Abs, Axes, Frame, Point, Region, Regions, Size, Sizing}; use typst_library::layout::{Abs, Axes, Frame, Point, Region, Regions, Size, Sizing};
use typst_utils::MaybeReverseIter;
use super::layouter::{in_last_with_offset, points, Row, RowPiece}; use super::layouter::{points, Row};
use super::{layout_cell, Cell, GridLayouter}; use super::{layout_cell, Cell, GridLayouter};
/// All information needed to layout a single rowspan. /// All information needed to layout a single rowspan.
@ -23,6 +22,10 @@ pub struct Rowspan {
/// specified for the parent cell's `breakable` field. /// specified for the parent cell's `breakable` field.
pub is_effectively_unbreakable: bool, pub is_effectively_unbreakable: bool,
/// The horizontal offset of this rowspan in all regions. /// The horizontal offset of this rowspan in all regions.
///
/// This is the offset from the text direction start, meaning that, on RTL
/// grids, this is the offset from the right of the grid, whereas, on LTR
/// grids, it is the offset from the left.
pub dx: Abs, pub dx: Abs,
/// The vertical offset of this rowspan in the first region. /// The vertical offset of this rowspan in the first region.
pub dy: Abs, pub dy: Abs,
@ -87,10 +90,10 @@ pub struct CellMeasurementData<'layouter> {
impl GridLayouter<'_> { impl GridLayouter<'_> {
/// Layout a rowspan over the already finished regions, plus the current /// Layout a rowspan over the already finished regions, plus the current
/// region's frame and resolved rows, if it wasn't finished yet (because /// region's frame and height of resolved header rows, if it wasn't
/// we're being called from `finish_region`, but note that this function is /// finished yet (because we're being called from `finish_region`, but note
/// also called once after all regions are finished, in which case /// that this function is also called once after all regions are finished,
/// `current_region_data` is `None`). /// in which case `current_region_data` is `None`).
/// ///
/// We need to do this only once we already know the heights of all /// We need to do this only once we already know the heights of all
/// spanned rows, which is only possible after laying out the last row /// spanned rows, which is only possible after laying out the last row
@ -98,7 +101,7 @@ impl GridLayouter<'_> {
pub fn layout_rowspan( pub fn layout_rowspan(
&mut self, &mut self,
rowspan_data: Rowspan, rowspan_data: Rowspan,
current_region_data: Option<(&mut Frame, &[RowPiece])>, current_region_data: Option<(&mut Frame, Abs)>,
engine: &mut Engine, engine: &mut Engine,
) -> SourceResult<()> { ) -> SourceResult<()> {
let Rowspan { let Rowspan {
@ -118,10 +121,11 @@ impl GridLayouter<'_> {
// Nothing to layout. // Nothing to layout.
return Ok(()); return Ok(());
}; };
let first_column = self.rcols[x];
let cell = self.grid.cell(x, y).unwrap(); let cell = self.grid.cell(x, y).unwrap();
let width = self.cell_spanned_width(cell, x); let width = self.cell_spanned_width(cell, x);
let dx = if self.is_rtl { dx - width + first_column } else { dx }; // In RTL cells expand to the left, thus the position
// must additionally be offset by the cell's width.
let dx = if self.is_rtl { self.width - (dx + width) } else { dx };
// Prepare regions. // Prepare regions.
let size = Size::new(width, *first_height); let size = Size::new(width, *first_height);
@ -142,11 +146,31 @@ impl GridLayouter<'_> {
// Push the layouted frames directly into the finished frames. // Push the layouted frames directly into the finished frames.
let fragment = layout_cell(cell, engine, disambiguator, self.styles, pod)?; let fragment = layout_cell(cell, engine, disambiguator, self.styles, pod)?;
let (current_region, current_rrows) = current_region_data.unzip(); let (current_region, current_header_row_height) = current_region_data.unzip();
for ((i, finished), frame) in self
// Clever trick to process finished header rows:
// - If there are grid headers, the vector will be filled with one
// finished header row height per region, so, chaining with the height
// for the current one, we get the header row height for each region.
//
// - But if there are no grid headers, the vector will be empty, so in
// theory the regions and resolved header row heights wouldn't match.
// But that's fine - 'current_header_row_height' can only be either
// 'Some(zero)' or 'None' in such a case, and for all other rows we
// append infinite zeros. That is, in such a case, the resolved header
// row height is always zero, so that's our fallback.
let finished_header_rows = self
.finished_header_rows
.iter()
.map(|info| info.repeated_height)
.chain(current_header_row_height)
.chain(std::iter::repeat(Abs::zero()));
for ((i, (finished, header_dy)), frame) in self
.finished .finished
.iter_mut() .iter_mut()
.chain(current_region.into_iter()) .chain(current_region.into_iter())
.zip(finished_header_rows)
.skip(first_region) .skip(first_region)
.enumerate() .enumerate()
.zip(fragment) .zip(fragment)
@ -158,22 +182,9 @@ impl GridLayouter<'_> {
} else { } else {
// The rowspan continuation starts after the header (thus, // The rowspan continuation starts after the header (thus,
// at a position after the sum of the laid out header // at a position after the sum of the laid out header
// rows). // rows). Without a header, this is zero, so the rowspan can
if let Some(Repeatable::Repeated(header)) = &self.grid.header { // start at the very top of the region as usual.
let header_rows = self header_dy
.rrows
.get(i)
.map(Vec::as_slice)
.or(current_rrows)
.unwrap_or(&[])
.iter()
.take_while(|row| row.y < header.end);
header_rows.map(|row| row.height).sum()
} else {
// Without a header, start at the very top of the region.
Abs::zero()
}
}; };
finished.push_frame(Point::new(dx, dy), frame); finished.push_frame(Point::new(dx, dy), frame);
@ -185,10 +196,8 @@ impl GridLayouter<'_> {
/// Checks if a row contains the beginning of one or more rowspan cells. /// Checks if a row contains the beginning of one or more rowspan cells.
/// If so, adds them to the rowspans vector. /// If so, adds them to the rowspans vector.
pub fn check_for_rowspans(&mut self, disambiguator: usize, y: usize) { pub fn check_for_rowspans(&mut self, disambiguator: usize, y: usize) {
// We will compute the horizontal offset of each rowspan in advance. let offsets = points(self.rcols.iter().copied());
// For that reason, we must reverse the column order when using RTL. for (x, dx) in (0..self.rcols.len()).zip(offsets) {
let offsets = points(self.rcols.iter().copied().rev_if(self.is_rtl));
for (x, dx) in (0..self.rcols.len()).rev_if(self.is_rtl).zip(offsets) {
let Some(cell) = self.grid.cell(x, y) else { let Some(cell) = self.grid.cell(x, y) else {
continue; continue;
}; };
@ -229,15 +238,13 @@ impl GridLayouter<'_> {
// current row is dynamic and depends on the amount of upcoming // current row is dynamic and depends on the amount of upcoming
// unbreakable cells (with or without a rowspan setting). // unbreakable cells (with or without a rowspan setting).
let mut amount_unbreakable_rows = None; let mut amount_unbreakable_rows = None;
if let Some(Repeatable::NotRepeated(header)) = &self.grid.header { if let Some(footer) = &self.grid.footer {
if current_row < header.end { if !footer.repeated && current_row >= footer.start {
// Non-repeated header, so keep it unbreakable.
amount_unbreakable_rows = Some(header.end);
}
}
if let Some(Repeatable::NotRepeated(footer)) = &self.grid.footer {
if current_row >= footer.start {
// Non-repeated footer, so keep it unbreakable. // Non-repeated footer, so keep it unbreakable.
//
// TODO(subfooters): This will become unnecessary
// once non-repeated footers are treated differently and
// have widow prevention.
amount_unbreakable_rows = Some(self.grid.rows.len() - footer.start); amount_unbreakable_rows = Some(self.grid.rows.len() - footer.start);
} }
} }
@ -252,10 +259,7 @@ impl GridLayouter<'_> {
// Skip to fitting region. // Skip to fitting region.
while !self.regions.size.y.fits(row_group.height) while !self.regions.size.y.fits(row_group.height)
&& !in_last_with_offset( && self.may_progress_with_repeats()
self.regions,
self.header_height + self.footer_height,
)
{ {
self.finish_region(engine, false)?; self.finish_region(engine, false)?;
} }
@ -394,16 +398,29 @@ impl GridLayouter<'_> {
// auto rows don't depend on the backlog, as they only span one // auto rows don't depend on the backlog, as they only span one
// region. // region.
if breakable if breakable
&& (matches!(self.grid.header, Some(Repeatable::Repeated(_))) && (!self.repeating_headers.is_empty()
|| matches!(self.grid.footer, Some(Repeatable::Repeated(_)))) || !self.pending_headers.is_empty()
|| matches!(&self.grid.footer, Some(footer) if footer.repeated))
{ {
// Subtract header and footer height from all upcoming regions // Subtract header and footer height from all upcoming regions
// when measuring the cell, including the last repeated region. // when measuring the cell, including the last repeated region.
// //
// This will update the 'custom_backlog' vector with the // This will update the 'custom_backlog' vector with the
// updated heights of the upcoming regions. // updated heights of the upcoming regions.
//
// We predict that header height will only include that of
// repeating headers, as we can assume non-repeating headers in
// the first region have been successfully placed, unless
// something didn't fit on the first region of the auto row,
// but we will only find that out after measurement, and if
// that happens, we discard the measurement and try again.
let mapped_regions = self.regions.map(&mut custom_backlog, |size| { let mapped_regions = self.regions.map(&mut custom_backlog, |size| {
Size::new(size.x, size.y - self.header_height - self.footer_height) Size::new(
size.x,
size.y
- self.current.repeating_header_height
- self.current.footer_height,
)
}); });
// Callees must use the custom backlog instead of the current // Callees must use the custom backlog instead of the current
@ -457,6 +474,7 @@ impl GridLayouter<'_> {
// Height of the rowspan covered by spanned rows in the current // Height of the rowspan covered by spanned rows in the current
// region. // region.
let laid_out_height: Abs = self let laid_out_height: Abs = self
.current
.lrows .lrows
.iter() .iter()
.filter_map(|row| match row { .filter_map(|row| match row {
@ -504,7 +522,12 @@ impl GridLayouter<'_> {
.iter() .iter()
.copied() .copied()
.chain(std::iter::once(if breakable { .chain(std::iter::once(if breakable {
self.initial.y - self.header_height - self.footer_height // Here we are calculating the available height for a
// rowspan from the top of the current region, so
// we have to use initial header heights (note that
// header height can change in the middle of the
// region).
self.current.initial_after_repeats
} else { } else {
// When measuring unbreakable auto rows, infinite // When measuring unbreakable auto rows, infinite
// height is available for content to expand. // height is available for content to expand.
@ -516,11 +539,13 @@ impl GridLayouter<'_> {
// rowspan's already laid out heights with the current // rowspan's already laid out heights with the current
// region's height and current backlog to ensure a good // region's height and current backlog to ensure a good
// level of accuracy in the measurements. // level of accuracy in the measurements.
let backlog = self //
.regions // Assume only repeating headers will survive starting at
.backlog // the next region.
.iter() let backlog = self.regions.backlog.iter().map(|&size| {
.map(|&size| size - self.header_height - self.footer_height); size - self.current.repeating_header_height
- self.current.footer_height
});
heights_up_to_current_region.chain(backlog).collect::<Vec<_>>() heights_up_to_current_region.chain(backlog).collect::<Vec<_>>()
} else { } else {
@ -534,10 +559,10 @@ impl GridLayouter<'_> {
height = *rowspan_height; height = *rowspan_height;
backlog = None; backlog = None;
full = rowspan_full; full = rowspan_full;
last = self last = self.regions.last.map(|size| {
.regions size - self.current.repeating_header_height
.last - self.current.footer_height
.map(|size| size - self.header_height - self.footer_height); });
} else { } else {
// The rowspan started in the current region, as its vector // The rowspan started in the current region, as its vector
// of heights in regions is currently empty. // of heights in regions is currently empty.
@ -739,10 +764,11 @@ impl GridLayouter<'_> {
simulated_regions.next(); simulated_regions.next();
disambiguator += 1; disambiguator += 1;
// Subtract the initial header and footer height, since that's the // Subtract the repeating header and footer height, since that's
// height we used when subtracting from the region backlog's // the height we used when subtracting from the region backlog's
// heights while measuring cells. // heights while measuring cells.
simulated_regions.size.y -= self.header_height + self.footer_height; simulated_regions.size.y -=
self.current.repeating_header_height + self.current.footer_height;
} }
if let Some(original_last_resolved_size) = last_resolved_size { if let Some(original_last_resolved_size) = last_resolved_size {
@ -874,12 +900,8 @@ impl GridLayouter<'_> {
// which, when used and combined with upcoming spanned rows, covers all // which, when used and combined with upcoming spanned rows, covers all
// of the requested rowspan height, we give up. // of the requested rowspan height, we give up.
for _attempt in 0..5 { for _attempt in 0..5 {
let rowspan_simulator = RowspanSimulator::new( let rowspan_simulator =
disambiguator, RowspanSimulator::new(disambiguator, simulated_regions, &self.current);
simulated_regions,
self.header_height,
self.footer_height,
);
let total_spanned_height = rowspan_simulator.simulate_rowspan_layout( let total_spanned_height = rowspan_simulator.simulate_rowspan_layout(
y, y,
@ -961,7 +983,8 @@ impl GridLayouter<'_> {
{ {
extra_amount_to_grow -= simulated_regions.size.y.max(Abs::zero()); extra_amount_to_grow -= simulated_regions.size.y.max(Abs::zero());
simulated_regions.next(); simulated_regions.next();
simulated_regions.size.y -= self.header_height + self.footer_height; simulated_regions.size.y -=
self.current.repeating_header_height + self.current.footer_height;
disambiguator += 1; disambiguator += 1;
} }
simulated_regions.size.y -= extra_amount_to_grow; simulated_regions.size.y -= extra_amount_to_grow;
@ -978,10 +1001,17 @@ struct RowspanSimulator<'a> {
finished: usize, finished: usize,
/// The state of regions during the simulation. /// The state of regions during the simulation.
regions: Regions<'a>, regions: Regions<'a>,
/// The height of the header in the currently simulated region. /// The total height of headers in the currently simulated region.
header_height: Abs, header_height: Abs,
/// The height of the footer in the currently simulated region. /// The total height of footers in the currently simulated region.
footer_height: Abs, footer_height: Abs,
/// Whether `self.regions.may_progress()` was `true` at the top of the
/// region, indicating we can progress anywhere in the current region,
/// even right after a repeated header.
could_progress_at_top: bool,
/// Available height after laying out repeated headers at the top of the
/// currently simulated region.
initial_after_repeats: Abs,
/// The total spanned height so far in the simulation. /// The total spanned height so far in the simulation.
total_spanned_height: Abs, total_spanned_height: Abs,
/// Height of the latest spanned gutter row in the simulation. /// Height of the latest spanned gutter row in the simulation.
@ -995,14 +1025,19 @@ impl<'a> RowspanSimulator<'a> {
fn new( fn new(
finished: usize, finished: usize,
regions: Regions<'a>, regions: Regions<'a>,
header_height: Abs, current: &super::layouter::Current,
footer_height: Abs,
) -> Self { ) -> Self {
Self { Self {
finished, finished,
regions, regions,
header_height, // There can be no new headers or footers within a multi-page
footer_height, // rowspan, since headers and footers are unbreakable, so
// assuming the repeating header height and footer height
// won't change is safe.
header_height: current.repeating_header_height,
footer_height: current.footer_height,
could_progress_at_top: current.could_progress_at_top,
initial_after_repeats: current.initial_after_repeats,
total_spanned_height: Abs::zero(), total_spanned_height: Abs::zero(),
latest_spanned_gutter_height: Abs::zero(), latest_spanned_gutter_height: Abs::zero(),
} }
@ -1051,10 +1086,7 @@ impl<'a> RowspanSimulator<'a> {
0, 0,
)?; )?;
while !self.regions.size.y.fits(row_group.height) while !self.regions.size.y.fits(row_group.height)
&& !in_last_with_offset( && self.may_progress_with_repeats()
self.regions,
self.header_height + self.footer_height,
)
{ {
self.finish_region(layouter, engine)?; self.finish_region(layouter, engine)?;
} }
@ -1076,10 +1108,7 @@ impl<'a> RowspanSimulator<'a> {
let mut skipped_region = false; let mut skipped_region = false;
while unbreakable_rows_left == 0 while unbreakable_rows_left == 0
&& !self.regions.size.y.fits(height) && !self.regions.size.y.fits(height)
&& !in_last_with_offset( && self.may_progress_with_repeats()
self.regions,
self.header_height + self.footer_height,
)
{ {
self.finish_region(layouter, engine)?; self.finish_region(layouter, engine)?;
@ -1125,17 +1154,31 @@ impl<'a> RowspanSimulator<'a> {
// our simulation checks what happens AFTER the auto row, so we can // our simulation checks what happens AFTER the auto row, so we can
// just use the original backlog from `self.regions`. // just use the original backlog from `self.regions`.
let disambiguator = self.finished; let disambiguator = self.finished;
let header_height =
if let Some(Repeatable::Repeated(header)) = &layouter.grid.header { let (repeating_headers, header_height) = if !layouter.repeating_headers.is_empty()
layouter || !layouter.pending_headers.is_empty()
.simulate_header(header, &self.regions, engine, disambiguator)? {
.height // Only repeating headers have survived after the first region
// break.
let repeating_headers = layouter.repeating_headers.iter().copied().chain(
layouter.pending_headers.iter().filter_map(Repeatable::as_repeated),
);
let header_height = layouter.simulate_header_height(
repeating_headers.clone(),
&self.regions,
engine,
disambiguator,
)?;
(Some(repeating_headers), header_height)
} else { } else {
Abs::zero() (None, Abs::zero())
}; };
let footer_height = let footer_height = if let Some(footer) =
if let Some(Repeatable::Repeated(footer)) = &layouter.grid.footer { layouter.grid.footer.as_ref().and_then(Repeatable::as_repeated)
{
layouter layouter
.simulate_footer(footer, &self.regions, engine, disambiguator)? .simulate_footer(footer, &self.regions, engine, disambiguator)?
.height .height
@ -1154,19 +1197,24 @@ impl<'a> RowspanSimulator<'a> {
skipped_region = true; skipped_region = true;
} }
if let Some(Repeatable::Repeated(header)) = &layouter.grid.header { if let Some(repeating_headers) = repeating_headers {
self.header_height = if skipped_region { self.header_height = if skipped_region {
// Simulate headers again, at the new region, as // Simulate headers again, at the new region, as
// the full region height may change. // the full region height may change.
layouter layouter.simulate_header_height(
.simulate_header(header, &self.regions, engine, disambiguator)? repeating_headers,
.height &self.regions,
engine,
disambiguator,
)?
} else { } else {
header_height header_height
}; };
} }
if let Some(Repeatable::Repeated(footer)) = &layouter.grid.footer { if let Some(footer) =
layouter.grid.footer.as_ref().and_then(Repeatable::as_repeated)
{
self.footer_height = if skipped_region { self.footer_height = if skipped_region {
// Simulate footers again, at the new region, as // Simulate footers again, at the new region, as
// the full region height may change. // the full region height may change.
@ -1183,6 +1231,7 @@ impl<'a> RowspanSimulator<'a> {
// header or footer (as an invariant, any rowspans spanning any header // header or footer (as an invariant, any rowspans spanning any header
// or footer rows are fully contained within that header's or footer's rows). // or footer rows are fully contained within that header's or footer's rows).
self.regions.size.y -= self.header_height + self.footer_height; self.regions.size.y -= self.header_height + self.footer_height;
self.initial_after_repeats = self.regions.size.y;
Ok(()) Ok(())
} }
@ -1199,8 +1248,18 @@ impl<'a> RowspanSimulator<'a> {
self.regions.next(); self.regions.next();
self.finished += 1; self.finished += 1;
self.could_progress_at_top = self.regions.may_progress();
self.simulate_header_footer_layout(layouter, engine) self.simulate_header_footer_layout(layouter, engine)
} }
/// Similar to [`GridLayouter::may_progress_with_repeats`] but for rowspan
/// simulation.
#[inline]
fn may_progress_with_repeats(&self) -> bool {
self.could_progress_at_top
|| self.regions.last.is_some()
&& self.regions.size.y != self.initial_after_repeats
}
} }
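For reference, a standalone sketch of the progress check shared by the layouter and the rowspan simulator; the struct and field names below are illustrative and only mirror the boolean logic above, not the real `Regions` API.

/// Illustrative snapshot of the state the progress check inspects.
struct ProgressState {
    /// Whether the region sequence could still progress when we were at the
    /// top of the current region.
    could_progress_at_top: bool,
    /// Whether the region sequence ends in an infinitely repeating region.
    has_repeating_last_region: bool,
    /// Remaining height right after repeated headers were placed.
    initial_after_repeats: f64,
    /// Remaining height right now.
    size_y: f64,
}

impl ProgressState {
    /// Skipping to the next region only helps if we could progress at the
    /// top, or if some space below the repeats has already been consumed;
    /// otherwise the next region would offer exactly the same space again.
    fn may_progress_with_repeats(&self) -> bool {
        self.could_progress_at_top
            || (self.has_repeating_last_region
                && self.size_y != self.initial_after_repeats)
    }
}

fn main() {
    let state = ProgressState {
        could_progress_at_top: false,
        has_repeating_last_region: true,
        initial_after_repeats: 100.0,
        size_y: 40.0,
    };
    assert!(state.may_progress_with_repeats());
}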
/// Subtracts some size from the end of a vector of sizes. /// Subtracts some size from the end of a vector of sizes.
@ -1,6 +1,6 @@
use std::ffi::OsStr; use std::ffi::OsStr;
use typst_library::diag::{warning, At, SourceResult, StrResult}; use typst_library::diag::{warning, At, LoadedWithin, SourceResult, StrResult};
use typst_library::engine::Engine; use typst_library::engine::Engine;
use typst_library::foundations::{Bytes, Derived, Packed, Smart, StyleChain}; use typst_library::foundations::{Bytes, Derived, Packed, Smart, StyleChain};
use typst_library::introspection::Locator; use typst_library::introspection::Locator;
@ -27,17 +27,17 @@ pub fn layout_image(
// Take the format that was explicitly defined, or parse the extension, // Take the format that was explicitly defined, or parse the extension,
// or try to detect the format. // or try to detect the format.
let Derived { source, derived: data } = &elem.source; let Derived { source, derived: loaded } = &elem.source;
let format = match elem.format(styles) { let format = match elem.format(styles) {
Smart::Custom(v) => v, Smart::Custom(v) => v,
Smart::Auto => determine_format(source, data).at(span)?, Smart::Auto => determine_format(source, &loaded.data).at(span)?,
}; };
// Warn the user if the image contains a foreign object. Not perfect // Warn the user if the image contains a foreign object. Not perfect
// because the svg could also be encoded, but that's an edge case. // because the svg could also be encoded, but that's an edge case.
if format == ImageFormat::Vector(VectorFormat::Svg) { if format == ImageFormat::Vector(VectorFormat::Svg) {
let has_foreign_object = let has_foreign_object =
data.as_str().is_ok_and(|s| s.contains("<foreignObject")); memchr::memmem::find(&loaded.data, b"<foreignObject").is_some();
if has_foreign_object { if has_foreign_object {
engine.sink.warn(warning!( engine.sink.warn(warning!(
@ -53,7 +53,7 @@ pub fn layout_image(
let kind = match format { let kind = match format {
ImageFormat::Raster(format) => ImageKind::Raster( ImageFormat::Raster(format) => ImageKind::Raster(
RasterImage::new( RasterImage::new(
data.clone(), loaded.data.clone(),
format, format,
elem.icc(styles).as_ref().map(|icc| icc.derived.clone()), elem.icc(styles).as_ref().map(|icc| icc.derived.clone()),
) )
@ -61,11 +61,11 @@ pub fn layout_image(
), ),
ImageFormat::Vector(VectorFormat::Svg) => ImageKind::Svg( ImageFormat::Vector(VectorFormat::Svg) => ImageKind::Svg(
SvgImage::with_fonts( SvgImage::with_fonts(
data.clone(), loaded.data.clone(),
engine.world, engine.world,
&families(styles).map(|f| f.as_str()).collect::<Vec<_>>(), &families(styles).map(|f| f.as_str()).collect::<Vec<_>>(),
) )
.at(span)?, .within(loaded)?,
), ),
}; };
@ -147,6 +147,7 @@ fn determine_format(source: &DataSource, data: &Bytes) -> StrResult<ImageFormat>
"jpg" | "jpeg" => return Ok(ExchangeFormat::Jpg.into()), "jpg" | "jpeg" => return Ok(ExchangeFormat::Jpg.into()),
"gif" => return Ok(ExchangeFormat::Gif.into()), "gif" => return Ok(ExchangeFormat::Gif.into()),
"svg" | "svgz" => return Ok(VectorFormat::Svg.into()), "svg" | "svgz" => return Ok(VectorFormat::Svg.into()),
"webp" => return Ok(ExchangeFormat::Webp.into()),
_ => {} _ => {}
} }
} }
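For reference, a self-contained sketch of extension-based format detection including the newly added `webp` arm; the enum below is a simplified stand-in, not Typst's actual format types.

/// Simplified stand-in for the real image format enums.
#[derive(Debug, PartialEq)]
enum ImageFormat {
    Png,
    Jpg,
    Gif,
    Webp,
    Svg,
}

/// Map a lowercased file extension to a format, if recognized.
fn format_from_extension(ext: &str) -> Option<ImageFormat> {
    match ext {
        "png" => Some(ImageFormat::Png),
        "jpg" | "jpeg" => Some(ImageFormat::Jpg),
        "gif" => Some(ImageFormat::Gif),
        "webp" => Some(ImageFormat::Webp),
        "svg" | "svgz" => Some(ImageFormat::Svg),
        _ => None,
    }
}

fn main() {
    assert_eq!(format_from_extension("webp"), Some(ImageFormat::Webp));
    assert_eq!(format_from_extension("bmp"), None);
}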
@ -690,13 +690,34 @@ fn breakpoints(p: &Preparation, mut f: impl FnMut(usize, Breakpoint)) {
let breakpoint = if point == text.len() { let breakpoint = if point == text.len() {
Breakpoint::Mandatory Breakpoint::Mandatory
} else { } else {
const OBJ_REPLACE: char = '\u{FFFC}';
match lb.get(c) { match lb.get(c) {
// Fix for: https://github.com/unicode-org/icu4x/issues/4146
LineBreak::Glue | LineBreak::WordJoiner | LineBreak::ZWJ => continue,
LineBreak::MandatoryBreak LineBreak::MandatoryBreak
| LineBreak::CarriageReturn | LineBreak::CarriageReturn
| LineBreak::LineFeed | LineBreak::LineFeed
| LineBreak::NextLine => Breakpoint::Mandatory, | LineBreak::NextLine => Breakpoint::Mandatory,
// https://github.com/typst/typst/issues/5489
//
// OBJECT-REPLACEMENT-CHARACTERs provide Contingent Break
// opportunities before and after by default. This behaviour
// is however tailorable, see:
// https://www.unicode.org/reports/tr14/#CB
// https://www.unicode.org/reports/tr14/#TailorableBreakingRules
// https://www.unicode.org/reports/tr14/#LB20
//
            // Don't provide a line breaking opportunity between an LTR-
// ISOLATE (or any other Combining Mark) and an OBJECT-
// REPLACEMENT-CHARACTER representing an inline item, if the
// LTR-ISOLATE could end up as the only character on the
// previous line.
LineBreak::CombiningMark
if text[point..].starts_with(OBJ_REPLACE)
&& last + c.len_utf8() == point =>
{
continue;
}
_ => Breakpoint::Normal, _ => Breakpoint::Normal,
} }
}; };
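A standalone, simplified sketch of the suppression rule described above: the break opportunity before an OBJECT REPLACEMENT CHARACTER is skipped when only a single preceding character (such as an LTR isolate) would otherwise be stranded on the previous line. This illustrates the idea only; it is not the real segmenter integration, which also checks the line-break class of the preceding character.

/// U+FFFC OBJECT REPLACEMENT CHARACTER, the placeholder for inline items.
const OBJ_REPLACE: char = '\u{FFFC}';

/// Returns true if the break opportunity at byte offset `point` should be
/// suppressed: the upcoming character is an object replacement and the
/// previous opportunity `last` is exactly one character earlier, so breaking
/// would strand that single character on its own line.
fn suppress_break(text: &str, last: usize, point: usize) -> bool {
    let Some(prev) = text[last..].chars().next() else { return false };
    text[point..].starts_with(OBJ_REPLACE) && last + prev.len_utf8() == point
}

fn main() {
    // U+2066 is LEFT-TO-RIGHT ISOLATE; the placeholder follows directly.
    let text = "\u{2066}\u{FFFC}rest";
    let point = '\u{2066}'.len_utf8(); // candidate break right before U+FFFC
    assert!(suppress_break(text, 0, point));

    // With ordinary text before the placeholder, the break is not suppressed.
    assert!(!suppress_break("ab\u{FFFC}", 0, 2));
}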
@ -9,6 +9,7 @@ mod prepare;
mod shaping; mod shaping;
pub use self::box_::layout_box; pub use self::box_::layout_box;
pub use self::shaping::create_shape_plan;
use comemo::{Track, Tracked, TrackedMut}; use comemo::{Track, Tracked, TrackedMut};
use typst_library::diag::SourceResult; use typst_library::diag::SourceResult;
@ -1,18 +1,16 @@
use std::borrow::Cow; use std::borrow::Cow;
use std::fmt::{self, Debug, Formatter}; use std::fmt::{self, Debug, Formatter};
use std::str::FromStr;
use std::sync::Arc; use std::sync::Arc;
use az::SaturatingAs; use az::SaturatingAs;
use ecow::EcoString;
use rustybuzz::{BufferFlags, ShapePlan, UnicodeBuffer}; use rustybuzz::{BufferFlags, ShapePlan, UnicodeBuffer};
use ttf_parser::Tag; use ttf_parser::Tag;
use typst_library::engine::Engine; use typst_library::engine::Engine;
use typst_library::foundations::{Smart, StyleChain}; use typst_library::foundations::{Smart, StyleChain};
use typst_library::layout::{Abs, Dir, Em, Frame, FrameItem, Point, Size}; use typst_library::layout::{Abs, Dir, Em, Frame, FrameItem, Point, Size};
use typst_library::text::{ use typst_library::text::{
families, features, is_default_ignorable, variant, Font, FontFamily, FontVariant, families, features, is_default_ignorable, language, variant, Font, FontFamily,
Glyph, Lang, Region, TextEdgeBounds, TextElem, TextItem, FontVariant, Glyph, Lang, Region, TextEdgeBounds, TextElem, TextItem,
}; };
use typst_library::World; use typst_library::World;
use typst_utils::SliceExt; use typst_utils::SliceExt;
@ -20,7 +18,7 @@ use unicode_bidi::{BidiInfo, Level as BidiLevel};
use unicode_script::{Script, UnicodeScript}; use unicode_script::{Script, UnicodeScript};
use super::{decorate, Item, Range, SpanMapper}; use super::{decorate, Item, Range, SpanMapper};
use crate::modifiers::{FrameModifiers, FrameModify}; use crate::modifiers::FrameModifyText;
/// The result of shaping text. /// The result of shaping text.
/// ///
@ -295,6 +293,8 @@ impl<'a> ShapedText<'a> {
+ justification_left + justification_left
+ justification_right, + justification_right,
x_offset: shaped.x_offset + justification_left, x_offset: shaped.x_offset + justification_left,
y_advance: Em::zero(),
y_offset: Em::zero(),
range: (shaped.range.start - range.start).saturating_as() range: (shaped.range.start - range.start).saturating_as()
..(shaped.range.end - range.start).saturating_as(), ..(shaped.range.end - range.start).saturating_as(),
span, span,
@ -327,7 +327,7 @@ impl<'a> ShapedText<'a> {
offset += width; offset += width;
} }
frame.modify(&FrameModifiers::get_in(self.styles)); frame.modify_text(self.styles);
frame frame
} }
@ -824,12 +824,42 @@ fn shape_segment<'a>(
// Add the glyph to the shaped output. // Add the glyph to the shaped output.
if info.glyph_id != 0 && is_covered(cluster) { if info.glyph_id != 0 && is_covered(cluster) {
// Determine the text range of the glyph. // Assume we have the following sequence of (glyph_id, cluster):
// [(120, 0), (80, 0), (3, 3), (755, 4), (69, 4), (424, 13),
            // (63, 13), (193, 25), (80, 25), (3, 31)]
//
// We then want the sequence of (glyph_id, text_range) to look as follows:
// [(120, 0..3), (80, 0..3), (3, 3..4), (755, 4..13), (69, 4..13),
// (424, 13..25), (63, 13..25), (193, 25..31), (80, 25..31), (3, 31..x)]
//
// Each glyph in the same cluster should be assigned the full text
            // range. This is necessary because only then can krilla
            // properly assign `ActualText` attributes in complex shaping
// scenarios.
// The start of the glyph's text range.
let start = base + cluster; let start = base + cluster;
let end = base
+ if ltr { i.checked_add(1) } else { i.checked_sub(1) } // Determine the end of the glyph's text range.
.and_then(|last| infos.get(last)) let mut k = i;
.map_or(text.len(), |info| info.cluster as usize); let step: isize = if ltr { 1 } else { -1 };
let end = loop {
// If we've reached the end of the glyphs, the `end` of the
// range should be the end of the full text.
let Some((next, next_info)) = k
.checked_add_signed(step)
.and_then(|n| infos.get(n).map(|info| (n, info)))
else {
break base + text.len();
};
// If the cluster doesn't match anymore, we've reached the end.
if next_info.cluster != info.cluster {
break base + next_info.cluster as usize;
}
k = next;
};
let c = text[cluster..].chars().next().unwrap(); let c = text[cluster..].chars().next().unwrap();
let script = c.script(); let script = c.script();
@ -904,7 +934,7 @@ fn shape_segment<'a>(
/// Create a shape plan. /// Create a shape plan.
#[comemo::memoize] #[comemo::memoize]
fn create_shape_plan( pub fn create_shape_plan(
font: &Font, font: &Font,
direction: rustybuzz::Direction, direction: rustybuzz::Direction,
script: rustybuzz::Script, script: rustybuzz::Script,
@ -922,7 +952,7 @@ fn create_shape_plan(
/// Shape the text with tofus from the given font. /// Shape the text with tofus from the given font.
fn shape_tofus(ctx: &mut ShapingContext, base: usize, text: &str, font: Font) { fn shape_tofus(ctx: &mut ShapingContext, base: usize, text: &str, font: Font) {
let x_advance = font.advance(0).unwrap_or_default(); let x_advance = font.x_advance(0).unwrap_or_default();
let add_glyph = |(cluster, c): (usize, char)| { let add_glyph = |(cluster, c): (usize, char)| {
let start = base + cluster; let start = base + cluster;
let end = start + c.len_utf8(); let end = start + c.len_utf8();
@ -1014,20 +1044,8 @@ fn calculate_adjustability(ctx: &mut ShapingContext, lang: Lang, region: Option<
/// Difference between non-breaking and normal space. /// Difference between non-breaking and normal space.
fn nbsp_delta(font: &Font) -> Option<Em> { fn nbsp_delta(font: &Font) -> Option<Em> {
let space = font.ttf().glyph_index(' ')?.0;
let nbsp = font.ttf().glyph_index('\u{00A0}')?.0; let nbsp = font.ttf().glyph_index('\u{00A0}')?.0;
Some(font.advance(nbsp)? - font.advance(space)?) Some(font.x_advance(nbsp)? - font.space_width()?)
}
/// Process the language and region of a style chain into a
/// rustybuzz-compatible BCP 47 language.
fn language(styles: StyleChain) -> rustybuzz::Language {
let mut bcp: EcoString = TextElem::lang_in(styles).as_str().into();
if let Some(region) = TextElem::region_in(styles) {
bcp.push('-');
bcp.push_str(region.as_str());
}
rustybuzz::Language::from_str(&bcp).unwrap()
} }
/// Returns true if all glyphs in `glyphs` have ranges within the range `range`. /// Returns true if all glyphs in `glyphs` have ranges within the range `range`.
@ -96,9 +96,13 @@ pub fn layout_enum(
let mut cells = vec![]; let mut cells = vec![];
let mut locator = locator.split(); let mut locator = locator.split();
let mut number = let mut number = elem.start(styles).unwrap_or_else(|| {
elem.start(styles) if reversed {
.unwrap_or_else(|| if reversed { elem.children.len() } else { 1 }); elem.children.len() as u64
} else {
1
}
});
let mut parents = EnumElem::parents_in(styles); let mut parents = EnumElem::parents_in(styles);
let full = elem.full(styles); let full = elem.full(styles);
@ -1,9 +1,12 @@
use typst_library::diag::SourceResult; use typst_library::diag::SourceResult;
use typst_library::foundations::{Packed, StyleChain}; use typst_library::foundations::{Packed, StyleChain};
use typst_library::layout::{Em, Frame, Point, Size}; use typst_library::layout::{Em, Frame, Point, Size};
use typst_library::math::{Accent, AccentElem}; use typst_library::math::AccentElem;
use super::{style_cramped, FrameFragment, GlyphFragment, MathContext, MathFragment}; use super::{
style_cramped, style_dtls, style_flac, FrameFragment, GlyphFragment, MathContext,
MathFragment,
};
/// How much the accent can be shorter than the base. /// How much the accent can be shorter than the base.
const ACCENT_SHORT_FALL: Em = Em::new(0.5); const ACCENT_SHORT_FALL: Em = Em::new(0.5);
@ -15,53 +18,71 @@ pub fn layout_accent(
ctx: &mut MathContext, ctx: &mut MathContext,
styles: StyleChain, styles: StyleChain,
) -> SourceResult<()> { ) -> SourceResult<()> {
let cramped = style_cramped(); let accent = elem.accent;
let mut base = ctx.layout_into_fragment(&elem.base, styles.chain(&cramped))?; let top_accent = !accent.is_bottom();
// Try to replace a glyph with its dotless variant. // Try to replace the base glyph with its dotless variant.
if let MathFragment::Glyph(glyph) = &mut base { let dtls = style_dtls();
glyph.make_dotless_form(ctx); let base_styles =
} if top_accent && elem.dotless(styles) { styles.chain(&dtls) } else { styles };
let cramped = style_cramped();
let base = ctx.layout_into_fragment(&elem.base, base_styles.chain(&cramped))?;
// Preserve class to preserve automatic spacing. // Preserve class to preserve automatic spacing.
let base_class = base.class(); let base_class = base.class();
let base_attach = base.accent_attach(); let base_attach = base.accent_attach();
let width = elem.size(styles).relative_to(base.width()); // Try to replace the accent glyph with its flattened variant.
let Accent(c) = elem.accent;
let mut glyph = GlyphFragment::new(ctx, styles, c, elem.span());
// Try to replace accent glyph with flattened variant.
let flattened_base_height = scaled!(ctx, styles, flattened_accent_base_height); let flattened_base_height = scaled!(ctx, styles, flattened_accent_base_height);
if base.ascent() > flattened_base_height { let flac = style_flac();
glyph.make_flattened_accent_form(ctx); let accent_styles = if top_accent && base.ascent() > flattened_base_height {
} styles.chain(&flac)
} else {
styles
};
// Forcing the accent to be at least as large as the base makes it too let mut glyph =
// wide in many case. GlyphFragment::new_char(ctx.font, accent_styles, accent.0, elem.span())?;
let short_fall = ACCENT_SHORT_FALL.at(glyph.font_size);
let variant = glyph.stretch_horizontal(ctx, width, short_fall);
let accent = variant.frame;
let accent_attach = variant.accent_attach;
// Forcing the accent to be at least as large as the base makes it too wide
// in many cases.
let width = elem.size(styles).relative_to(base.width());
let short_fall = ACCENT_SHORT_FALL.at(glyph.item.size);
glyph.stretch_horizontal(ctx, width - short_fall);
let accent_attach = glyph.accent_attach.0;
let accent = glyph.into_frame();
let (gap, accent_pos, base_pos) = if top_accent {
// Descent is negative because the accent's ink bottom is above the // Descent is negative because the accent's ink bottom is above the
// baseline. Therefore, the default gap is the accent's negated descent // baseline. Therefore, the default gap is the accent's negated descent
// minus the accent base height. Only if the base is very small, we need // minus the accent base height. Only if the base is very small, we
// a larger gap so that the accent doesn't move too low. // need a larger gap so that the accent doesn't move too low.
let accent_base_height = scaled!(ctx, styles, accent_base_height); let accent_base_height = scaled!(ctx, styles, accent_base_height);
let gap = -accent.descent() - base.ascent().min(accent_base_height); let gap = -accent.descent() - base.ascent().min(accent_base_height);
let size = Size::new(base.width(), accent.height() + gap + base.height()); let accent_pos = Point::with_x(base_attach.0 - accent_attach);
let accent_pos = Point::with_x(base_attach - accent_attach);
let base_pos = Point::with_y(accent.height() + gap); let base_pos = Point::with_y(accent.height() + gap);
(gap, accent_pos, base_pos)
} else {
let gap = -accent.ascent();
let accent_pos = Point::new(base_attach.1 - accent_attach, base.height() + gap);
let base_pos = Point::zero();
(gap, accent_pos, base_pos)
};
let size = Size::new(base.width(), accent.height() + gap + base.height());
let baseline = base_pos.y + base.ascent(); let baseline = base_pos.y + base.ascent();
let base_italics_correction = base.italics_correction(); let base_italics_correction = base.italics_correction();
let base_text_like = base.is_text_like(); let base_text_like = base.is_text_like();
let base_ascent = match &base { let base_ascent = match &base {
MathFragment::Frame(frame) => frame.base_ascent, MathFragment::Frame(frame) => frame.base_ascent,
_ => base.ascent(), _ => base.ascent(),
}; };
let base_descent = match &base {
MathFragment::Frame(frame) => frame.base_descent,
_ => base.descent(),
};
let mut frame = Frame::soft(size); let mut frame = Frame::soft(size);
frame.set_baseline(baseline); frame.set_baseline(baseline);
@ -71,6 +92,7 @@ pub fn layout_accent(
FrameFragment::new(styles, frame) FrameFragment::new(styles, frame)
.with_class(base_class) .with_class(base_class)
.with_base_ascent(base_ascent) .with_base_ascent(base_ascent)
.with_base_descent(base_descent)
.with_italics_correction(base_italics_correction) .with_italics_correction(base_italics_correction)
.with_accent_attach(base_attach) .with_accent_attach(base_attach)
.with_text_like(base_text_like), .with_text_like(base_text_like),
@ -66,7 +66,6 @@ pub fn layout_attach(
let relative_to_width = measure!(t, width).max(measure!(b, width)); let relative_to_width = measure!(t, width).max(measure!(b, width));
stretch_fragment( stretch_fragment(
ctx, ctx,
styles,
&mut base, &mut base,
Some(Axis::X), Some(Axis::X),
Some(relative_to_width), Some(relative_to_width),
@ -220,7 +219,6 @@ fn layout_attachments(
// Calculate the distance each pre-script extends to the left of the base's // Calculate the distance each pre-script extends to the left of the base's
// width. // width.
let (tl_pre_width, bl_pre_width) = compute_pre_script_widths( let (tl_pre_width, bl_pre_width) = compute_pre_script_widths(
ctx,
&base, &base,
[tl.as_ref(), bl.as_ref()], [tl.as_ref(), bl.as_ref()],
(tx_shift, bx_shift), (tx_shift, bx_shift),
@ -231,7 +229,6 @@ fn layout_attachments(
// base's width. Also calculate each post-script's kerning (we need this for // base's width. Also calculate each post-script's kerning (we need this for
// its position later). // its position later).
let ((tr_post_width, tr_kern), (br_post_width, br_kern)) = compute_post_script_widths( let ((tr_post_width, tr_kern), (br_post_width, br_kern)) = compute_post_script_widths(
ctx,
&base, &base,
[tr.as_ref(), br.as_ref()], [tr.as_ref(), br.as_ref()],
(tx_shift, bx_shift), (tx_shift, bx_shift),
@ -287,14 +284,13 @@ fn layout_attachments(
/// post-script's kerning value. The first tuple is for the post-superscript, /// post-script's kerning value. The first tuple is for the post-superscript,
/// and the second is for the post-subscript. /// and the second is for the post-subscript.
fn compute_post_script_widths( fn compute_post_script_widths(
ctx: &MathContext,
base: &MathFragment, base: &MathFragment,
[tr, br]: [Option<&MathFragment>; 2], [tr, br]: [Option<&MathFragment>; 2],
(tr_shift, br_shift): (Abs, Abs), (tr_shift, br_shift): (Abs, Abs),
space_after_post_script: Abs, space_after_post_script: Abs,
) -> ((Abs, Abs), (Abs, Abs)) { ) -> ((Abs, Abs), (Abs, Abs)) {
let tr_values = tr.map_or_default(|tr| { let tr_values = tr.map_or_default(|tr| {
let kern = math_kern(ctx, base, tr, tr_shift, Corner::TopRight); let kern = math_kern(base, tr, tr_shift, Corner::TopRight);
(space_after_post_script + tr.width() + kern, kern) (space_after_post_script + tr.width() + kern, kern)
}); });
@ -302,7 +298,7 @@ fn compute_post_script_widths(
// need to shift the post-subscript left by the base's italic correction // need to shift the post-subscript left by the base's italic correction
// (see the kerning algorithm as described in the OpenType MATH spec). // (see the kerning algorithm as described in the OpenType MATH spec).
let br_values = br.map_or_default(|br| { let br_values = br.map_or_default(|br| {
let kern = math_kern(ctx, base, br, br_shift, Corner::BottomRight) let kern = math_kern(base, br, br_shift, Corner::BottomRight)
- base.italics_correction(); - base.italics_correction();
(space_after_post_script + br.width() + kern, kern) (space_after_post_script + br.width() + kern, kern)
}); });
@ -317,19 +313,18 @@ fn compute_post_script_widths(
/// extends left of the base's width and the second being the distance the /// extends left of the base's width and the second being the distance the
/// pre-subscript extends left of the base's width. /// pre-subscript extends left of the base's width.
fn compute_pre_script_widths( fn compute_pre_script_widths(
ctx: &MathContext,
base: &MathFragment, base: &MathFragment,
[tl, bl]: [Option<&MathFragment>; 2], [tl, bl]: [Option<&MathFragment>; 2],
(tl_shift, bl_shift): (Abs, Abs), (tl_shift, bl_shift): (Abs, Abs),
space_before_pre_script: Abs, space_before_pre_script: Abs,
) -> (Abs, Abs) { ) -> (Abs, Abs) {
let tl_pre_width = tl.map_or_default(|tl| { let tl_pre_width = tl.map_or_default(|tl| {
let kern = math_kern(ctx, base, tl, tl_shift, Corner::TopLeft); let kern = math_kern(base, tl, tl_shift, Corner::TopLeft);
space_before_pre_script + tl.width() + kern space_before_pre_script + tl.width() + kern
}); });
let bl_pre_width = bl.map_or_default(|bl| { let bl_pre_width = bl.map_or_default(|bl| {
let kern = math_kern(ctx, base, bl, bl_shift, Corner::BottomLeft); let kern = math_kern(base, bl, bl_shift, Corner::BottomLeft);
space_before_pre_script + bl.width() + kern space_before_pre_script + bl.width() + kern
}); });
@ -434,9 +429,13 @@ fn compute_script_shifts(
} }
if bl.is_some() || br.is_some() { if bl.is_some() || br.is_some() {
let descent = match &base {
MathFragment::Frame(frame) => frame.base_descent,
_ => base.descent(),
};
shift_down = shift_down shift_down = shift_down
.max(sub_shift_down) .max(sub_shift_down)
.max(if is_text_like { Abs::zero() } else { base.descent() + sub_drop_min }) .max(if is_text_like { Abs::zero() } else { descent + sub_drop_min })
.max(measure!(bl, ascent) - sub_top_max) .max(measure!(bl, ascent) - sub_top_max)
.max(measure!(br, ascent) - sub_top_max); .max(measure!(br, ascent) - sub_top_max);
} }
@ -467,13 +466,7 @@ fn compute_script_shifts(
/// a negative value means shifting the script closer to the base. Requires the /// a negative value means shifting the script closer to the base. Requires the
/// distance from the base's baseline to the script's baseline, as well as the /// distance from the base's baseline to the script's baseline, as well as the
/// script's corner (tl, tr, bl, br). /// script's corner (tl, tr, bl, br).
fn math_kern( fn math_kern(base: &MathFragment, script: &MathFragment, shift: Abs, pos: Corner) -> Abs {
ctx: &MathContext,
base: &MathFragment,
script: &MathFragment,
shift: Abs,
pos: Corner,
) -> Abs {
// This process is described under the MathKernInfo table in the OpenType // This process is described under the MathKernInfo table in the OpenType
// MATH spec. // MATH spec.
@ -498,8 +491,8 @@ fn math_kern(
// Calculate the sum of kerning values for each correction height. // Calculate the sum of kerning values for each correction height.
let summed_kern = |height| { let summed_kern = |height| {
let base_kern = base.kern_at_height(ctx, pos, height); let base_kern = base.kern_at_height(pos, height);
let attach_kern = script.kern_at_height(ctx, pos.inv(), height); let attach_kern = script.kern_at_height(pos.inv(), height);
base_kern + attach_kern base_kern + attach_kern
}; };
@ -109,14 +109,14 @@ fn layout_frac_like(
frame.push_frame(denom_pos, denom); frame.push_frame(denom_pos, denom);
if binom { if binom {
let mut left = GlyphFragment::new(ctx, styles, '(', span) let mut left = GlyphFragment::new_char(ctx.font, styles, '(', span)?;
.stretch_vertical(ctx, height, short_fall); left.stretch_vertical(ctx, height - short_fall);
left.center_on_axis(ctx); left.center_on_axis();
ctx.push(left); ctx.push(left);
ctx.push(FrameFragment::new(styles, frame)); ctx.push(FrameFragment::new(styles, frame));
let mut right = GlyphFragment::new(ctx, styles, ')', span) let mut right = GlyphFragment::new_char(ctx.font, styles, ')', span)?;
.stretch_vertical(ctx, height, short_fall); right.stretch_vertical(ctx, height - short_fall);
right.center_on_axis(ctx); right.center_on_axis();
ctx.push(right); ctx.push(right);
} else { } else {
frame.push( frame.push(
File diff suppressed because it is too large
@ -45,20 +45,20 @@ pub fn layout_lr(
// Scale up fragments at both ends. // Scale up fragments at both ends.
match inner_fragments { match inner_fragments {
[one] => scale(ctx, styles, one, relative_to, height, None), [one] => scale_if_delimiter(ctx, one, relative_to, height, None),
[first, .., last] => { [first, .., last] => {
scale(ctx, styles, first, relative_to, height, Some(MathClass::Opening)); scale_if_delimiter(ctx, first, relative_to, height, Some(MathClass::Opening));
scale(ctx, styles, last, relative_to, height, Some(MathClass::Closing)); scale_if_delimiter(ctx, last, relative_to, height, Some(MathClass::Closing));
} }
_ => {} [] => {}
} }
// Handle MathFragment::Variant fragments that should be scaled up. // Handle MathFragment::Glyph fragments that should be scaled up.
for fragment in inner_fragments.iter_mut() { for fragment in inner_fragments.iter_mut() {
if let MathFragment::Variant(ref mut variant) = fragment { if let MathFragment::Glyph(ref mut glyph) = fragment {
if variant.mid_stretched == Some(false) { if glyph.mid_stretched == Some(false) {
variant.mid_stretched = Some(true); glyph.mid_stretched = Some(true);
scale(ctx, styles, fragment, relative_to, height, Some(MathClass::Large)); scale(ctx, fragment, relative_to, height);
} }
} }
} }
@ -95,18 +95,9 @@ pub fn layout_mid(
let mut fragments = ctx.layout_into_fragments(&elem.body, styles)?; let mut fragments = ctx.layout_into_fragments(&elem.body, styles)?;
for fragment in &mut fragments { for fragment in &mut fragments {
match fragment { if let MathFragment::Glyph(ref mut glyph) = fragment {
MathFragment::Glyph(glyph) => { glyph.mid_stretched = Some(false);
let mut new = glyph.clone().into_variant(); glyph.class = MathClass::Relation;
new.mid_stretched = Some(false);
new.class = MathClass::Fence;
*fragment = MathFragment::Variant(new);
}
MathFragment::Variant(variant) => {
variant.mid_stretched = Some(false);
variant.class = MathClass::Fence;
}
_ => {}
} }
} }
@ -114,10 +105,13 @@ pub fn layout_mid(
Ok(()) Ok(())
} }
/// Scale a math fragment to a height. /// Scales a math fragment to a height if it has the class Opening, Closing, or
fn scale( /// Fence.
///
/// In case `apply` is `Some(class)`, `class` will be applied to the fragment if
/// it is a delimiter, in a way that cannot be overridden by the user.
fn scale_if_delimiter(
ctx: &mut MathContext, ctx: &mut MathContext,
styles: StyleChain,
fragment: &mut MathFragment, fragment: &mut MathFragment,
relative_to: Abs, relative_to: Abs,
height: Rel<Abs>, height: Rel<Abs>,
@ -127,21 +121,23 @@ fn scale(
fragment.class(), fragment.class(),
MathClass::Opening | MathClass::Closing | MathClass::Fence MathClass::Opening | MathClass::Closing | MathClass::Fence
) { ) {
// This unwrap doesn't really matter. If it is None, then the fragment scale(ctx, fragment, relative_to, height);
// won't be stretchable anyways.
let short_fall = DELIM_SHORT_FALL.at(fragment.font_size().unwrap_or_default());
stretch_fragment(
ctx,
styles,
fragment,
Some(Axis::Y),
Some(relative_to),
height,
short_fall,
);
if let Some(class) = apply { if let Some(class) = apply {
fragment.set_class(class); fragment.set_class(class);
} }
} }
} }
/// Scales a math fragment to a height.
fn scale(
ctx: &mut MathContext,
fragment: &mut MathFragment,
relative_to: Abs,
height: Rel<Abs>,
) {
// This unwrap doesn't really matter. If it is None, then the fragment
// won't be stretchable anyways.
let short_fall = DELIM_SHORT_FALL.at(fragment.font_size().unwrap_or_default());
stretch_fragment(ctx, fragment, Some(Axis::Y), Some(relative_to), height, short_fall);
}
@ -1,4 +1,4 @@
use typst_library::diag::{bail, SourceResult}; use typst_library::diag::{bail, warning, SourceResult};
use typst_library::foundations::{Content, Packed, Resolve, StyleChain}; use typst_library::foundations::{Content, Packed, Resolve, StyleChain};
use typst_library::layout::{ use typst_library::layout::{
Abs, Axes, Em, FixedAlignment, Frame, FrameItem, Point, Ratio, Rel, Size, Abs, Axes, Em, FixedAlignment, Frame, FrameItem, Point, Ratio, Rel, Size,
@ -9,8 +9,8 @@ use typst_library::visualize::{FillRule, FixedStroke, Geometry, LineCap, Shape};
use typst_syntax::Span; use typst_syntax::Span;
use super::{ use super::{
alignments, delimiter_alignment, stack, style_for_denominator, AlignmentResult, alignments, style_for_denominator, AlignmentResult, FrameFragment, GlyphFragment,
FrameFragment, GlyphFragment, LeftRightAlternator, MathContext, DELIM_SHORT_FALL, LeftRightAlternator, MathContext, DELIM_SHORT_FALL,
}; };
const VERTICAL_PADDING: Ratio = Ratio::new(0.1); const VERTICAL_PADDING: Ratio = Ratio::new(0.1);
@ -23,67 +23,23 @@ pub fn layout_vec(
ctx: &mut MathContext, ctx: &mut MathContext,
styles: StyleChain, styles: StyleChain,
) -> SourceResult<()> { ) -> SourceResult<()> {
let delim = elem.delim(styles); let span = elem.span();
let frame = layout_vec_body(
let column: Vec<&Content> = elem.children.iter().collect();
let frame = layout_body(
ctx, ctx,
styles, styles,
&elem.children, &[column],
elem.align(styles), elem.align(styles),
elem.gap(styles),
LeftRightAlternator::Right, LeftRightAlternator::Right,
None,
Axes::with_y(elem.gap(styles)),
span,
"elements",
)?; )?;
layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), elem.span())
}
/// Lays out a [`MatElem`].
#[typst_macros::time(name = "math.mat", span = elem.span())]
pub fn layout_mat(
elem: &Packed<MatElem>,
ctx: &mut MathContext,
styles: StyleChain,
) -> SourceResult<()> {
let augment = elem.augment(styles);
let rows = &elem.rows;
if let Some(aug) = &augment {
for &offset in &aug.hline.0 {
if offset == 0 || offset.unsigned_abs() >= rows.len() {
bail!(
elem.span(),
"cannot draw a horizontal line after row {} of a matrix with {} rows",
if offset < 0 { rows.len() as isize + offset } else { offset },
rows.len()
);
}
}
let ncols = rows.first().map_or(0, |row| row.len());
for &offset in &aug.vline.0 {
if offset == 0 || offset.unsigned_abs() >= ncols {
bail!(
elem.span(),
"cannot draw a vertical line after column {} of a matrix with {} columns",
if offset < 0 { ncols as isize + offset } else { offset },
ncols
);
}
}
}
let delim = elem.delim(styles); let delim = elem.delim(styles);
let frame = layout_mat_body( layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), span)
ctx,
styles,
rows,
elem.align(styles),
augment,
Axes::new(elem.column_gap(styles), elem.row_gap(styles)),
elem.span(),
)?;
layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), elem.span())
} }
/// Lays out a [`CasesElem`]. /// Lays out a [`CasesElem`].
@ -93,60 +49,100 @@ pub fn layout_cases(
ctx: &mut MathContext, ctx: &mut MathContext,
styles: StyleChain, styles: StyleChain,
) -> SourceResult<()> { ) -> SourceResult<()> {
let delim = elem.delim(styles); let span = elem.span();
let frame = layout_vec_body(
let column: Vec<&Content> = elem.children.iter().collect();
let frame = layout_body(
ctx, ctx,
styles, styles,
&elem.children, &[column],
FixedAlignment::Start, FixedAlignment::Start,
elem.gap(styles),
LeftRightAlternator::None, LeftRightAlternator::None,
None,
Axes::with_y(elem.gap(styles)),
span,
"branches",
)?; )?;
let delim = elem.delim(styles);
let (open, close) = let (open, close) =
if elem.reverse(styles) { (None, delim.close()) } else { (delim.open(), None) }; if elem.reverse(styles) { (None, delim.close()) } else { (delim.open(), None) };
layout_delimiters(ctx, styles, frame, open, close, span)
layout_delimiters(ctx, styles, frame, open, close, elem.span())
} }
/// Layout the inner contents of a vector. /// Lays out a [`MatElem`].
fn layout_vec_body( #[typst_macros::time(name = "math.mat", span = elem.span())]
pub fn layout_mat(
elem: &Packed<MatElem>,
ctx: &mut MathContext, ctx: &mut MathContext,
styles: StyleChain, styles: StyleChain,
column: &[Content], ) -> SourceResult<()> {
align: FixedAlignment, let span = elem.span();
row_gap: Rel<Abs>, let rows = &elem.rows;
alternator: LeftRightAlternator, let ncols = rows.first().map_or(0, |row| row.len());
) -> SourceResult<Frame> {
let gap = row_gap.relative_to(ctx.region.size.y);
let denom_style = style_for_denominator(styles); let augment = elem.augment(styles);
let mut flat = vec![]; if let Some(aug) = &augment {
for child in column { for &offset in &aug.hline.0 {
// We allow linebreaks in cases and vectors, which are functionally if offset == 0 || offset.unsigned_abs() >= rows.len() {
// identical to commas. bail!(
flat.extend(ctx.layout_into_run(child, styles.chain(&denom_style))?.rows()); span,
"cannot draw a horizontal line after row {} of a matrix with {} rows",
if offset < 0 { rows.len() as isize + offset } else { offset },
rows.len()
);
} }
// We pad ascent and descent with the ascent and descent of the paren }
// to ensure that normal vectors are aligned with others unless they are
// way too big. for &offset in &aug.vline.0 {
let paren = if offset == 0 || offset.unsigned_abs() >= ncols {
GlyphFragment::new(ctx, styles.chain(&denom_style), '(', Span::detached()); bail!(
Ok(stack(flat, align, gap, 0, alternator, Some((paren.ascent, paren.descent)))) span,
"cannot draw a vertical line after column {} of a matrix with {} columns",
if offset < 0 { ncols as isize + offset } else { offset },
ncols
);
}
}
}
// Transpose rows of the matrix into columns.
let mut row_iters: Vec<_> = rows.iter().map(|i| i.iter()).collect();
let columns: Vec<Vec<_>> = (0..ncols)
.map(|_| row_iters.iter_mut().map(|i| i.next().unwrap()).collect())
.collect();
let frame = layout_body(
ctx,
styles,
&columns,
elem.align(styles),
LeftRightAlternator::Right,
augment,
Axes::new(elem.column_gap(styles), elem.row_gap(styles)),
span,
"cells",
)?;
let delim = elem.delim(styles);
layout_delimiters(ctx, styles, frame, delim.open(), delim.close(), span)
} }
/// Layout the inner contents of a matrix. /// Layout the inner contents of a matrix, vector, or cases.
fn layout_mat_body( #[allow(clippy::too_many_arguments)]
fn layout_body(
ctx: &mut MathContext, ctx: &mut MathContext,
styles: StyleChain, styles: StyleChain,
rows: &[Vec<Content>], columns: &[Vec<&Content>],
align: FixedAlignment, align: FixedAlignment,
alternator: LeftRightAlternator,
augment: Option<Augment<Abs>>, augment: Option<Augment<Abs>>,
gap: Axes<Rel<Abs>>, gap: Axes<Rel<Abs>>,
span: Span, span: Span,
children: &str,
) -> SourceResult<Frame> { ) -> SourceResult<Frame> {
let ncols = rows.first().map_or(0, |row| row.len()); let nrows = columns.first().map_or(0, |col| col.len());
let nrows = rows.len(); let ncols = columns.len();
if ncols == 0 || nrows == 0 { if ncols == 0 || nrows == 0 {
return Ok(Frame::soft(Size::zero())); return Ok(Frame::soft(Size::zero()));
} }
@ -178,29 +174,40 @@ fn layout_mat_body(
// Before the full matrix body can be laid out, the // Before the full matrix body can be laid out, the
// individual cells must first be independently laid out // individual cells must first be independently laid out
// so we can ensure alignment across rows and columns. // so we can ensure alignment across rows and columns.
let mut cols = vec![vec![]; ncols];
// This variable stores the maximum ascent and descent for each row. // This variable stores the maximum ascent and descent for each row.
let mut heights = vec![(Abs::zero(), Abs::zero()); nrows]; let mut heights = vec![(Abs::zero(), Abs::zero()); nrows];
// We want to transpose our data layout to columns
// before final layout. For efficiency, the columns
// variable is set up here and newly generated
// individual cells are then added to it.
let mut cols = vec![vec![]; ncols];
let denom_style = style_for_denominator(styles); let denom_style = style_for_denominator(styles);
// We pad ascent and descent with the ascent and descent of the paren // We pad ascent and descent with the ascent and descent of the paren
// to ensure that normal matrices are aligned with others unless they are // to ensure that normal matrices are aligned with others unless they are
// way too big. // way too big.
let paren = let paren = GlyphFragment::new_char(
GlyphFragment::new(ctx, styles.chain(&denom_style), '(', Span::detached()); ctx.font,
styles.chain(&denom_style),
'(',
Span::detached(),
)?;
for (row, (ascent, descent)) in rows.iter().zip(&mut heights) { for (column, col) in columns.iter().zip(&mut cols) {
for (cell, col) in row.iter().zip(&mut cols) { for (cell, (ascent, descent)) in column.iter().zip(&mut heights) {
let cell_span = cell.span();
let cell = ctx.layout_into_run(cell, styles.chain(&denom_style))?; let cell = ctx.layout_into_run(cell, styles.chain(&denom_style))?;
ascent.set_max(cell.ascent().max(paren.ascent)); // We ignore linebreaks in the cells as we can't differentiate
descent.set_max(cell.descent().max(paren.descent)); // alignment points for the whole body from ones for a specific
// cell, and multiline cells don't quite make sense at the moment.
if cell.is_multiline() {
ctx.engine.sink.warn(warning!(
cell_span,
"linebreaks are ignored in {}", children;
hint: "use commas instead to separate each line"
));
}
ascent.set_max(cell.ascent().max(paren.ascent()));
descent.set_max(cell.descent().max(paren.descent()));
col.push(cell); col.push(cell);
} }
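A short Typst sketch of input that now triggers the warning added above (hypothetical snippet; the message reads "linebreaks are ignored in cells", or "elements"/"branches" for `vec` and `cases`):

```typst
// Warns and hints to use commas instead, since the linebreak is ignored:
$ mat(a \ b, c; d, e) $
// Supported way to separate entries:
$ vec(a, b, c) $
```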
@ -222,7 +229,7 @@ fn layout_mat_body(
let mut y = Abs::zero(); let mut y = Abs::zero();
for (cell, &(ascent, descent)) in col.into_iter().zip(&heights) { for (cell, &(ascent, descent)) in col.into_iter().zip(&heights) {
let cell = cell.into_line_frame(&points, LeftRightAlternator::Right); let cell = cell.into_line_frame(&points, alternator);
let pos = Point::new( let pos = Point::new(
if points.is_empty() { if points.is_empty() {
x + align.position(rcol - cell.width()) x + align.position(rcol - cell.width())
@ -309,19 +316,19 @@ fn layout_delimiters(
let target = height + VERTICAL_PADDING.of(height); let target = height + VERTICAL_PADDING.of(height);
frame.set_baseline(height / 2.0 + axis); frame.set_baseline(height / 2.0 + axis);
if let Some(left) = left { if let Some(left_c) = left {
let mut left = GlyphFragment::new(ctx, styles, left, span) let mut left = GlyphFragment::new_char(ctx.font, styles, left_c, span)?;
.stretch_vertical(ctx, target, short_fall); left.stretch_vertical(ctx, target - short_fall);
left.align_on_axis(ctx, delimiter_alignment(left.c)); left.center_on_axis();
ctx.push(left); ctx.push(left);
} }
ctx.push(FrameFragment::new(styles, frame)); ctx.push(FrameFragment::new(styles, frame));
if let Some(right) = right { if let Some(right_c) = right {
let mut right = GlyphFragment::new(ctx, styles, right, span) let mut right = GlyphFragment::new_char(ctx.font, styles, right_c, span)?;
.stretch_vertical(ctx, target, short_fall); right.stretch_vertical(ctx, target - short_fall);
right.align_on_axis(ctx, delimiter_alignment(right.c)); right.center_on_axis();
ctx.push(right); ctx.push(right);
} }

View File

@ -13,8 +13,6 @@ mod stretch;
mod text; mod text;
mod underover; mod underover;
use rustybuzz::Feature;
use ttf_parser::Tag;
use typst_library::diag::{bail, SourceResult}; use typst_library::diag::{bail, SourceResult};
use typst_library::engine::Engine; use typst_library::engine::Engine;
use typst_library::foundations::{ use typst_library::foundations::{
@ -30,7 +28,7 @@ use typst_library::math::*;
use typst_library::model::ParElem; use typst_library::model::ParElem;
use typst_library::routines::{Arenas, RealizationKind}; use typst_library::routines::{Arenas, RealizationKind};
use typst_library::text::{ use typst_library::text::{
families, features, variant, Font, LinebreakElem, SpaceElem, TextEdgeBounds, TextElem, families, variant, Font, LinebreakElem, SpaceElem, TextEdgeBounds, TextElem,
}; };
use typst_library::World; use typst_library::World;
use typst_syntax::Span; use typst_syntax::Span;
@ -38,11 +36,11 @@ use typst_utils::Numeric;
use unicode_math_class::MathClass; use unicode_math_class::MathClass;
use self::fragment::{ use self::fragment::{
FrameFragment, GlyphFragment, GlyphwiseSubsts, Limits, MathFragment, VariantFragment, has_dtls_feat, stretch_axes, FrameFragment, GlyphFragment, Limits, MathFragment,
}; };
use self::run::{LeftRightAlternator, MathRun, MathRunFrameBuilder}; use self::run::{LeftRightAlternator, MathRun, MathRunFrameBuilder};
use self::shared::*; use self::shared::*;
use self::stretch::{stretch_fragment, stretch_glyph}; use self::stretch::stretch_fragment;
/// Layout an inline equation (in a paragraph). /// Layout an inline equation (in a paragraph).
#[typst_macros::time(span = elem.span())] #[typst_macros::time(span = elem.span())]
@ -58,7 +56,7 @@ pub fn layout_equation_inline(
let font = find_math_font(engine, styles, elem.span())?; let font = find_math_font(engine, styles, elem.span())?;
let mut locator = locator.split(); let mut locator = locator.split();
let mut ctx = MathContext::new(engine, &mut locator, styles, region, &font); let mut ctx = MathContext::new(engine, &mut locator, region, &font);
let scale_style = style_for_script_scale(&ctx); let scale_style = style_for_script_scale(&ctx);
let styles = styles.chain(&scale_style); let styles = styles.chain(&scale_style);
@ -113,7 +111,7 @@ pub fn layout_equation_block(
let font = find_math_font(engine, styles, span)?; let font = find_math_font(engine, styles, span)?;
let mut locator = locator.split(); let mut locator = locator.split();
let mut ctx = MathContext::new(engine, &mut locator, styles, regions.base(), &font); let mut ctx = MathContext::new(engine, &mut locator, regions.base(), &font);
let scale_style = style_for_script_scale(&ctx); let scale_style = style_for_script_scale(&ctx);
let styles = styles.chain(&scale_style); let styles = styles.chain(&scale_style);
@ -374,14 +372,7 @@ struct MathContext<'a, 'v, 'e> {
region: Region, region: Region,
// Font-related. // Font-related.
font: &'a Font, font: &'a Font,
ttf: &'a ttf_parser::Face<'a>,
table: ttf_parser::math::Table<'a>,
constants: ttf_parser::math::Constants<'a>, constants: ttf_parser::math::Constants<'a>,
dtls_table: Option<GlyphwiseSubsts<'a>>,
flac_table: Option<GlyphwiseSubsts<'a>>,
ssty_table: Option<GlyphwiseSubsts<'a>>,
glyphwise_tables: Option<Vec<GlyphwiseSubsts<'a>>>,
space_width: Em,
// Mutable. // Mutable.
fragments: Vec<MathFragment>, fragments: Vec<MathFragment>,
} }
@ -391,46 +382,20 @@ impl<'a, 'v, 'e> MathContext<'a, 'v, 'e> {
fn new( fn new(
engine: &'v mut Engine<'e>, engine: &'v mut Engine<'e>,
locator: &'v mut SplitLocator<'a>, locator: &'v mut SplitLocator<'a>,
styles: StyleChain<'a>,
base: Size, base: Size,
font: &'a Font, font: &'a Font,
) -> Self { ) -> Self {
let math_table = font.ttf().tables().math.unwrap(); // These unwraps are safe as the font given is one returned by the
let gsub_table = font.ttf().tables().gsub; // find_math_font function, which only returns fonts that have a math
let constants = math_table.constants.unwrap(); // constants table.
let constants = font.ttf().tables().math.unwrap().constants.unwrap();
let feat = |tag: &[u8; 4]| {
GlyphwiseSubsts::new(gsub_table, Feature::new(Tag::from_bytes(tag), 0, ..))
};
let features = features(styles);
let glyphwise_tables = Some(
features
.into_iter()
.filter_map(|feature| GlyphwiseSubsts::new(gsub_table, feature))
.collect(),
);
let ttf = font.ttf();
let space_width = ttf
.glyph_index(' ')
.and_then(|id| ttf.glyph_hor_advance(id))
.map(|advance| font.to_em(advance))
.unwrap_or(THICK);
Self { Self {
engine, engine,
locator, locator,
region: Region::new(base, Axes::splat(false)), region: Region::new(base, Axes::splat(false)),
font, font,
ttf,
table: math_table,
constants, constants,
dtls_table: feat(b"dtls"),
flac_table: feat(b"flac"),
ssty_table: feat(b"ssty"),
glyphwise_tables,
space_width,
fragments: vec![], fragments: vec![],
} }
} }
@ -529,7 +494,8 @@ fn layout_realized(
if let Some(elem) = elem.to_packed::<TagElem>() { if let Some(elem) = elem.to_packed::<TagElem>() {
ctx.push(MathFragment::Tag(elem.tag.clone())); ctx.push(MathFragment::Tag(elem.tag.clone()));
} else if elem.is::<SpaceElem>() { } else if elem.is::<SpaceElem>() {
ctx.push(MathFragment::Space(ctx.space_width.resolve(styles))); let space_width = ctx.font.space_width().unwrap_or(THICK);
ctx.push(MathFragment::Space(space_width.resolve(styles)));
} else if elem.is::<LinebreakElem>() { } else if elem.is::<LinebreakElem>() {
ctx.push(MathFragment::Linebreak); ctx.push(MathFragment::Linebreak);
} else if let Some(elem) = elem.to_packed::<HElem>() { } else if let Some(elem) = elem.to_packed::<HElem>() {

View File

@ -49,9 +49,9 @@ pub fn layout_root(
// Layout root symbol. // Layout root symbol.
let target = radicand.height() + thickness + gap; let target = radicand.height() + thickness + gap;
let sqrt = GlyphFragment::new(ctx, styles, '√', span) let mut sqrt = GlyphFragment::new_char(ctx.font, styles, '√', span)?;
.stretch_vertical(ctx, target, Abs::zero()) sqrt.stretch_vertical(ctx, target);
.frame; let sqrt = sqrt.into_frame();
// Layout the index. // Layout the index.
let sscript = EquationElem::set_size(MathSize::ScriptScript).wrap(); let sscript = EquationElem::set_size(MathSize::ScriptScript).wrap();

View File

@ -278,6 +278,9 @@ impl MathRun {
frame frame
} }
/// Convert this run of math fragments into a vector of inline items for
/// paragraph layout. Creates multiple fragments when relation or binary
/// operators are present to allow for line-breaking opportunities later.
pub fn into_par_items(self) -> Vec<InlineItem> { pub fn into_par_items(self) -> Vec<InlineItem> {
let mut items = vec![]; let mut items = vec![];
@ -295,22 +298,25 @@ impl MathRun {
let mut space_is_visible = false; let mut space_is_visible = false;
let is_relation = |f: &MathFragment| matches!(f.class(), MathClass::Relation);
let is_space = |f: &MathFragment| { let is_space = |f: &MathFragment| {
matches!(f, MathFragment::Space(_) | MathFragment::Spacing(_, _)) matches!(f, MathFragment::Space(_) | MathFragment::Spacing(_, _))
}; };
let is_line_break_opportunity = |class, next_fragment| match class {
// Don't split when two relations are in a row or when preceding a
// closing parenthesis.
MathClass::Binary => next_fragment != Some(MathClass::Closing),
MathClass::Relation => {
!matches!(next_fragment, Some(MathClass::Relation | MathClass::Closing))
}
_ => false,
};
let mut iter = self.0.into_iter().peekable(); let mut iter = self.0.into_iter().peekable();
while let Some(fragment) = iter.next() { while let Some(fragment) = iter.next() {
if space_is_visible { if space_is_visible && is_space(&fragment) {
match fragment { items.push(InlineItem::Space(fragment.width(), true));
MathFragment::Space(width) | MathFragment::Spacing(width, _) => {
items.push(InlineItem::Space(width, true));
continue; continue;
} }
_ => {}
}
}
let class = fragment.class(); let class = fragment.class();
let y = fragment.ascent(); let y = fragment.ascent();
@ -323,10 +329,9 @@ impl MathRun {
frame.push_frame(pos, fragment.into_frame()); frame.push_frame(pos, fragment.into_frame());
empty = false; empty = false;
if class == MathClass::Binary // Split our current frame when we encounter a binary operator or
|| (class == MathClass::Relation // relation so that there is a line-breaking opportunity.
&& !iter.peek().map(is_relation).unwrap_or_default()) if is_line_break_opportunity(class, iter.peek().map(|f| f.class())) {
{
let mut frame_prev = let mut frame_prev =
std::mem::replace(&mut frame, Frame::soft(Size::zero())); std::mem::replace(&mut frame, Frame::soft(Size::zero()));
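As a rough illustration of what the refactored splitting enables, an inline equation in a narrow paragraph can break after binary operators and relations, but not between consecutive relations or right before a closing parenthesis (a sketch; actual break points depend on the available width):

```typst
#block(width: 6em)[
  Inline math such as $a + b = c - (d + e)$ may wrap after $+$ or $=$.
]
```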

View File

@ -1,7 +1,9 @@
use ttf_parser::math::MathValue; use ttf_parser::math::MathValue;
use ttf_parser::Tag;
use typst_library::foundations::{Style, StyleChain}; use typst_library::foundations::{Style, StyleChain};
use typst_library::layout::{Abs, Em, FixedAlignment, Frame, Point, Size, VAlignment}; use typst_library::layout::{Abs, Em, FixedAlignment, Frame, Point, Size};
use typst_library::math::{EquationElem, MathSize}; use typst_library::math::{EquationElem, MathSize};
use typst_library::text::{FontFeatures, TextElem};
use typst_utils::LazyHash; use typst_utils::LazyHash;
use super::{LeftRightAlternator, MathContext, MathFragment, MathRun}; use super::{LeftRightAlternator, MathContext, MathFragment, MathRun};
@ -59,6 +61,16 @@ pub fn style_cramped() -> LazyHash<Style> {
EquationElem::set_cramped(true).wrap() EquationElem::set_cramped(true).wrap()
} }
/// Sets flac OpenType feature.
pub fn style_flac() -> LazyHash<Style> {
TextElem::set_features(FontFeatures(vec![(Tag::from_bytes(b"flac"), 1)])).wrap()
}
/// Sets dtls OpenType feature.
pub fn style_dtls() -> LazyHash<Style> {
TextElem::set_features(FontFeatures(vec![(Tag::from_bytes(b"dtls"), 1)])).wrap()
}
/// The style for subscripts in the current style. /// The style for subscripts in the current style.
pub fn style_for_subscript(styles: StyleChain) -> [LazyHash<Style>; 2] { pub fn style_for_subscript(styles: StyleChain) -> [LazyHash<Style>; 2] {
[style_for_superscript(styles), EquationElem::set_cramped(true).wrap()] [style_for_superscript(styles), EquationElem::set_cramped(true).wrap()]
@ -97,15 +109,6 @@ pub fn style_for_script_scale(ctx: &MathContext) -> LazyHash<Style> {
.wrap() .wrap()
} }
/// How a delimiter should be aligned when scaling.
pub fn delimiter_alignment(delimiter: char) -> VAlignment {
match delimiter {
'⌜' | '⌝' => VAlignment::Top,
'⌞' | '⌟' => VAlignment::Bottom,
_ => VAlignment::Horizon,
}
}
/// Stack rows on top of each other. /// Stack rows on top of each other.
/// ///
/// Add a `gap` between each row and uses the baseline of the `baseline`-th /// Add a `gap` between each row and uses the baseline of the `baseline`-th
@ -117,7 +120,6 @@ pub fn stack(
gap: Abs, gap: Abs,
baseline: usize, baseline: usize,
alternator: LeftRightAlternator, alternator: LeftRightAlternator,
minimum_ascent_descent: Option<(Abs, Abs)>,
) -> Frame { ) -> Frame {
let AlignmentResult { points, width } = alignments(&rows); let AlignmentResult { points, width } = alignments(&rows);
let rows: Vec<_> = rows let rows: Vec<_> = rows
@ -125,13 +127,9 @@ pub fn stack(
.map(|row| row.into_line_frame(&points, alternator)) .map(|row| row.into_line_frame(&points, alternator))
.collect(); .collect();
let padded_height = |height: Abs| {
height.max(minimum_ascent_descent.map_or(Abs::zero(), |(a, d)| a + d))
};
let mut frame = Frame::soft(Size::new( let mut frame = Frame::soft(Size::new(
width, width,
rows.iter().map(|row| padded_height(row.height())).sum::<Abs>() rows.iter().map(|row| row.height()).sum::<Abs>()
+ rows.len().saturating_sub(1) as f64 * gap, + rows.len().saturating_sub(1) as f64 * gap,
)); ));
@ -142,14 +140,11 @@ pub fn stack(
} else { } else {
Abs::zero() Abs::zero()
}; };
let ascent_padded_part = minimum_ascent_descent let pos = Point::new(x, y);
.map_or(Abs::zero(), |(a, _)| (a - row.ascent()))
.max(Abs::zero());
let pos = Point::new(x, y + ascent_padded_part);
if i == baseline { if i == baseline {
frame.set_baseline(y + row.baseline() + ascent_padded_part); frame.set_baseline(y + row.baseline());
} }
y += padded_height(row.height()) + gap; y += row.height() + gap;
frame.push_frame(pos, row); frame.push_frame(pos, row);
} }

View File

@ -1,19 +1,10 @@
use ttf_parser::math::{GlyphAssembly, GlyphConstruction, GlyphPart};
use ttf_parser::LazyArray16;
use typst_library::diag::{warning, SourceResult}; use typst_library::diag::{warning, SourceResult};
use typst_library::foundations::{Packed, StyleChain}; use typst_library::foundations::{Packed, StyleChain};
use typst_library::layout::{Abs, Axis, Frame, Point, Rel, Size}; use typst_library::layout::{Abs, Axis, Rel};
use typst_library::math::StretchElem; use typst_library::math::StretchElem;
use typst_utils::Get; use typst_utils::Get;
use super::{ use super::{stretch_axes, MathContext, MathFragment};
delimiter_alignment, GlyphFragment, MathContext, MathFragment, Scaled,
VariantFragment,
};
use crate::modifiers::FrameModify;
/// Maximum number of times extenders can be repeated.
const MAX_REPEATS: usize = 1024;
/// Lays out a [`StretchElem`]. /// Lays out a [`StretchElem`].
#[typst_macros::time(name = "math.stretch", span = elem.span())] #[typst_macros::time(name = "math.stretch", span = elem.span())]
@ -23,15 +14,7 @@ pub fn layout_stretch(
styles: StyleChain, styles: StyleChain,
) -> SourceResult<()> { ) -> SourceResult<()> {
let mut fragment = ctx.layout_into_fragment(&elem.body, styles)?; let mut fragment = ctx.layout_into_fragment(&elem.body, styles)?;
stretch_fragment( stretch_fragment(ctx, &mut fragment, None, None, elem.size(styles), Abs::zero());
ctx,
styles,
&mut fragment,
None,
None,
elem.size(styles),
Abs::zero(),
);
ctx.push(fragment); ctx.push(fragment);
Ok(()) Ok(())
} }
@ -39,269 +22,49 @@ pub fn layout_stretch(
/// Attempts to stretch the given fragment by/to the amount given in stretch. /// Attempts to stretch the given fragment by/to the amount given in stretch.
pub fn stretch_fragment( pub fn stretch_fragment(
ctx: &mut MathContext, ctx: &mut MathContext,
styles: StyleChain,
fragment: &mut MathFragment, fragment: &mut MathFragment,
axis: Option<Axis>, axis: Option<Axis>,
relative_to: Option<Abs>, relative_to: Option<Abs>,
stretch: Rel<Abs>, stretch: Rel<Abs>,
short_fall: Abs, short_fall: Abs,
) { ) {
let glyph = match fragment { let size = fragment.size();
MathFragment::Glyph(glyph) => glyph.clone(),
MathFragment::Variant(variant) => { let MathFragment::Glyph(ref mut glyph) = fragment else { return };
GlyphFragment::new(ctx, styles, variant.c, variant.span)
}
_ => return,
};
// Return if we attempt to stretch along an axis which isn't stretchable, // Return if we attempt to stretch along an axis which isn't stretchable,
// so that the original fragment isn't modified. // so that the original fragment isn't modified.
let Some(stretch_axis) = stretch_axis(ctx, &glyph) else { return }; let axes = stretch_axes(&glyph.item.font, glyph.base_glyph.id);
let axis = axis.unwrap_or(stretch_axis); let stretch_axis = if let Some(axis) = axis {
if axis != stretch_axis { if !axes.get(axis) {
return; return;
} }
axis
let relative_to_size = relative_to.unwrap_or_else(|| fragment.size().get(axis)); } else {
match (axes.x, axes.y) {
let mut variant = stretch_glyph( (true, false) => Axis::X,
ctx, (false, true) => Axis::Y,
glyph, (false, false) => return,
stretch.relative_to(relative_to_size), (true, true) => {
short_fall,
axis,
);
if axis == Axis::Y {
variant.align_on_axis(ctx, delimiter_alignment(variant.c));
}
*fragment = MathFragment::Variant(variant);
}
/// Return whether the glyph is stretchable and if it is, along which axis it
/// can be stretched.
fn stretch_axis(ctx: &mut MathContext, base: &GlyphFragment) -> Option<Axis> {
let base_id = base.id;
let vertical = ctx
.table
.variants
.and_then(|variants| variants.vertical_constructions.get(base_id))
.map(|_| Axis::Y);
let horizontal = ctx
.table
.variants
.and_then(|variants| variants.horizontal_constructions.get(base_id))
.map(|_| Axis::X);
match (vertical, horizontal) {
(vertical, None) => vertical,
(None, horizontal) => horizontal,
_ => {
// As far as we know, there aren't any glyphs that have both // As far as we know, there aren't any glyphs that have both
// vertical and horizontal constructions. So for the time being, we // vertical and horizontal constructions. So for the time being, we
// will assume that a glyph cannot have both. // will assume that a glyph cannot have both.
ctx.engine.sink.warn(warning!( ctx.engine.sink.warn(warning!(
base.span, glyph.item.glyphs[0].span.0,
"glyph has both vertical and horizontal constructions"; "glyph has both vertical and horizontal constructions";
hint: "this is probably a font bug"; hint: "this is probably a font bug";
hint: "please file an issue at https://github.com/typst/typst/issues" hint: "please file an issue at https://github.com/typst/typst/issues"
)); ));
return;
None
} }
} }
}
/// Try to stretch a glyph to a desired width or height.
///
/// The resulting frame may not have the exact desired width.
pub fn stretch_glyph(
ctx: &mut MathContext,
mut base: GlyphFragment,
target: Abs,
short_fall: Abs,
axis: Axis,
) -> VariantFragment {
// If the base glyph is good enough, use it.
let advance = match axis {
Axis::X => base.width,
Axis::Y => base.height(),
};
let short_target = target - short_fall;
if short_target <= advance {
return base.into_variant();
}
let mut min_overlap = Abs::zero();
let construction = ctx
.table
.variants
.and_then(|variants| {
min_overlap = variants.min_connector_overlap.scaled(ctx, base.font_size);
match axis {
Axis::X => variants.horizontal_constructions,
Axis::Y => variants.vertical_constructions,
}
.get(base.id)
})
.unwrap_or(GlyphConstruction { assembly: None, variants: LazyArray16::new(&[]) });
// Search for a pre-made variant with a good advance.
let mut best_id = base.id;
let mut best_advance = base.width;
for variant in construction.variants {
best_id = variant.variant_glyph;
best_advance = base.font.to_em(variant.advance_measurement).at(base.font_size);
if short_target <= best_advance {
break;
}
}
// This is either good or the best we've got.
if short_target <= best_advance || construction.assembly.is_none() {
base.set_id(ctx, best_id);
return base.into_variant();
}
// Assemble from parts.
let assembly = construction.assembly.unwrap();
assemble(ctx, base, assembly, min_overlap, target, axis)
}
/// Assemble a glyph from parts.
fn assemble(
ctx: &mut MathContext,
base: GlyphFragment,
assembly: GlyphAssembly,
min_overlap: Abs,
target: Abs,
axis: Axis,
) -> VariantFragment {
// Determine the number of times the extenders need to be repeated as well
// as a ratio specifying how much to spread the parts apart
// (0 = maximal overlap, 1 = minimal overlap).
let mut full;
let mut ratio;
let mut repeat = 0;
loop {
full = Abs::zero();
ratio = 0.0;
let mut parts = parts(assembly, repeat).peekable();
let mut growable = Abs::zero();
while let Some(part) = parts.next() {
let mut advance = part.full_advance.scaled(ctx, base.font_size);
if let Some(next) = parts.peek() {
let max_overlap = part
.end_connector_length
.min(next.start_connector_length)
.scaled(ctx, base.font_size);
if max_overlap < min_overlap {
// This condition happening is indicative of a bug in the
// font.
ctx.engine.sink.warn(warning!(
base.span,
"glyph has assembly parts with overlap less than minConnectorOverlap";
hint: "its rendering may appear broken - this is probably a font bug";
hint: "please file an issue at https://github.com/typst/typst/issues"
));
}
advance -= max_overlap;
growable += max_overlap - min_overlap;
}
full += advance;
}
if full < target {
let delta = target - full;
ratio = (delta / growable).min(1.0);
full += ratio * growable;
}
if target <= full || repeat >= MAX_REPEATS {
break;
}
repeat += 1;
}
let mut selected = vec![];
let mut parts = parts(assembly, repeat).peekable();
while let Some(part) = parts.next() {
let mut advance = part.full_advance.scaled(ctx, base.font_size);
if let Some(next) = parts.peek() {
let max_overlap = part
.end_connector_length
.min(next.start_connector_length)
.scaled(ctx, base.font_size);
advance -= max_overlap;
advance += ratio * (max_overlap - min_overlap);
}
let mut fragment = base.clone();
fragment.set_id(ctx, part.glyph_id);
selected.push((fragment, advance));
}
let size;
let baseline;
match axis {
Axis::X => {
let height = base.ascent + base.descent;
size = Size::new(full, height);
baseline = base.ascent;
}
Axis::Y => {
let axis = ctx.constants.axis_height().scaled(ctx, base.font_size);
let width = selected.iter().map(|(f, _)| f.width).max().unwrap_or_default();
size = Size::new(width, full);
baseline = full / 2.0 + axis;
}
}
let mut frame = Frame::soft(size);
let mut offset = Abs::zero();
frame.set_baseline(baseline);
frame.modify(&base.modifiers);
for (fragment, advance) in selected {
let pos = match axis {
Axis::X => Point::new(offset, frame.baseline() - fragment.ascent),
Axis::Y => Point::with_y(full - offset - fragment.height()),
};
frame.push_frame(pos, fragment.into_frame());
offset += advance;
}
let accent_attach = match axis {
Axis::X => frame.width() / 2.0,
Axis::Y => base.accent_attach,
}; };
VariantFragment { let relative_to_size = relative_to.unwrap_or_else(|| size.get(stretch_axis));
c: base.c,
frame, glyph.stretch(ctx, stretch.relative_to(relative_to_size) - short_fall, stretch_axis);
font_size: base.font_size,
italics_correction: Abs::zero(), if stretch_axis == Axis::Y {
accent_attach, glyph.center_on_axis();
class: base.class,
math_size: base.math_size,
span: base.span,
limits: base.limits,
mid_stretched: None,
extended_shape: true,
} }
} }
/// Return an iterator over the assembly's parts with extenders repeated the
/// specified number of times.
fn parts(assembly: GlyphAssembly, repeat: usize) -> impl Iterator<Item = GlyphPart> + '_ {
assembly.parts.into_iter().flat_map(move |part| {
let count = if part.part_flags.extender() { repeat } else { 1 };
std::iter::repeat_n(part, count)
})
}

View File

@ -12,7 +12,10 @@ use typst_syntax::{is_newline, Span};
use unicode_math_class::MathClass; use unicode_math_class::MathClass;
use unicode_segmentation::UnicodeSegmentation; use unicode_segmentation::UnicodeSegmentation;
use super::{FrameFragment, GlyphFragment, MathContext, MathFragment, MathRun}; use super::{
has_dtls_feat, style_dtls, FrameFragment, GlyphFragment, MathContext, MathFragment,
MathRun,
};
/// Lays out a [`TextElem`]. /// Lays out a [`TextElem`].
pub fn layout_text( pub fn layout_text(
@ -65,19 +68,9 @@ fn layout_inline_text(
// Small optimization for numbers. Note that this lays out slightly // Small optimization for numbers. Note that this lays out slightly
// differently to normal text and is worth re-evaluating in the future. // differently to normal text and is worth re-evaluating in the future.
let mut fragments = vec![]; let mut fragments = vec![];
let is_single = text.chars().count() == 1;
for unstyled_c in text.chars() { for unstyled_c in text.chars() {
let c = styled_char(styles, unstyled_c, false); let c = styled_char(styles, unstyled_c, false);
let mut glyph = GlyphFragment::new(ctx, styles, c, span); let glyph = GlyphFragment::new_char(ctx.font, styles, c, span)?;
if is_single {
// Duplicate what `layout_glyph` does exactly even if it's
// probably incorrect here.
match EquationElem::size_in(styles) {
MathSize::Script => glyph.make_script_size(ctx),
MathSize::ScriptScript => glyph.make_script_script_size(ctx),
_ => {}
}
}
fragments.push(glyph.into()); fragments.push(glyph.into());
} }
let frame = MathRun::new(fragments).into_frame(styles); let frame = MathRun::new(fragments).into_frame(styles);
@ -126,15 +119,21 @@ pub fn layout_symbol(
) -> SourceResult<()> { ) -> SourceResult<()> {
// Switch dotless char to normal when we have the dtls OpenType feature. // Switch dotless char to normal when we have the dtls OpenType feature.
// This should happen before the main styling pass. // This should happen before the main styling pass.
let (unstyled_c, dtls) = match try_dotless(elem.text) { let dtls = style_dtls();
Some(c) if ctx.dtls_table.is_some() => (c, true), let (unstyled_c, symbol_styles) = match try_dotless(elem.text) {
_ => (elem.text, false), Some(c) if has_dtls_feat(ctx.font) => (c, styles.chain(&dtls)),
_ => (elem.text, styles),
}; };
let c = styled_char(styles, unstyled_c, true); let c = styled_char(styles, unstyled_c, true);
let fragment = match GlyphFragment::try_new(ctx, styles, c, elem.span()) { let fragment: MathFragment =
Some(glyph) => layout_glyph(glyph, dtls, ctx, styles), match GlyphFragment::new_char(ctx.font, symbol_styles, c, elem.span()) {
None => { Ok(mut glyph) => {
adjust_glyph_layout(&mut glyph, ctx, styles);
glyph.into()
}
Err(_) => {
// Not in the math font, fallback to normal inline text layout. // Not in the math font, fallback to normal inline text layout.
// TODO: Should replace this with proper fallback in [`GlyphFragment::new`].
layout_inline_text(c.encode_utf8(&mut [0; 4]), elem.span(), ctx, styles)? layout_inline_text(c.encode_utf8(&mut [0; 4]), elem.span(), ctx, styles)?
.into() .into()
} }
@ -143,37 +142,22 @@ pub fn layout_symbol(
Ok(()) Ok(())
} }
/// Layout a [`GlyphFragment`]. /// Centers large glyphs vertically on the axis, scaling them if in display
fn layout_glyph( /// style.
mut glyph: GlyphFragment, fn adjust_glyph_layout(
dtls: bool, glyph: &mut GlyphFragment,
ctx: &mut MathContext, ctx: &mut MathContext,
styles: StyleChain, styles: StyleChain,
) -> MathFragment { ) {
if dtls {
glyph.make_dotless_form(ctx);
}
let math_size = EquationElem::size_in(styles);
match math_size {
MathSize::Script => glyph.make_script_size(ctx),
MathSize::ScriptScript => glyph.make_script_script_size(ctx),
_ => {}
}
if glyph.class == MathClass::Large { if glyph.class == MathClass::Large {
let mut variant = if math_size == MathSize::Display { if EquationElem::size_in(styles) == MathSize::Display {
let height = scaled!(ctx, styles, display_operator_min_height) let height = scaled!(ctx, styles, display_operator_min_height)
.max(SQRT_2 * glyph.height()); .max(SQRT_2 * glyph.size.y);
glyph.stretch_vertical(ctx, height, Abs::zero()) glyph.stretch_vertical(ctx, height);
} else {
glyph.into_variant()
}; };
// TeXbook p 155. Large operators are always vertically centered on the // TeXbook p 155. Large operators are always vertically centered on the
// axis. // axis.
variant.center_on_axis(ctx); glyph.center_on_axis();
variant.into()
} else {
glyph.into()
} }
} }

View File

@ -285,14 +285,14 @@ fn layout_underoverspreader(
let body = ctx.layout_into_run(body, styles)?; let body = ctx.layout_into_run(body, styles)?;
let body_class = body.class(); let body_class = body.class();
let body = body.into_fragment(styles); let body = body.into_fragment(styles);
let glyph = GlyphFragment::new(ctx, styles, c, span); let mut glyph = GlyphFragment::new_char(ctx.font, styles, c, span)?;
let stretched = glyph.stretch_horizontal(ctx, body.width(), Abs::zero()); glyph.stretch_horizontal(ctx, body.width());
let mut rows = vec![]; let mut rows = vec![];
let baseline = match position { let baseline = match position {
Position::Under => { Position::Under => {
rows.push(MathRun::new(vec![body])); rows.push(MathRun::new(vec![body]));
rows.push(stretched.into()); rows.push(glyph.into());
if let Some(annotation) = annotation { if let Some(annotation) = annotation {
let under_style = style_for_subscript(styles); let under_style = style_for_subscript(styles);
let annotation_styles = styles.chain(&under_style); let annotation_styles = styles.chain(&under_style);
@ -306,20 +306,14 @@ fn layout_underoverspreader(
let annotation_styles = styles.chain(&over_style); let annotation_styles = styles.chain(&over_style);
rows.extend(ctx.layout_into_run(annotation, annotation_styles)?.rows()); rows.extend(ctx.layout_into_run(annotation, annotation_styles)?.rows());
} }
rows.push(stretched.into()); rows.push(glyph.into());
rows.push(MathRun::new(vec![body])); rows.push(MathRun::new(vec![body]));
rows.len() - 1 rows.len() - 1
} }
}; };
let frame = stack( let frame =
rows, stack(rows, FixedAlignment::Center, gap, baseline, LeftRightAlternator::Right);
FixedAlignment::Center,
gap,
baseline,
LeftRightAlternator::Right,
None,
);
ctx.push(FrameFragment::new(styles, frame).with_class(body_class)); ctx.push(FrameFragment::new(styles, frame).with_class(body_class));
Ok(()) Ok(())

View File

@ -1,6 +1,6 @@
use typst_library::foundations::StyleChain; use typst_library::foundations::StyleChain;
use typst_library::layout::{Fragment, Frame, FrameItem, HideElem, Point}; use typst_library::layout::{Abs, Fragment, Frame, FrameItem, HideElem, Point, Sides};
use typst_library::model::{Destination, LinkElem}; use typst_library::model::{Destination, LinkElem, ParElem};
/// Frame-level modifications resulting from styles that do not impose any /// Frame-level modifications resulting from styles that do not impose any
/// layout structure. /// layout structure.
@ -52,14 +52,7 @@ pub trait FrameModify {
impl FrameModify for Frame { impl FrameModify for Frame {
fn modify(&mut self, modifiers: &FrameModifiers) { fn modify(&mut self, modifiers: &FrameModifiers) {
if let Some(dest) = &modifiers.dest { modify_frame(self, modifiers, None);
let size = self.size();
self.push(Point::zero(), FrameItem::Link(dest.clone(), size));
}
if modifiers.hidden {
self.hide();
}
} }
} }
@ -82,6 +75,41 @@ where
} }
} }
pub trait FrameModifyText {
/// Resolve and apply [`FrameModifiers`] for this text frame.
fn modify_text(&mut self, styles: StyleChain);
}
impl FrameModifyText for Frame {
fn modify_text(&mut self, styles: StyleChain) {
let modifiers = FrameModifiers::get_in(styles);
let expand_y = 0.5 * ParElem::leading_in(styles);
let outset = Sides::new(Abs::zero(), expand_y, Abs::zero(), expand_y);
modify_frame(self, &modifiers, Some(outset));
}
}
fn modify_frame(
frame: &mut Frame,
modifiers: &FrameModifiers,
link_box_outset: Option<Sides<Abs>>,
) {
if let Some(dest) = &modifiers.dest {
let mut pos = Point::zero();
let mut size = frame.size();
if let Some(outset) = link_box_outset {
pos.y -= outset.top;
pos.x -= outset.left;
size += outset.sum_by_axis();
}
frame.push(pos, FrameItem::Link(dest.clone(), size));
}
if modifiers.hidden {
frame.hide();
}
}
/// Performs layout and modification in one step. /// Performs layout and modification in one step.
/// ///
/// This just runs `layout(styles).modified(&FrameModifiers::get_in(styles))`, /// This just runs `layout(styles).modified(&FrameModifiers::get_in(styles))`,

View File

@ -29,6 +29,7 @@ csv = { workspace = true }
ecow = { workspace = true } ecow = { workspace = true }
flate2 = { workspace = true } flate2 = { workspace = true }
fontdb = { workspace = true } fontdb = { workspace = true }
glidesort = { workspace = true }
hayagriva = { workspace = true } hayagriva = { workspace = true }
icu_properties = { workspace = true } icu_properties = { workspace = true }
icu_provider = { workspace = true } icu_provider = { workspace = true }
@ -65,6 +66,7 @@ unicode-normalization = { workspace = true }
unicode-segmentation = { workspace = true } unicode-segmentation = { workspace = true }
unscanny = { workspace = true } unscanny = { workspace = true }
usvg = { workspace = true } usvg = { workspace = true }
utf8_iter = { workspace = true }
wasmi = { workspace = true } wasmi = { workspace = true }
xmlwriter = { workspace = true } xmlwriter = { workspace = true }

View File

@ -1,17 +1,20 @@
//! Diagnostics. //! Diagnostics.
use std::fmt::{self, Display, Formatter}; use std::fmt::{self, Display, Formatter, Write as _};
use std::io; use std::io;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::str::Utf8Error; use std::str::Utf8Error;
use std::string::FromUtf8Error; use std::string::FromUtf8Error;
use az::SaturatingAs;
use comemo::Tracked; use comemo::Tracked;
use ecow::{eco_vec, EcoVec}; use ecow::{eco_vec, EcoVec};
use typst_syntax::package::{PackageSpec, PackageVersion}; use typst_syntax::package::{PackageSpec, PackageVersion};
use typst_syntax::{Span, Spanned, SyntaxError}; use typst_syntax::{Lines, Span, Spanned, SyntaxError};
use utf8_iter::ErrorReportingUtf8Chars;
use crate::engine::Engine; use crate::engine::Engine;
use crate::loading::{LoadSource, Loaded};
use crate::{World, WorldExt}; use crate::{World, WorldExt};
/// Early-return with a [`StrResult`] or [`SourceResult`]. /// Early-return with a [`StrResult`] or [`SourceResult`].
@ -148,7 +151,7 @@ pub struct Warned<T> {
pub warnings: EcoVec<SourceDiagnostic>, pub warnings: EcoVec<SourceDiagnostic>,
} }
/// An error or warning in a source file. /// An error or warning in a source or text file.
/// ///
/// The contained spans will only be detached if any of the input source files /// The contained spans will only be detached if any of the input source files
/// were detached. /// were detached.
@ -568,31 +571,287 @@ impl From<PackageError> for EcoString {
} }
} }
/// A result type with a data-loading-related error.
pub type LoadResult<T> = Result<T, LoadError>;
/// A call site independent error that occurred during data loading. This avoids
/// polluting the memoization with [`Span`]s and [`FileId`]s from source files.
/// Can be turned into a [`SourceDiagnostic`] using the [`LoadedWithin::within`]
/// method available on [`LoadResult`].
///
/// [`FileId`]: typst_syntax::FileId
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct LoadError {
/// The position in the file at which the error occurred.
pos: ReportPos,
/// Must contain a message formatted like this: `"failed to do thing (cause)"`.
message: EcoString,
}
impl LoadError {
/// Creates a new error from a position in a file, a base message
/// (e.g. `failed to parse JSON`) and a concrete error (e.g. `invalid
/// number`)
pub fn new(
pos: impl Into<ReportPos>,
message: impl std::fmt::Display,
error: impl std::fmt::Display,
) -> Self {
Self {
pos: pos.into(),
message: eco_format!("{message} ({error})"),
}
}
}
impl From<Utf8Error> for LoadError {
fn from(err: Utf8Error) -> Self {
let start = err.valid_up_to();
let end = start + err.error_len().unwrap_or(0);
LoadError::new(
start..end,
"failed to convert to string",
"file is not valid utf-8",
)
}
}
/// Convert a [`LoadResult`] to a [`SourceResult`] by adding the [`Loaded`]
/// context.
pub trait LoadedWithin<T> {
/// Report an error, possibly in an external file.
fn within(self, loaded: &Loaded) -> SourceResult<T>;
}
impl<T, E> LoadedWithin<T> for Result<T, E>
where
E: Into<LoadError>,
{
fn within(self, loaded: &Loaded) -> SourceResult<T> {
self.map_err(|err| {
let LoadError { pos, message } = err.into();
load_err_in_text(loaded, pos, message)
})
}
}
/// Report an error, possibly in an external file. This will delegate to
/// [`load_err_in_invalid_text`] if the data isn't valid utf-8.
fn load_err_in_text(
loaded: &Loaded,
pos: impl Into<ReportPos>,
mut message: EcoString,
) -> EcoVec<SourceDiagnostic> {
let pos = pos.into();
// This also does utf-8 validation. Only report an error in an external
// file if it is human readable (valid utf-8), otherwise fall back to
// `load_err_in_invalid_text`.
let lines = Lines::try_from(&loaded.data);
match (loaded.source.v, lines) {
(LoadSource::Path(file_id), Ok(lines)) => {
if let Some(range) = pos.range(&lines) {
let span = Span::from_range(file_id, range);
return eco_vec![SourceDiagnostic::error(span, message)];
}
// Either `ReportPos::None` was provided, or resolving the range
// from the line/column failed. If present report the possibly
// wrong line/column in the error message anyway.
let span = Span::from_range(file_id, 0..loaded.data.len());
if let Some(pair) = pos.line_col(&lines) {
message.pop();
let (line, col) = pair.numbers();
write!(&mut message, " at {line}:{col})").ok();
}
eco_vec![SourceDiagnostic::error(span, message)]
}
(LoadSource::Bytes, Ok(lines)) => {
if let Some(pair) = pos.line_col(&lines) {
message.pop();
let (line, col) = pair.numbers();
write!(&mut message, " at {line}:{col})").ok();
}
eco_vec![SourceDiagnostic::error(loaded.source.span, message)]
}
_ => load_err_in_invalid_text(loaded, pos, message),
}
}
/// Report an error (possibly from an external file) that isn't valid utf-8.
fn load_err_in_invalid_text(
loaded: &Loaded,
pos: impl Into<ReportPos>,
mut message: EcoString,
) -> EcoVec<SourceDiagnostic> {
let line_col = pos.into().try_line_col(&loaded.data).map(|p| p.numbers());
match (loaded.source.v, line_col) {
(LoadSource::Path(file), _) => {
message.pop();
if let Some(package) = file.package() {
write!(
&mut message,
" in {package}{}",
file.vpath().as_rooted_path().display()
)
.ok();
} else {
write!(&mut message, " in {}", file.vpath().as_rootless_path().display())
.ok();
};
if let Some((line, col)) = line_col {
write!(&mut message, ":{line}:{col}").ok();
}
message.push(')');
}
(LoadSource::Bytes, Some((line, col))) => {
message.pop();
write!(&mut message, " at {line}:{col})").ok();
}
(LoadSource::Bytes, None) => (),
}
eco_vec![SourceDiagnostic::error(loaded.source.span, message)]
}
/// A position at which an error was reported.
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash)]
pub enum ReportPos {
/// Contains a range, and a line/column pair.
Full(std::ops::Range<u32>, LineCol),
/// Contains a range.
Range(std::ops::Range<u32>),
/// Contains a line/column pair.
LineCol(LineCol),
#[default]
None,
}
impl From<std::ops::Range<usize>> for ReportPos {
fn from(value: std::ops::Range<usize>) -> Self {
Self::Range(value.start.saturating_as()..value.end.saturating_as())
}
}
impl From<LineCol> for ReportPos {
fn from(value: LineCol) -> Self {
Self::LineCol(value)
}
}
impl ReportPos {
/// Creates a position from a pre-existing range and line-column pair.
pub fn full(range: std::ops::Range<usize>, pair: LineCol) -> Self {
let range = range.start.saturating_as()..range.end.saturating_as();
Self::Full(range, pair)
}
/// Tries to determine the byte range for this position.
fn range(&self, lines: &Lines<String>) -> Option<std::ops::Range<usize>> {
match self {
ReportPos::Full(range, _) => Some(range.start as usize..range.end as usize),
ReportPos::Range(range) => Some(range.start as usize..range.end as usize),
&ReportPos::LineCol(pair) => {
let i =
lines.line_column_to_byte(pair.line as usize, pair.col as usize)?;
Some(i..i)
}
ReportPos::None => None,
}
}
/// Tries to determine the line/column for this position.
fn line_col(&self, lines: &Lines<String>) -> Option<LineCol> {
match self {
&ReportPos::Full(_, pair) => Some(pair),
ReportPos::Range(range) => {
let (line, col) = lines.byte_to_line_column(range.start as usize)?;
Some(LineCol::zero_based(line, col))
}
&ReportPos::LineCol(pair) => Some(pair),
ReportPos::None => None,
}
}
/// Either gets the line/column pair, or tries to compute it from possibly
/// invalid utf-8 data.
fn try_line_col(&self, bytes: &[u8]) -> Option<LineCol> {
match self {
&ReportPos::Full(_, pair) => Some(pair),
ReportPos::Range(range) => {
LineCol::try_from_byte_pos(range.start as usize, bytes)
}
&ReportPos::LineCol(pair) => Some(pair),
ReportPos::None => None,
}
}
}
/// A line/column pair.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct LineCol {
/// The 0-based line.
line: u32,
/// The 0-based column.
col: u32,
}
impl LineCol {
/// Constructs the line/column pair from 0-based indices.
pub fn zero_based(line: usize, col: usize) -> Self {
Self {
line: line.saturating_as(),
col: col.saturating_as(),
}
}
/// Constructs the line/column pair from 1-based numbers.
pub fn one_based(line: usize, col: usize) -> Self {
Self::zero_based(line.saturating_sub(1), col.saturating_sub(1))
}
/// Try to compute a line/column pair from possibly invalid utf-8 data.
pub fn try_from_byte_pos(pos: usize, bytes: &[u8]) -> Option<Self> {
let bytes = &bytes[..pos];
let mut line = 0;
#[allow(clippy::double_ended_iterator_last)]
let line_start = memchr::memchr_iter(b'\n', bytes)
.inspect(|_| line += 1)
.last()
.map(|i| i + 1)
.unwrap_or(bytes.len());
let col = ErrorReportingUtf8Chars::new(&bytes[line_start..]).count();
Some(LineCol::zero_based(line, col))
}
/// Returns the 0-based line/column indices.
pub fn indices(&self) -> (usize, usize) {
(self.line as usize, self.col as usize)
}
/// Returns the 1-based line/column numbers.
pub fn numbers(&self) -> (usize, usize) {
(self.line as usize + 1, self.col as usize + 1)
}
}
/// Format a user-facing error message for an XML-like file format. /// Format a user-facing error message for an XML-like file format.
pub fn format_xml_like_error(format: &str, error: roxmltree::Error) -> EcoString { pub fn format_xml_like_error(format: &str, error: roxmltree::Error) -> LoadError {
match error { let pos = LineCol::one_based(error.pos().row as usize, error.pos().col as usize);
roxmltree::Error::UnexpectedCloseTag(expected, actual, pos) => { let message = match error {
eco_format!( roxmltree::Error::UnexpectedCloseTag(expected, actual, _) => {
"failed to parse {format} (found closing tag '{actual}' \ eco_format!("failed to parse {format} (found closing tag '{actual}' instead of '{expected}')")
instead of '{expected}' in line {})",
pos.row
)
} }
roxmltree::Error::UnknownEntityReference(entity, pos) => { roxmltree::Error::UnknownEntityReference(entity, _) => {
eco_format!( eco_format!("failed to parse {format} (unknown entity '{entity}')")
"failed to parse {format} (unknown entity '{entity}' in line {})",
pos.row
)
} }
roxmltree::Error::DuplicatedAttribute(attr, pos) => { roxmltree::Error::DuplicatedAttribute(attr, _) => {
eco_format!( eco_format!("failed to parse {format} (duplicate attribute '{attr}')")
"failed to parse {format} (duplicate attribute '{attr}' in line {})",
pos.row
)
} }
roxmltree::Error::NoRootNode => { roxmltree::Error::NoRootNode => {
eco_format!("failed to parse {format} (missing root node)") eco_format!("failed to parse {format} (missing root node)")
} }
err => eco_format!("failed to parse {format} ({err})"), err => eco_format!("failed to parse {format} ({err})"),
} };
LoadError { pos: pos.into(), message }
} }

View File

@ -172,17 +172,29 @@ impl Array {
} }
/// Returns the first item in the array. May be used on the left-hand side /// Returns the first item in the array. May be used on the left-hand side
/// of an assignment. Fails with an error if the array is empty. /// of an assignment. Returns the default value if the array is empty
/// or fails with an error if no default value was specified.
#[func] #[func]
pub fn first(&self) -> StrResult<Value> { pub fn first(
self.0.first().cloned().ok_or_else(array_is_empty) &self,
/// A default value to return if the array is empty.
#[named]
default: Option<Value>,
) -> StrResult<Value> {
self.0.first().cloned().or(default).ok_or_else(array_is_empty)
} }
/// Returns the last item in the array. May be used on the left-hand side of /// Returns the last item in the array. May be used on the left-hand side of
/// an assignment. Fails with an error if the array is empty. /// an assignment. Returns the default value if the array is empty
/// or fails with an error if no default value was specified.
#[func] #[func]
pub fn last(&self) -> StrResult<Value> { pub fn last(
self.0.last().cloned().ok_or_else(array_is_empty) &self,
/// A default value to return if the array is empty.
#[named]
default: Option<Value>,
) -> StrResult<Value> {
self.0.last().cloned().or(default).ok_or_else(array_is_empty)
} }
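A hedged usage sketch for the new `default` arguments (expected results follow the documentation above):

```typst
#(1, 2, 3).first()      // 1
#().first(default: 0)   // 0
#("a", "b").last()      // "b"
#().last(default: "z")  // "z"
```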
/// Returns the item at the specified index in the array. May be used on the /// Returns the item at the specified index in the array. May be used on the
@ -796,7 +808,7 @@ impl Array {
/// function. The sorting algorithm used is stable. /// function. The sorting algorithm used is stable.
/// ///
/// Returns an error if two values could not be compared or if the key /// Returns an error if two values could not be compared or if the key
/// function (if given) yields an error. /// or comparison function (if given) yields an error.
/// ///
/// To sort according to multiple criteria at once, e.g. in case of equality /// To sort according to multiple criteria at once, e.g. in case of equality
/// between some criteria, the key function can return an array. The results /// between some criteria, the key function can return an array. The results
@ -820,17 +832,116 @@ impl Array {
/// determine the keys to sort by. /// determine the keys to sort by.
#[named] #[named]
key: Option<Func>, key: Option<Func>,
/// If given, uses this function to compare elements in the array.
///
/// This function should return a boolean: `{true}` indicates that the
/// elements are in order, while `{false}` indicates that they should be
/// swapped. To keep the sort stable, if the two elements are equal, the
/// function should return `{true}`.
///
/// If this function does not order the elements properly (e.g., by
/// returning `{false}` for both `{(x, y)}` and `{(y, x)}`, or for
/// `{(x, x)}`), the resulting array will be in unspecified order.
///
/// When used together with `key`, `by` will be passed the keys instead
/// of the elements.
///
/// ```example
/// #(
/// "sorted",
/// "by",
/// "decreasing",
/// "length",
/// ).sorted(
/// key: s => s.len(),
/// by: (l, r) => l >= r,
/// )
/// ```
#[named]
by: Option<Func>,
) -> SourceResult<Array> { ) -> SourceResult<Array> {
match by {
Some(by) => {
let mut are_in_order = |mut x, mut y| {
if let Some(f) = &key {
// We rely on `comemo`'s memoization of function
// evaluation to not excessively reevaluate the key.
x = f.call(engine, context, [x])?;
y = f.call(engine, context, [y])?;
}
match by.call(engine, context, [x, y])? {
Value::Bool(b) => Ok(b),
x => {
bail!(
span,
"expected boolean from `by` function, got {}",
x.ty(),
)
}
}
};
// If a comparison function is provided, we use `glidesort`
// instead of the standard library sorting algorithm to prevent
// panics in case the comparison function does not define a
// valid order (see https://github.com/typst/typst/pull/5627).
let mut result = Ok(()); let mut result = Ok(());
let mut vec = self.0; let mut vec = self.0.into_iter().enumerate().collect::<Vec<_>>();
glidesort::sort_by(&mut vec, |(i, x), (j, y)| {
// Because we use booleans for the comparison function, in
// order to keep the sort stable, we need to compare in the
// right order.
if i < j {
// If `x` and `y` appear in this order in the original
// array, then we should change their order (i.e.,
// return `Ordering::Greater`) iff `y` is strictly less
// than `x` (i.e., `compare(x, y)` returns `false`).
// Otherwise, we should keep them in the same order
// (i.e., return `Ordering::Less`).
match are_in_order(x.clone(), y.clone()) {
Ok(false) => Ordering::Greater,
Ok(true) => Ordering::Less,
Err(err) => {
if result.is_ok() {
result = Err(err);
}
Ordering::Equal
}
}
} else {
// If `x` and `y` appear in the opposite order in the
// original array, then we should change their order
// (i.e., return `Ordering::Less`) iff `x` is strictly
// less than `y` (i.e., `compare(y, x)` returns
// `false`). Otherwise, we should keep them in the same
// order (i.e., return `Ordering::Greater`).
match are_in_order(y.clone(), x.clone()) {
Ok(false) => Ordering::Less,
Ok(true) => Ordering::Greater,
Err(err) => {
if result.is_ok() {
result = Err(err);
}
Ordering::Equal
}
}
}
});
result.map(|()| vec.into_iter().map(|(_, x)| x).collect())
}
None => {
let mut key_of = |x: Value| match &key { let mut key_of = |x: Value| match &key {
// NOTE: We are relying on `comemo`'s memoization of function // We rely on `comemo`'s memoization of function evaluation
// evaluation to not excessively reevaluate the `key`. // to not excessively reevaluate the key.
Some(f) => f.call(engine, context, [x]), Some(f) => f.call(engine, context, [x]),
None => Ok(x), None => Ok(x),
}; };
// If no comparison function is provided, we know the order is
// valid, so we can use the standard library sort and prevent an
// extra allocation.
let mut result = Ok(());
let mut vec = self.0;
vec.make_mut().sort_by(|a, b| { vec.make_mut().sort_by(|a, b| {
// Until we get `try` blocks :)
match (key_of(a.clone()), key_of(b.clone())) { match (key_of(a.clone()), key_of(b.clone())) {
(Ok(a), Ok(b)) => ops::compare(&a, &b).unwrap_or_else(|err| { (Ok(a), Ok(b)) => ops::compare(&a, &b).unwrap_or_else(|err| {
if result.is_ok() { if result.is_ok() {
@ -846,7 +957,9 @@ impl Array {
} }
} }
}); });
result.map(|_| vec.into()) result.map(|()| vec.into())
}
}
} }
/// Deduplicates all items in the array. /// Deduplicates all items in the array.
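Note on the boolean `by` comparator above: it only becomes usable by a comparison sort once it is adapted into an `Ordering` that respects the elements' original positions. A minimal standalone sketch of that adaptation, using plain integers and the standard library's `sort_by` instead of the real `Value`s and `glidesort` (note that `sort_by` may panic on an inconsistent comparator, which is exactly why the code above reaches for `glidesort`):

```rust
use std::cmp::Ordering;

// Hypothetical helper, not part of the diff: tags each element with its
// original index so a boolean predicate can be turned into a stable order.
fn sort_by_bool<T>(items: &mut Vec<T>, mut in_order: impl FnMut(&T, &T) -> bool) {
    let mut tagged: Vec<(usize, T)> =
        std::mem::take(items).into_iter().enumerate().collect();
    tagged.sort_by(|(i, x), (j, y)| {
        if i < j {
            // `x` originally came first: swap only if the pair is out of order.
            if in_order(x, y) { Ordering::Less } else { Ordering::Greater }
        } else {
            // `y` originally came first: mirror the comparison to stay stable.
            if in_order(y, x) { Ordering::Greater } else { Ordering::Less }
        }
    });
    *items = tagged.into_iter().map(|(_, x)| x).collect();
}

fn main() {
    let mut v = vec![3, 1, 2, 1];
    sort_by_bool(&mut v, |a, b| a <= b);
    assert_eq!(v, vec![1, 1, 2, 3]);
}
```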


@ -7,6 +7,7 @@ use std::sync::Arc;
use ecow::{eco_format, EcoString}; use ecow::{eco_format, EcoString};
use serde::{Serialize, Serializer}; use serde::{Serialize, Serializer};
use typst_syntax::Lines;
use typst_utils::LazyHash; use typst_utils::LazyHash;
use crate::diag::{bail, StrResult}; use crate::diag::{bail, StrResult};
@ -286,6 +287,16 @@ impl Serialize for Bytes {
} }
} }
impl TryFrom<&Bytes> for Lines<String> {
type Error = Utf8Error;
#[comemo::memoize]
fn try_from(value: &Bytes) -> Result<Lines<String>, Utf8Error> {
let text = value.as_str()?;
Ok(Lines::new(text.to_string()))
}
}
/// Any type that can back a byte buffer. /// Any type that can back a byte buffer.
trait Bytelike: Send + Sync { trait Bytelike: Send + Sync {
fn as_bytes(&self) -> &[u8]; fn as_bytes(&self) -> &[u8];


@ -708,12 +708,13 @@ pub fn fract(
} }
} }
/// Rounds a number to the nearest integer away from zero. /// Rounds a number to the nearest integer.
/// ///
/// Optionally, a number of decimal places can be specified. /// Half-integers are rounded away from zero.
/// ///
/// If the number of digits is negative, its absolute value will indicate the /// Optionally, a number of decimal places can be specified. If negative, its
/// amount of significant integer digits to remove before the decimal point. /// absolute value will indicate the amount of significant integer digits to
/// remove before the decimal point.
/// ///
/// Note that this function will return the same type as the operand. That is, /// Note that this function will return the same type as the operand. That is,
/// applying `round` to a [`float`] will return a `float`, and to a [`decimal`], /// applying `round` to a [`float`] will return a `float`, and to a [`decimal`],
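Concretely, "nearest integer, ties away from zero, with an optional digits shift" behaves like the following sketch. This is plain `f64` math for illustration only and is not the function's actual implementation, which also covers `int` and `decimal` operands:

```rust
// Illustrative only: f64::round already rounds half-way cases away from zero.
fn round_digits(x: f64, digits: i32) -> f64 {
    let factor = 10f64.powi(digits);
    (x * factor).round() / factor
}

fn main() {
    println!("{}", round_digits(2.5, 0));     // 3 (ties away from zero)
    println!("{}", round_digits(-2.5, 0));    // -3
    println!("{}", round_digits(3.14159, 2)); // 3.14
    println!("{}", round_digits(1234.0, -2)); // 1200 (negative digits drop integer digits)
}
```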


@ -3,7 +3,7 @@ use std::fmt::{self, Debug, Formatter};
use std::hash::{Hash, Hasher}; use std::hash::{Hash, Hasher};
use std::iter::{self, Sum}; use std::iter::{self, Sum};
use std::marker::PhantomData; use std::marker::PhantomData;
use std::ops::{Add, AddAssign, Deref, DerefMut}; use std::ops::{Add, AddAssign, ControlFlow, Deref, DerefMut};
use std::sync::Arc; use std::sync::Arc;
use comemo::Tracked; use comemo::Tracked;
@ -414,10 +414,11 @@ impl Content {
/// Elements produced in `show` rules will not be included in the results. /// Elements produced in `show` rules will not be included in the results.
pub fn query(&self, selector: Selector) -> Vec<Content> { pub fn query(&self, selector: Selector) -> Vec<Content> {
let mut results = Vec::new(); let mut results = Vec::new();
self.traverse(&mut |element| { let _ = self.traverse(&mut |element| -> ControlFlow<()> {
if selector.matches(&element, None) { if selector.matches(&element, None) {
results.push(element); results.push(element);
} }
ControlFlow::Continue(())
}); });
results results
} }
@ -427,54 +428,58 @@ impl Content {
/// ///
/// Elements produced in `show` rules will not be included in the results. /// Elements produced in `show` rules will not be included in the results.
pub fn query_first(&self, selector: &Selector) -> Option<Content> { pub fn query_first(&self, selector: &Selector) -> Option<Content> {
let mut result = None; self.traverse(&mut |element| -> ControlFlow<Content> {
self.traverse(&mut |element| { if selector.matches(&element, None) {
if result.is_none() && selector.matches(&element, None) { ControlFlow::Break(element)
result = Some(element); } else {
ControlFlow::Continue(())
} }
}); })
result .break_value()
} }
/// Extracts the plain text of this content. /// Extracts the plain text of this content.
pub fn plain_text(&self) -> EcoString { pub fn plain_text(&self) -> EcoString {
let mut text = EcoString::new(); let mut text = EcoString::new();
self.traverse(&mut |element| { let _ = self.traverse(&mut |element| -> ControlFlow<()> {
if let Some(textable) = element.with::<dyn PlainText>() { if let Some(textable) = element.with::<dyn PlainText>() {
textable.plain_text(&mut text); textable.plain_text(&mut text);
} }
ControlFlow::Continue(())
}); });
text text
} }
/// Traverse this content. /// Traverse this content.
fn traverse<F>(&self, f: &mut F) fn traverse<F, B>(&self, f: &mut F) -> ControlFlow<B>
where where
F: FnMut(Content), F: FnMut(Content) -> ControlFlow<B>,
{ {
f(self.clone());
self.inner
.elem
.fields()
.into_iter()
.for_each(|(_, value)| walk_value(value, f));
/// Walks a given value to find any content that matches the selector. /// Walks a given value to find any content that matches the selector.
fn walk_value<F>(value: Value, f: &mut F) ///
/// Returns early if the function gives `ControlFlow::Break`.
fn walk_value<F, B>(value: Value, f: &mut F) -> ControlFlow<B>
where where
F: FnMut(Content), F: FnMut(Content) -> ControlFlow<B>,
{ {
match value { match value {
Value::Content(content) => content.traverse(f), Value::Content(content) => content.traverse(f),
Value::Array(array) => { Value::Array(array) => {
for value in array { for value in array {
walk_value(value, f); walk_value(value, f)?;
}
ControlFlow::Continue(())
}
_ => ControlFlow::Continue(()),
} }
} }
_ => {}
} // Call f on the element itself before recursively iterating its fields.
f(self.clone())?;
for (_, value) in self.inner.elem.fields() {
walk_value(value, f)?;
} }
ControlFlow::Continue(())
} }
} }
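The switch to `ControlFlow` above lets one traversal serve both exhaustive visits (`query`, `plain_text`) and early-exit searches (`query_first`). A reduced sketch of the pattern on a plain slice, with assumed simplified types rather than the real `Content`/`Value` tree walk:

```rust
use std::ops::ControlFlow;

// Visits every item, but stops as soon as the visitor returns Break.
fn traverse<B>(items: &[i32], f: &mut impl FnMut(i32) -> ControlFlow<B>) -> ControlFlow<B> {
    for &item in items {
        f(item)?; // `?` on ControlFlow propagates Break outward
    }
    ControlFlow::Continue(())
}

fn main() {
    // "query_first"-style use: break with the first match and keep its value.
    let first_even = traverse(&[1, 3, 4, 6], &mut |x| {
        if x % 2 == 0 {
            ControlFlow::Break(x)
        } else {
            ControlFlow::Continue(())
        }
    })
    .break_value();
    assert_eq!(first_even, Some(4));

    // "query"-style use: never break, so the whole slice is visited.
    let mut all = Vec::new();
    let _ = traverse(&[1, 3, 4, 6], &mut |x| -> ControlFlow<()> {
        all.push(x);
        ControlFlow::Continue(())
    });
    assert_eq!(all, vec![1, 3, 4, 6]);
}
```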


@ -1,4 +1,6 @@
use std::num::{NonZeroI64, NonZeroIsize, NonZeroU64, NonZeroUsize, ParseIntError}; use std::num::{
NonZeroI64, NonZeroIsize, NonZeroU32, NonZeroU64, NonZeroUsize, ParseIntError,
};
use ecow::{eco_format, EcoString}; use ecow::{eco_format, EcoString};
use smallvec::SmallVec; use smallvec::SmallVec;
@ -482,3 +484,16 @@ cast! {
"number too large" "number too large"
})?, })?,
} }
cast! {
NonZeroU32,
self => Value::Int(self.get() as _),
v: i64 => v
.try_into()
.and_then(|v: u32| v.try_into())
.map_err(|_| if v <= 0 {
"number must be positive"
} else {
"number too large"
})?,
}
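For reference, the two-step conversion in this new `cast!` arm (sign check via `i64 -> u32`, then zero check via `u32 -> NonZeroU32`) can be exercised on its own roughly like this. This is a sketch with a plain `Result`, not the actual `cast!` machinery:

```rust
use std::num::NonZeroU32;

// Mirrors the cast above: non-positive and oversized inputs fail at different
// steps, but both map to a user-facing message.
fn to_nonzero_u32(v: i64) -> Result<NonZeroU32, &'static str> {
    u32::try_from(v)
        .and_then(NonZeroU32::try_from)
        .map_err(|_| if v <= 0 { "number must be positive" } else { "number too large" })
}

fn main() {
    assert_eq!(to_nonzero_u32(3).map(NonZeroU32::get), Ok(3));
    assert_eq!(to_nonzero_u32(0).map(NonZeroU32::get), Err("number must be positive"));
    assert_eq!(to_nonzero_u32(1 << 40).map(NonZeroU32::get), Err("number too large"));
}
```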


@ -77,6 +77,7 @@ pub use {
indexmap::IndexMap, indexmap::IndexMap,
}; };
use comemo::TrackedMut;
use ecow::EcoString; use ecow::EcoString;
use typst_syntax::Spanned; use typst_syntax::Spanned;
@ -297,5 +298,14 @@ pub fn eval(
for (key, value) in dict { for (key, value) in dict {
scope.bind(key.into(), Binding::new(value, span)); scope.bind(key.into(), Binding::new(value, span));
} }
(engine.routines.eval_string)(engine.routines, engine.world, &text, span, mode, scope)
(engine.routines.eval_string)(
engine.routines,
engine.world,
TrackedMut::reborrow_mut(&mut engine.sink),
&text,
span,
mode,
scope,
)
} }


@ -7,9 +7,10 @@ use typst_syntax::FileId;
use crate::diag::{bail, DeprecationSink, StrResult}; use crate::diag::{bail, DeprecationSink, StrResult};
use crate::foundations::{repr, ty, Content, Scope, Value}; use crate::foundations::{repr, ty, Content, Scope, Value};
/// An module of definitions. /// A collection of variables and functions that are commonly related to
/// a single theme.
/// ///
/// A module /// A module can
/// - be built-in /// - be built-in
/// - stem from a [file import]($scripting/#modules) /// - stem from a [file import]($scripting/#modules)
/// - stem from a [package import]($scripting/#packages) (and thus indirectly /// - stem from a [package import]($scripting/#packages) (and thus indirectly


@ -151,8 +151,8 @@ pub fn plugin(
/// A [path]($syntax/#paths) to a WebAssembly file or raw WebAssembly bytes. /// A [path]($syntax/#paths) to a WebAssembly file or raw WebAssembly bytes.
source: Spanned<DataSource>, source: Spanned<DataSource>,
) -> SourceResult<Module> { ) -> SourceResult<Module> {
let data = source.load(engine.world)?; let loaded = source.load(engine.world)?;
Plugin::module(data).at(source.span) Plugin::module(loaded.data).at(source.span)
} }
#[scope] #[scope]


@ -39,11 +39,25 @@ use crate::foundations::{
/// #type(image("glacier.jpg")). /// #type(image("glacier.jpg")).
/// ``` /// ```
/// ///
/// The type of `10` is `int`. Now, what is the type of `int` or even `type`? /// The type of `{10}` is `int`. Now, what is the type of `int` or even `type`?
/// ```example /// ```example
/// #type(int) \ /// #type(int) \
/// #type(type) /// #type(type)
/// ``` /// ```
///
/// Unlike other types like `int`, [none] and [auto] do not have a name
/// representing them. To test if a value is one of these, compare your value to
/// them directly, e.g.:
/// ```example
/// #let val = none
/// #if val == none [
/// Yep, it's none.
/// ]
/// ```
///
/// Note that `type` will return [`content`] for all document elements. To
/// programmatically determine which kind of content you are dealing with, see
/// [`content.func`].
#[ty(scope, cast)] #[ty(scope, cast)]
#[derive(Copy, Clone, Eq, PartialEq, Hash)] #[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct Type(Static<NativeTypeData>); pub struct Type(Static<NativeTypeData>);


@ -229,10 +229,10 @@ impl Counter {
if self.is_page() { if self.is_page() {
let at_delta = let at_delta =
engine.introspector.page(location).get().saturating_sub(at_page.get()); engine.introspector.page(location).get().saturating_sub(at_page.get());
at_state.step(NonZeroUsize::ONE, at_delta); at_state.step(NonZeroUsize::ONE, at_delta as u64);
let final_delta = let final_delta =
engine.introspector.pages().get().saturating_sub(final_page.get()); engine.introspector.pages().get().saturating_sub(final_page.get());
final_state.step(NonZeroUsize::ONE, final_delta); final_state.step(NonZeroUsize::ONE, final_delta as u64);
} }
Ok(CounterState(smallvec![at_state.first(), final_state.first()])) Ok(CounterState(smallvec![at_state.first(), final_state.first()]))
} }
@ -250,7 +250,7 @@ impl Counter {
if self.is_page() { if self.is_page() {
let delta = let delta =
engine.introspector.page(location).get().saturating_sub(page.get()); engine.introspector.page(location).get().saturating_sub(page.get());
state.step(NonZeroUsize::ONE, delta); state.step(NonZeroUsize::ONE, delta as u64);
} }
Ok(state) Ok(state)
} }
@ -319,7 +319,7 @@ impl Counter {
let delta = page.get() - prev.get(); let delta = page.get() - prev.get();
if delta > 0 { if delta > 0 {
state.step(NonZeroUsize::ONE, delta); state.step(NonZeroUsize::ONE, delta as u64);
} }
} }
@ -500,7 +500,7 @@ impl Counter {
let (mut state, page) = sequence.last().unwrap().clone(); let (mut state, page) = sequence.last().unwrap().clone();
if self.is_page() { if self.is_page() {
let delta = engine.introspector.pages().get().saturating_sub(page.get()); let delta = engine.introspector.pages().get().saturating_sub(page.get());
state.step(NonZeroUsize::ONE, delta); state.step(NonZeroUsize::ONE, delta as u64);
} }
Ok(state) Ok(state)
} }
@ -616,13 +616,13 @@ pub trait Count {
/// Counts through elements with different levels. /// Counts through elements with different levels.
#[derive(Debug, Clone, PartialEq, Hash)] #[derive(Debug, Clone, PartialEq, Hash)]
pub struct CounterState(pub SmallVec<[usize; 3]>); pub struct CounterState(pub SmallVec<[u64; 3]>);
impl CounterState { impl CounterState {
/// Get the initial counter state for the key. /// Get the initial counter state for the key.
pub fn init(page: bool) -> Self { pub fn init(page: bool) -> Self {
// Special case, because pages always start at one. // Special case, because pages always start at one.
Self(smallvec![usize::from(page)]) Self(smallvec![u64::from(page)])
} }
/// Advance the counter and return the numbers for the given heading. /// Advance the counter and return the numbers for the given heading.
@ -645,7 +645,7 @@ impl CounterState {
} }
/// Advance the number of the given level by the specified amount. /// Advance the number of the given level by the specified amount.
pub fn step(&mut self, level: NonZeroUsize, by: usize) { pub fn step(&mut self, level: NonZeroUsize, by: u64) {
let level = level.get(); let level = level.get();
while self.0.len() < level { while self.0.len() < level {
@ -657,7 +657,7 @@ impl CounterState {
} }
/// Get the first number of the state. /// Get the first number of the state.
pub fn first(&self) -> usize { pub fn first(&self) -> u64 {
self.0.first().copied().unwrap_or(1) self.0.first().copied().unwrap_or(1)
} }
@ -675,7 +675,7 @@ impl CounterState {
cast! { cast! {
CounterState, CounterState,
self => Value::Array(self.0.into_iter().map(IntoValue::into_value).collect()), self => Value::Array(self.0.into_iter().map(IntoValue::into_value).collect()),
num: usize => Self(smallvec![num]), num: u64 => Self(smallvec![num]),
array: Array => Self(array array: Array => Self(array
.into_iter() .into_iter()
.map(Value::cast) .map(Value::cast)
@ -758,7 +758,7 @@ impl Show for Packed<CounterDisplayElem> {
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct ManualPageCounter { pub struct ManualPageCounter {
physical: NonZeroUsize, physical: NonZeroUsize,
logical: usize, logical: u64,
} }
impl ManualPageCounter { impl ManualPageCounter {
@ -773,7 +773,7 @@ impl ManualPageCounter {
} }
/// Get the current logical page counter state. /// Get the current logical page counter state.
pub fn logical(&self) -> usize { pub fn logical(&self) -> u64 {
self.logical self.logical
} }


@ -1,6 +1,6 @@
pub mod resolve; pub mod resolve;
use std::num::NonZeroUsize; use std::num::{NonZeroU32, NonZeroUsize};
use std::sync::Arc; use std::sync::Arc;
use comemo::Track; use comemo::Track;
@ -468,6 +468,17 @@ pub struct GridHeader {
#[default(true)] #[default(true)]
pub repeat: bool, pub repeat: bool,
/// The level of the header. Must not be zero.
///
/// This allows repeating multiple headers at once. Headers with different
/// levels can repeat together, as long as they have ascending levels.
///
/// Notably, when a header with a lower level starts repeating, all higher
/// or equal level headers stop repeating (they are "replaced" by the new
/// header).
#[default(NonZeroU32::ONE)]
pub level: NonZeroU32,
/// The cells and lines within the header. /// The cells and lines within the header.
#[variadic] #[variadic]
pub children: Vec<GridItem>, pub children: Vec<GridItem>,
@ -755,7 +766,14 @@ impl Show for Packed<GridCell> {
impl Default for Packed<GridCell> { impl Default for Packed<GridCell> {
fn default() -> Self { fn default() -> Self {
Packed::new(GridCell::new(Content::default())) Packed::new(
// Explicitly set colspan and rowspan to ensure they won't be
// overridden by set rules (default cells are created after
// colspans and rowspans are processed in the resolver)
GridCell::new(Content::default())
.with_colspan(NonZeroUsize::ONE)
.with_rowspan(NonZeroUsize::ONE),
)
} }
} }

File diff suppressed because it is too large


@ -22,7 +22,8 @@ use crate::layout::{BlockElem, Size};
/// #let text = lorem(30) /// #let text = lorem(30)
/// #layout(size => [ /// #layout(size => [
/// #let (height,) = measure( /// #let (height,) = measure(
/// block(width: size.width, text), /// width: size.width,
/// text,
/// ) /// )
/// This text is #height high with /// This text is #height high with
/// the current page width: \ /// the current page width: \
@ -40,8 +41,23 @@ use crate::layout::{BlockElem, Size};
/// receives the page's dimensions minus its margins. This is mostly useful in /// receives the page's dimensions minus its margins. This is mostly useful in
/// combination with [measurement]($measure). /// combination with [measurement]($measure).
/// ///
/// You can also use this function to resolve [`ratio`] to fixed lengths. This /// To retrieve the _remaining_ height of the page rather than its full size,
/// might come in handy if you're building your own layout abstractions. /// you can wrap your `layout` call in a `{block(height: 1fr)}`. This works
/// because the block automatically grows to fill the remaining space (see the
/// [fraction] documentation for more details).
///
/// ```example
/// #set page(height: 150pt)
///
/// #lorem(20)
///
/// #block(height: 1fr, layout(size => [
/// Remaining height: #size.height
/// ]))
/// ```
///
/// You can also use this function to resolve a [`ratio`] to a fixed length.
/// This might come in handy if you're building your own layout abstractions.
/// ///
/// ```example /// ```example
/// #layout(size => { /// #layout(size => {


@ -1,16 +1,14 @@
use std::borrow::Cow;
use std::num::NonZeroUsize; use std::num::NonZeroUsize;
use std::ops::RangeInclusive; use std::ops::RangeInclusive;
use std::str::FromStr; use std::str::FromStr;
use comemo::Track;
use typst_utils::{singleton, NonZeroExt, Scalar}; use typst_utils::{singleton, NonZeroExt, Scalar};
use crate::diag::{bail, SourceResult}; use crate::diag::{bail, SourceResult};
use crate::engine::Engine; use crate::engine::Engine;
use crate::foundations::{ use crate::foundations::{
cast, elem, Args, AutoValue, Cast, Construct, Content, Context, Dict, Fold, Func, cast, elem, Args, AutoValue, Cast, Construct, Content, Dict, Fold, NativeElement,
NativeElement, Set, Smart, StyleChain, Value, Set, Smart, Value,
}; };
use crate::introspection::Introspector; use crate::introspection::Introspector;
use crate::layout::{ use crate::layout::{
@ -75,9 +73,10 @@ pub struct PageElem {
/// The height of the page. /// The height of the page.
/// ///
/// If this is set to `{auto}`, page breaks can only be triggered manually /// If this is set to `{auto}`, page breaks can only be triggered manually
/// by inserting a [page break]($pagebreak). Most examples throughout this /// by inserting a [page break]($pagebreak) or by adding another non-empty
/// documentation use `{auto}` for the height of the page to dynamically /// page set rule. Most examples throughout this documentation use `{auto}`
/// grow and shrink to fit their content. /// for the height of the page to dynamically grow and shrink to fit their
/// content.
#[resolve] #[resolve]
#[parse( #[parse(
args.named("height")? args.named("height")?
@ -483,7 +482,7 @@ pub struct Page {
pub supplement: Content, pub supplement: Content,
/// The logical page number (controlled by `counter(page)` and may thus not /// The logical page number (controlled by `counter(page)` and may thus not
/// match the physical number). /// match the physical number).
pub number: usize, pub number: u64,
} }
impl Page { impl Page {
@ -648,43 +647,6 @@ cast! {
}, },
} }
/// A header, footer, foreground or background definition.
#[derive(Debug, Clone, Hash)]
pub enum Marginal {
/// Bare content.
Content(Content),
/// A closure mapping from a page number to content.
Func(Func),
}
impl Marginal {
/// Resolve the marginal based on the page number.
pub fn resolve(
&self,
engine: &mut Engine,
styles: StyleChain,
page: usize,
) -> SourceResult<Cow<'_, Content>> {
Ok(match self {
Self::Content(content) => Cow::Borrowed(content),
Self::Func(func) => Cow::Owned(
func.call(engine, Context::new(None, Some(styles)).track(), [page])?
.display(),
),
})
}
}
cast! {
Marginal,
self => match self {
Self::Content(v) => v.into_value(),
Self::Func(v) => v.into_value(),
},
v: Content => Self::Content(v),
v: Func => Self::Func(v),
}
/// A list of page ranges to be exported. /// A list of page ranges to be exported.
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct PageRanges(Vec<PageRange>); pub struct PageRanges(Vec<PageRange>);


@ -8,15 +8,35 @@ use crate::foundations::{repr, ty, Repr};
/// A ratio of a whole. /// A ratio of a whole.
/// ///
/// Written as a number, followed by a percent sign. /// A ratio is written as a number, followed by a percent sign. Ratios most
/// often appear as part of a [relative length]($relative), to specify the size
/// of some layout element relative to the page or some container.
/// ///
/// # Example
/// ```example /// ```example
/// #set align(center) /// #rect(width: 25%)
/// #scale(x: 150%)[
/// Scaled apart.
/// ]
/// ``` /// ```
///
/// However, they can also describe any other property that is relative to some
/// base, e.g. an amount of [horizontal scaling]($scale.x) or the
/// [height of parentheses]($math.lr.size) relative to the height of the content
/// they enclose.
///
/// # Scripting
/// Within your own code, you can use ratios as you like. You can multiply them
/// with various other types as shown below:
///
/// | Multiply by | Example | Result |
/// |-----------------|-------------------------|-----------------|
/// | [`ratio`] | `{27% * 10%}` | `{2.7%}` |
/// | [`length`] | `{27% * 100pt}` | `{27pt}` |
/// | [`relative`] | `{27% * (10% + 100pt)}` | `{2.7% + 27pt}` |
/// | [`angle`] | `{27% * 100deg}` | `{27deg}` |
/// | [`int`] | `{27% * 2}` | `{54%}` |
/// | [`float`] | `{27% * 0.37037}` | `{10%}` |
/// | [`fraction`] | `{27% * 3fr}` | `{0.81fr}` |
///
/// When ratios are displayed in the document, they are rounded to two
/// significant digits for readability.
#[ty(cast)] #[ty(cast)]
#[derive(Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] #[derive(Default, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct Ratio(Scalar); pub struct Ratio(Scalar);


@ -14,17 +14,58 @@ use crate::layout::{Abs, Em, Length, Ratio};
/// addition and subtraction of a length and a ratio. Wherever a relative length /// addition and subtraction of a length and a ratio. Wherever a relative length
/// is expected, you can also use a bare length or ratio. /// is expected, you can also use a bare length or ratio.
/// ///
/// # Example /// # Relative to the page
/// ```example /// A common use case is setting the width or height of a layout element (e.g.,
/// #rect(width: 100% - 50pt) /// [block], [rect], etc.) as a certain percentage of the width of the page.
/// Here, the rectangle's width is set to `{25%}`, so it takes up one fourth of
/// the page's _inner_ width (the width minus margins).
/// ///
/// #(100% - 50pt).length \ /// ```example
/// #(100% - 50pt).ratio /// #rect(width: 25%)
/// ``` /// ```
/// ///
/// Bare lengths or ratios are always valid where relative lengths are expected,
/// but the two can also be freely mixed:
/// ```example
/// #rect(width: 25% + 1cm)
/// ```
///
/// If you're trying to size an element so that it takes up the page's _full_
/// width, you have a few options (this highly depends on your exact use case):
///
/// 1. Set page margins to `{0pt}` (`[#set page(margin: 0pt)]`)
/// 2. Multiply the ratio by the known full page width (`{21cm * 69%}`)
/// 3. Use padding which will negate the margins (`[#pad(x: -2.5cm, ...)]`)
/// 4. Use the page [background](page.background) or
/// [foreground](page.foreground) field as those don't take margins into
/// account (note that it will render the content outside of the document
/// flow, see [place] to control the content position)
///
/// # Relative to a container
/// When a layout element (e.g. a [rect]) is nested in another layout container
/// (e.g. a [block]) instead of being a direct descendant of the page, relative
/// widths become relative to the container:
///
/// ```example
/// #block(
/// width: 100pt,
/// fill: aqua,
/// rect(width: 50%),
/// )
/// ```
///
/// # Scripting
/// You can multiply relative lengths by [ratios]($ratio), [integers]($int), and
/// [floats]($float).
///
/// A relative length has the following fields: /// A relative length has the following fields:
/// - `length`: Its length component. /// - `length`: Its length component.
/// - `ratio`: Its ratio component. /// - `ratio`: Its ratio component.
///
/// ```example
/// #(100% - 50pt).length \
/// #(100% - 50pt).ratio
/// ```
#[ty(cast, name = "relative", title = "Relative Length")] #[ty(cast, name = "relative", title = "Relative Length")]
#[derive(Default, Copy, Clone, Eq, PartialEq, Hash)] #[derive(Default, Copy, Clone, Eq, PartialEq, Hash)]
pub struct Rel<T: Numeric = Length> { pub struct Rel<T: Numeric = Length> {


@ -307,6 +307,20 @@ impl Transform {
Self { sx, sy, ..Self::identity() } Self { sx, sy, ..Self::identity() }
} }
/// A scale transform at a specific position.
pub fn scale_at(sx: Ratio, sy: Ratio, px: Abs, py: Abs) -> Self {
Self::translate(px, py)
.pre_concat(Self::scale(sx, sy))
.pre_concat(Self::translate(-px, -py))
}
/// A rotate transform at a specific position.
pub fn rotate_at(angle: Angle, px: Abs, py: Abs) -> Self {
Self::translate(px, py)
.pre_concat(Self::rotate(angle))
.pre_concat(Self::translate(-px, -py))
}
/// A rotate transform. /// A rotate transform.
pub fn rotate(angle: Angle) -> Self { pub fn rotate(angle: Angle) -> Self {
let cos = Ratio::new(angle.cos()); let cos = Ratio::new(angle.cos());
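The new `scale_at`/`rotate_at` helpers encode the usual "conjugate by a translation" trick: move the pivot to the origin, apply the transform, move back, so the pivot itself stays fixed. A quick numeric check of that idea with plain `f64` points, as an assumed simplification of the matrix composition done via `pre_concat` above:

```rust
// Scale (x, y) around the pivot (px, py): translate(-p), scale, translate(+p).
fn scale_at(sx: f64, sy: f64, px: f64, py: f64, x: f64, y: f64) -> (f64, f64) {
    let (x, y) = (x - px, y - py);
    let (x, y) = (x * sx, y * sy);
    (x + px, y + py)
}

fn main() {
    // The pivot is a fixed point of the transform...
    assert_eq!(scale_at(2.0, 2.0, 10.0, 10.0, 10.0, 10.0), (10.0, 10.0));
    // ...while other points scale away from it.
    assert_eq!(scale_at(2.0, 2.0, 10.0, 10.0, 12.0, 11.0), (14.0, 12.0));
}
```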


@ -23,8 +23,8 @@ pub fn cbor(
/// A [path]($syntax/#paths) to a CBOR file or raw CBOR bytes. /// A [path]($syntax/#paths) to a CBOR file or raw CBOR bytes.
source: Spanned<DataSource>, source: Spanned<DataSource>,
) -> SourceResult<Value> { ) -> SourceResult<Value> {
let data = source.load(engine.world)?; let loaded = source.load(engine.world)?;
ciborium::from_reader(data.as_slice()) ciborium::from_reader(loaded.data.as_slice())
.map_err(|err| eco_format!("failed to parse CBOR ({err})")) .map_err(|err| eco_format!("failed to parse CBOR ({err})"))
.at(source.span) .at(source.span)
} }


@ -1,7 +1,7 @@
use ecow::{eco_format, EcoString}; use az::SaturatingAs;
use typst_syntax::Spanned; use typst_syntax::Spanned;
use crate::diag::{bail, At, SourceResult}; use crate::diag::{bail, LineCol, LoadError, LoadedWithin, ReportPos, SourceResult};
use crate::engine::Engine; use crate::engine::Engine;
use crate::foundations::{cast, func, scope, Array, Dict, IntoValue, Type, Value}; use crate::foundations::{cast, func, scope, Array, Dict, IntoValue, Type, Value};
use crate::loading::{DataSource, Load, Readable}; use crate::loading::{DataSource, Load, Readable};
@ -44,7 +44,7 @@ pub fn csv(
#[default(RowType::Array)] #[default(RowType::Array)]
row_type: RowType, row_type: RowType,
) -> SourceResult<Array> { ) -> SourceResult<Array> {
let data = source.load(engine.world)?; let loaded = source.load(engine.world)?;
let mut builder = ::csv::ReaderBuilder::new(); let mut builder = ::csv::ReaderBuilder::new();
let has_headers = row_type == RowType::Dict; let has_headers = row_type == RowType::Dict;
@ -53,7 +53,7 @@ pub fn csv(
// Counting lines from 1 by default. // Counting lines from 1 by default.
let mut line_offset: usize = 1; let mut line_offset: usize = 1;
let mut reader = builder.from_reader(data.as_slice()); let mut reader = builder.from_reader(loaded.data.as_slice());
let mut headers: Option<::csv::StringRecord> = None; let mut headers: Option<::csv::StringRecord> = None;
if has_headers { if has_headers {
@ -62,9 +62,9 @@ pub fn csv(
headers = Some( headers = Some(
reader reader
.headers() .headers()
.cloned()
.map_err(|err| format_csv_error(err, 1)) .map_err(|err| format_csv_error(err, 1))
.at(source.span)? .within(&loaded)?,
.clone(),
); );
} }
@ -74,7 +74,7 @@ pub fn csv(
// incorrect with `has_headers` set to `false`. See issue: // incorrect with `has_headers` set to `false`. See issue:
// https://github.com/BurntSushi/rust-csv/issues/184 // https://github.com/BurntSushi/rust-csv/issues/184
let line = line + line_offset; let line = line + line_offset;
let row = result.map_err(|err| format_csv_error(err, line)).at(source.span)?; let row = result.map_err(|err| format_csv_error(err, line)).within(&loaded)?;
let item = if let Some(headers) = &headers { let item = if let Some(headers) = &headers {
let mut dict = Dict::new(); let mut dict = Dict::new();
for (field, value) in headers.iter().zip(&row) { for (field, value) in headers.iter().zip(&row) {
@ -164,15 +164,23 @@ cast! {
} }
/// Format the user-facing CSV error message. /// Format the user-facing CSV error message.
fn format_csv_error(err: ::csv::Error, line: usize) -> EcoString { fn format_csv_error(err: ::csv::Error, line: usize) -> LoadError {
let msg = "failed to parse CSV";
let pos = (err.kind().position())
.map(|pos| {
let start = pos.byte().saturating_as();
ReportPos::from(start..start)
})
.unwrap_or(LineCol::one_based(line, 1).into());
match err.kind() { match err.kind() {
::csv::ErrorKind::Utf8 { .. } => "file is not valid utf-8".into(), ::csv::ErrorKind::Utf8 { .. } => {
::csv::ErrorKind::UnequalLengths { expected_len, len, .. } => { LoadError::new(pos, msg, "file is not valid utf-8")
eco_format!(
"failed to parse CSV (found {len} instead of \
{expected_len} fields in line {line})"
)
} }
_ => eco_format!("failed to parse CSV ({err})"), ::csv::ErrorKind::UnequalLengths { expected_len, len, .. } => {
let err =
format!("found {len} instead of {expected_len} fields in line {line}");
LoadError::new(pos, msg, err)
}
_ => LoadError::new(pos, "failed to parse CSV", err),
} }
} }


@ -1,7 +1,7 @@
use ecow::eco_format; use ecow::eco_format;
use typst_syntax::Spanned; use typst_syntax::Spanned;
use crate::diag::{At, SourceResult}; use crate::diag::{At, LineCol, LoadError, LoadedWithin, SourceResult};
use crate::engine::Engine; use crate::engine::Engine;
use crate::foundations::{func, scope, Str, Value}; use crate::foundations::{func, scope, Str, Value};
use crate::loading::{DataSource, Load, Readable}; use crate::loading::{DataSource, Load, Readable};
@ -54,10 +54,13 @@ pub fn json(
/// A [path]($syntax/#paths) to a JSON file or raw JSON bytes. /// A [path]($syntax/#paths) to a JSON file or raw JSON bytes.
source: Spanned<DataSource>, source: Spanned<DataSource>,
) -> SourceResult<Value> { ) -> SourceResult<Value> {
let data = source.load(engine.world)?; let loaded = source.load(engine.world)?;
serde_json::from_slice(data.as_slice()) serde_json::from_slice(loaded.data.as_slice())
.map_err(|err| eco_format!("failed to parse JSON ({err})")) .map_err(|err| {
.at(source.span) let pos = LineCol::one_based(err.line(), err.column());
LoadError::new(pos, "failed to parse JSON", err)
})
.within(&loaded)
} }
#[scope] #[scope]


@ -17,7 +17,7 @@ mod yaml_;
use comemo::Tracked; use comemo::Tracked;
use ecow::EcoString; use ecow::EcoString;
use typst_syntax::Spanned; use typst_syntax::{FileId, Spanned};
pub use self::cbor_::*; pub use self::cbor_::*;
pub use self::csv_::*; pub use self::csv_::*;
@ -74,39 +74,44 @@ pub trait Load {
} }
impl Load for Spanned<DataSource> { impl Load for Spanned<DataSource> {
type Output = Bytes; type Output = Loaded;
fn load(&self, world: Tracked<dyn World + '_>) -> SourceResult<Bytes> { fn load(&self, world: Tracked<dyn World + '_>) -> SourceResult<Self::Output> {
self.as_ref().load(world) self.as_ref().load(world)
} }
} }
impl Load for Spanned<&DataSource> { impl Load for Spanned<&DataSource> {
type Output = Bytes; type Output = Loaded;
fn load(&self, world: Tracked<dyn World + '_>) -> SourceResult<Bytes> { fn load(&self, world: Tracked<dyn World + '_>) -> SourceResult<Self::Output> {
match &self.v { match &self.v {
DataSource::Path(path) => { DataSource::Path(path) => {
let file_id = self.span.resolve_path(path).at(self.span)?; let file_id = self.span.resolve_path(path).at(self.span)?;
world.file(file_id).at(self.span) let data = world.file(file_id).at(self.span)?;
let source = Spanned::new(LoadSource::Path(file_id), self.span);
Ok(Loaded::new(source, data))
}
DataSource::Bytes(data) => {
let source = Spanned::new(LoadSource::Bytes, self.span);
Ok(Loaded::new(source, data.clone()))
} }
DataSource::Bytes(bytes) => Ok(bytes.clone()),
} }
} }
} }
impl Load for Spanned<OneOrMultiple<DataSource>> { impl Load for Spanned<OneOrMultiple<DataSource>> {
type Output = Vec<Bytes>; type Output = Vec<Loaded>;
fn load(&self, world: Tracked<dyn World + '_>) -> SourceResult<Vec<Bytes>> { fn load(&self, world: Tracked<dyn World + '_>) -> SourceResult<Self::Output> {
self.as_ref().load(world) self.as_ref().load(world)
} }
} }
impl Load for Spanned<&OneOrMultiple<DataSource>> { impl Load for Spanned<&OneOrMultiple<DataSource>> {
type Output = Vec<Bytes>; type Output = Vec<Loaded>;
fn load(&self, world: Tracked<dyn World + '_>) -> SourceResult<Vec<Bytes>> { fn load(&self, world: Tracked<dyn World + '_>) -> SourceResult<Self::Output> {
self.v self.v
.0 .0
.iter() .iter()
@ -115,6 +120,28 @@ impl Load for Spanned<&OneOrMultiple<DataSource>> {
} }
} }
/// Data loaded from a [`DataSource`].
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct Loaded {
/// Details about where `data` was loaded from.
pub source: Spanned<LoadSource>,
/// The loaded data.
pub data: Bytes,
}
impl Loaded {
pub fn new(source: Spanned<LoadSource>, bytes: Bytes) -> Self {
Self { source, data: bytes }
}
}
/// A loaded [`DataSource`].
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum LoadSource {
Path(FileId),
Bytes,
}
/// A value that can be read from a file. /// A value that can be read from a file.
#[derive(Debug, Clone, PartialEq, Hash)] #[derive(Debug, Clone, PartialEq, Hash)]
pub enum Readable { pub enum Readable {


@ -1,11 +1,10 @@
use ecow::EcoString; use ecow::EcoString;
use typst_syntax::Spanned; use typst_syntax::Spanned;
use crate::diag::{At, FileError, SourceResult}; use crate::diag::{LoadedWithin, SourceResult};
use crate::engine::Engine; use crate::engine::Engine;
use crate::foundations::{func, Cast}; use crate::foundations::{func, Cast};
use crate::loading::Readable; use crate::loading::{DataSource, Load, Readable};
use crate::World;
/// Reads plain text or data from a file. /// Reads plain text or data from a file.
/// ///
@ -36,14 +35,10 @@ pub fn read(
#[default(Some(Encoding::Utf8))] #[default(Some(Encoding::Utf8))]
encoding: Option<Encoding>, encoding: Option<Encoding>,
) -> SourceResult<Readable> { ) -> SourceResult<Readable> {
let Spanned { v: path, span } = path; let loaded = path.map(DataSource::Path).load(engine.world)?;
let id = span.resolve_path(&path).at(span)?;
let data = engine.world.file(id).at(span)?;
Ok(match encoding { Ok(match encoding {
None => Readable::Bytes(data), None => Readable::Bytes(loaded.data),
Some(Encoding::Utf8) => { Some(Encoding::Utf8) => Readable::Str(loaded.data.to_str().within(&loaded)?),
Readable::Str(data.to_str().map_err(FileError::from).at(span)?)
}
}) })
} }


@ -1,7 +1,7 @@
use ecow::{eco_format, EcoString}; use ecow::eco_format;
use typst_syntax::{is_newline, Spanned}; use typst_syntax::Spanned;
use crate::diag::{At, FileError, SourceResult}; use crate::diag::{At, LoadError, LoadedWithin, ReportPos, SourceResult};
use crate::engine::Engine; use crate::engine::Engine;
use crate::foundations::{func, scope, Str, Value}; use crate::foundations::{func, scope, Str, Value};
use crate::loading::{DataSource, Load, Readable}; use crate::loading::{DataSource, Load, Readable};
@ -32,11 +32,9 @@ pub fn toml(
/// A [path]($syntax/#paths) to a TOML file or raw TOML bytes. /// A [path]($syntax/#paths) to a TOML file or raw TOML bytes.
source: Spanned<DataSource>, source: Spanned<DataSource>,
) -> SourceResult<Value> { ) -> SourceResult<Value> {
let data = source.load(engine.world)?; let loaded = source.load(engine.world)?;
let raw = data.as_str().map_err(FileError::from).at(source.span)?; let raw = loaded.data.as_str().within(&loaded)?;
::toml::from_str(raw) ::toml::from_str(raw).map_err(format_toml_error).within(&loaded)
.map_err(|err| format_toml_error(err, raw))
.at(source.span)
} }
#[scope] #[scope]
@ -71,15 +69,7 @@ impl toml {
} }
/// Format the user-facing TOML error message. /// Format the user-facing TOML error message.
fn format_toml_error(error: ::toml::de::Error, raw: &str) -> EcoString { fn format_toml_error(error: ::toml::de::Error) -> LoadError {
if let Some(head) = error.span().and_then(|range| raw.get(..range.start)) { let pos = error.span().map(ReportPos::from).unwrap_or_default();
let line = head.lines().count(); LoadError::new(pos, "failed to parse TOML", error.message())
let column = 1 + head.chars().rev().take_while(|&c| !is_newline(c)).count();
eco_format!(
"failed to parse TOML ({} at line {line} column {column})",
error.message(),
)
} else {
eco_format!("failed to parse TOML ({})", error.message())
}
} }


@ -1,8 +1,7 @@
use ecow::EcoString;
use roxmltree::ParsingOptions; use roxmltree::ParsingOptions;
use typst_syntax::Spanned; use typst_syntax::Spanned;
use crate::diag::{format_xml_like_error, At, FileError, SourceResult}; use crate::diag::{format_xml_like_error, LoadError, LoadedWithin, SourceResult};
use crate::engine::Engine; use crate::engine::Engine;
use crate::foundations::{dict, func, scope, Array, Dict, IntoValue, Str, Value}; use crate::foundations::{dict, func, scope, Array, Dict, IntoValue, Str, Value};
use crate::loading::{DataSource, Load, Readable}; use crate::loading::{DataSource, Load, Readable};
@ -61,14 +60,14 @@ pub fn xml(
/// A [path]($syntax/#paths) to an XML file or raw XML bytes. /// A [path]($syntax/#paths) to an XML file or raw XML bytes.
source: Spanned<DataSource>, source: Spanned<DataSource>,
) -> SourceResult<Value> { ) -> SourceResult<Value> {
let data = source.load(engine.world)?; let loaded = source.load(engine.world)?;
let text = data.as_str().map_err(FileError::from).at(source.span)?; let text = loaded.data.as_str().within(&loaded)?;
let document = roxmltree::Document::parse_with_options( let document = roxmltree::Document::parse_with_options(
text, text,
ParsingOptions { allow_dtd: true, ..Default::default() }, ParsingOptions { allow_dtd: true, ..Default::default() },
) )
.map_err(format_xml_error) .map_err(format_xml_error)
.at(source.span)?; .within(&loaded)?;
Ok(convert_xml(document.root())) Ok(convert_xml(document.root()))
} }
@ -111,6 +110,6 @@ fn convert_xml(node: roxmltree::Node) -> Value {
} }
/// Format the user-facing XML error message. /// Format the user-facing XML error message.
fn format_xml_error(error: roxmltree::Error) -> EcoString { fn format_xml_error(error: roxmltree::Error) -> LoadError {
format_xml_like_error("XML", error) format_xml_like_error("XML", error)
} }


@ -1,7 +1,7 @@
use ecow::eco_format; use ecow::eco_format;
use typst_syntax::Spanned; use typst_syntax::Spanned;
use crate::diag::{At, SourceResult}; use crate::diag::{At, LineCol, LoadError, LoadedWithin, ReportPos, SourceResult};
use crate::engine::Engine; use crate::engine::Engine;
use crate::foundations::{func, scope, Str, Value}; use crate::foundations::{func, scope, Str, Value};
use crate::loading::{DataSource, Load, Readable}; use crate::loading::{DataSource, Load, Readable};
@ -44,10 +44,10 @@ pub fn yaml(
/// A [path]($syntax/#paths) to a YAML file or raw YAML bytes. /// A [path]($syntax/#paths) to a YAML file or raw YAML bytes.
source: Spanned<DataSource>, source: Spanned<DataSource>,
) -> SourceResult<Value> { ) -> SourceResult<Value> {
let data = source.load(engine.world)?; let loaded = source.load(engine.world)?;
serde_yaml::from_slice(data.as_slice()) serde_yaml::from_slice(loaded.data.as_slice())
.map_err(|err| eco_format!("failed to parse YAML ({err})")) .map_err(format_yaml_error)
.at(source.span) .within(&loaded)
} }
#[scope] #[scope]
@ -76,3 +76,16 @@ impl yaml {
.at(span) .at(span)
} }
} }
/// Format the user-facing YAML error message.
pub fn format_yaml_error(error: serde_yaml::Error) -> LoadError {
let pos = error
.location()
.map(|loc| {
let line_col = LineCol::one_based(loc.line(), loc.column());
let range = loc.index()..loc.index();
ReportPos::full(range, line_col)
})
.unwrap_or_default();
LoadError::new(pos, "failed to parse YAML", error)
}


@ -13,8 +13,8 @@ use crate::math::Mathy;
/// ``` /// ```
#[elem(Mathy)] #[elem(Mathy)]
pub struct AccentElem { pub struct AccentElem {
/// The base to which the accent is applied. /// The base to which the accent is applied. May consist of multiple
/// May consist of multiple letters. /// letters.
/// ///
/// ```example /// ```example
/// $arrow(A B C)$ /// $arrow(A B C)$
@ -51,9 +51,24 @@ pub struct AccentElem {
pub accent: Accent, pub accent: Accent,
/// The size of the accent, relative to the width of the base. /// The size of the accent, relative to the width of the base.
///
/// ```example
/// $dash(A, size: #150%)$
/// ```
#[resolve] #[resolve]
#[default(Rel::one())] #[default(Rel::one())]
pub size: Rel<Length>, pub size: Rel<Length>,
/// Whether to remove the dot on top of lowercase i and j when adding a top
/// accent.
///
/// This enables the `dtls` OpenType feature.
///
/// ```example
/// $hat(dotless: #false, i)$
/// ```
#[default(true)]
pub dotless: bool,
} }
/// An accent character. /// An accent character.
@ -65,6 +80,19 @@ impl Accent {
pub fn new(c: char) -> Self { pub fn new(c: char) -> Self {
Self(Self::combine(c).unwrap_or(c)) Self(Self::combine(c).unwrap_or(c))
} }
/// List of bottom accents. Currently just a list of ones included in the
/// Unicode math class document.
const BOTTOM: &[char] = &[
'\u{0323}', '\u{032C}', '\u{032D}', '\u{032E}', '\u{032F}', '\u{0330}',
'\u{0331}', '\u{0332}', '\u{0333}', '\u{033A}', '\u{20E8}', '\u{20EC}',
'\u{20ED}', '\u{20EE}', '\u{20EF}',
];
/// Whether this accent is a bottom accent or not.
pub fn is_bottom(&self) -> bool {
Self::BOTTOM.contains(&self.0)
}
} }
/// This macro generates accent-related functions. /// This macro generates accent-related functions.
@ -103,11 +131,18 @@ macro_rules! accents {
/// The size of the accent, relative to the width of the base. /// The size of the accent, relative to the width of the base.
#[named] #[named]
size: Option<Rel<Length>>, size: Option<Rel<Length>>,
/// Whether to remove the dot on top of lowercase i and j when
/// adding a top accent.
#[named]
dotless: Option<bool>,
) -> Content { ) -> Content {
let mut accent = AccentElem::new(base, Accent::new($primary)); let mut accent = AccentElem::new(base, Accent::new($primary));
if let Some(size) = size { if let Some(size) = size {
accent = accent.with_size(size); accent = accent.with_size(size);
} }
if let Some(dotless) = dotless {
accent = accent.with_dotless(dotless);
}
accent.pack() accent.pack()
} }
)+ )+


@ -15,7 +15,7 @@ use crate::math::Mathy;
/// # Syntax /// # Syntax
/// This function also has dedicated syntax: Use a slash to turn neighbouring /// This function also has dedicated syntax: Use a slash to turn neighbouring
/// expressions into a fraction. Multiple atoms can be grouped into a single /// expressions into a fraction. Multiple atoms can be grouped into a single
/// expression using round grouping parenthesis. Such parentheses are removed /// expression using round grouping parentheses. Such parentheses are removed
/// from the output, but you can nest multiple to force them. /// from the output, but you can nest multiple to force them.
#[elem(title = "Fraction", Mathy)] #[elem(title = "Fraction", Mathy)]
pub struct FracElem { pub struct FracElem {


@ -6,7 +6,7 @@ use std::num::NonZeroUsize;
use std::path::Path; use std::path::Path;
use std::sync::{Arc, LazyLock}; use std::sync::{Arc, LazyLock};
use comemo::Tracked; use comemo::{Track, Tracked};
use ecow::{eco_format, EcoString, EcoVec}; use ecow::{eco_format, EcoString, EcoVec};
use hayagriva::archive::ArchivedStyle; use hayagriva::archive::ArchivedStyle;
use hayagriva::io::BibLaTeXError; use hayagriva::io::BibLaTeXError;
@ -19,8 +19,11 @@ use smallvec::{smallvec, SmallVec};
use typst_syntax::{Span, Spanned}; use typst_syntax::{Span, Spanned};
use typst_utils::{Get, ManuallyHash, NonZeroExt, PicoStr}; use typst_utils::{Get, ManuallyHash, NonZeroExt, PicoStr};
use crate::diag::{bail, error, At, FileError, HintedStrResult, SourceResult, StrResult}; use crate::diag::{
use crate::engine::Engine; bail, error, At, HintedStrResult, LoadError, LoadResult, LoadedWithin, ReportPos,
SourceResult, StrResult,
};
use crate::engine::{Engine, Sink};
use crate::foundations::{ use crate::foundations::{
elem, Bytes, CastInfo, Content, Derived, FromValue, IntoValue, Label, NativeElement, elem, Bytes, CastInfo, Content, Derived, FromValue, IntoValue, Label, NativeElement,
OneOrMultiple, Packed, Reflect, Scope, Show, ShowSet, Smart, StyleChain, Styles, OneOrMultiple, Packed, Reflect, Scope, Show, ShowSet, Smart, StyleChain, Styles,
@ -31,7 +34,7 @@ use crate::layout::{
BlockBody, BlockElem, Em, GridCell, GridChild, GridElem, GridItem, HElem, PadElem, BlockBody, BlockElem, Em, GridCell, GridChild, GridElem, GridItem, HElem, PadElem,
Sides, Sizing, TrackSizings, Sides, Sizing, TrackSizings,
}; };
use crate::loading::{DataSource, Load}; use crate::loading::{format_yaml_error, DataSource, Load, LoadSource, Loaded};
use crate::model::{ use crate::model::{
CitationForm, CiteGroup, Destination, FootnoteElem, HeadingElem, LinkElem, ParElem, CitationForm, CiteGroup, Destination, FootnoteElem, HeadingElem, LinkElem, ParElem,
Url, Url,
@ -48,8 +51,8 @@ use crate::World;
/// You can create a new bibliography by calling this function with a path /// You can create a new bibliography by calling this function with a path
/// to a bibliography file in either one of two formats: /// to a bibliography file in either one of two formats:
/// ///
/// - A Hayagriva `.yml` file. Hayagriva is a new bibliography file format /// - A Hayagriva `.yaml`/`.yml` file. Hayagriva is a new bibliography
/// designed for use with Typst. Visit its /// file format designed for use with Typst. Visit its
/// [documentation](https://github.com/typst/hayagriva/blob/main/docs/file-format.md) /// [documentation](https://github.com/typst/hayagriva/blob/main/docs/file-format.md)
/// for more details. /// for more details.
/// - A BibLaTeX `.bib` file. /// - A BibLaTeX `.bib` file.
@ -224,7 +227,15 @@ impl Show for Packed<BibliographyElem> {
let references = works let references = works
.references .references
.as_ref() .as_ref()
.ok_or("CSL style is not suitable for bibliographies") .ok_or_else(|| match self.style(styles).source {
CslSource::Named(style) => eco_format!(
"CSL style \"{}\" is not suitable for bibliographies",
style.display_name()
),
CslSource::Normal(..) => {
"CSL style is not suitable for bibliographies".into()
}
})
.at(span)?; .at(span)?;
if references.iter().any(|(prefix, _)| prefix.is_some()) { if references.iter().any(|(prefix, _)| prefix.is_some()) {
@ -294,24 +305,21 @@ impl Bibliography {
world: Tracked<dyn World + '_>, world: Tracked<dyn World + '_>,
sources: Spanned<OneOrMultiple<DataSource>>, sources: Spanned<OneOrMultiple<DataSource>>,
) -> SourceResult<Derived<OneOrMultiple<DataSource>, Self>> { ) -> SourceResult<Derived<OneOrMultiple<DataSource>, Self>> {
let data = sources.load(world)?; let loaded = sources.load(world)?;
let bibliography = Self::decode(&sources.v, &data).at(sources.span)?; let bibliography = Self::decode(&loaded)?;
Ok(Derived::new(sources.v, bibliography)) Ok(Derived::new(sources.v, bibliography))
} }
/// Decode a bibliography from loaded data sources. /// Decode a bibliography from loaded data sources.
#[comemo::memoize] #[comemo::memoize]
#[typst_macros::time(name = "load bibliography")] #[typst_macros::time(name = "load bibliography")]
fn decode( fn decode(data: &[Loaded]) -> SourceResult<Bibliography> {
sources: &OneOrMultiple<DataSource>,
data: &[Bytes],
) -> StrResult<Bibliography> {
let mut map = IndexMap::new(); let mut map = IndexMap::new();
let mut duplicates = Vec::<EcoString>::new(); let mut duplicates = Vec::<EcoString>::new();
// We might have multiple bib/yaml files // We might have multiple bib/yaml files
for (source, data) in sources.0.iter().zip(data) { for d in data.iter() {
let library = decode_library(source, data)?; let library = decode_library(d)?;
for entry in library { for entry in library {
match map.entry(Label::new(PicoStr::intern(entry.key()))) { match map.entry(Label::new(PicoStr::intern(entry.key()))) {
indexmap::map::Entry::Vacant(vacant) => { indexmap::map::Entry::Vacant(vacant) => {
@ -325,7 +333,11 @@ impl Bibliography {
} }
if !duplicates.is_empty() { if !duplicates.is_empty() {
bail!("duplicate bibliography keys: {}", duplicates.join(", ")); // TODO: Store spans of entries for duplicate key error messages.
// Requires hayagriva entries to store their location, which should
// be fine, since they are 1kb anyway.
let span = data.first().unwrap().source.span;
bail!(span, "duplicate bibliography keys: {}", duplicates.join(", "));
} }
Ok(Bibliography(Arc::new(ManuallyHash::new(map, typst_utils::hash128(data))))) Ok(Bibliography(Arc::new(ManuallyHash::new(map, typst_utils::hash128(data)))))
@ -351,36 +363,47 @@ impl Debug for Bibliography {
} }
/// Decode one library from one data source. /// Decode one library from one data source.
fn decode_library(source: &DataSource, data: &Bytes) -> StrResult<Library> { fn decode_library(loaded: &Loaded) -> SourceResult<Library> {
let src = data.as_str().map_err(FileError::from)?; let data = loaded.data.as_str().within(loaded)?;
if let DataSource::Path(path) = source { if let LoadSource::Path(file_id) = loaded.source.v {
// If we got a path, use the extension to determine whether it is // If we got a path, use the extension to determine whether it is
// YAML or BibLaTeX. // YAML or BibLaTeX.
let ext = Path::new(path.as_str()) let ext = file_id
.vpath()
.as_rooted_path()
.extension() .extension()
.and_then(OsStr::to_str) .and_then(OsStr::to_str)
.unwrap_or_default(); .unwrap_or_default();
match ext.to_lowercase().as_str() { match ext.to_lowercase().as_str() {
"yml" | "yaml" => hayagriva::io::from_yaml_str(src) "yml" | "yaml" => hayagriva::io::from_yaml_str(data)
.map_err(|err| eco_format!("failed to parse YAML ({err})")), .map_err(format_yaml_error)
"bib" => hayagriva::io::from_biblatex_str(src) .within(loaded),
.map_err(|errors| format_biblatex_error(src, Some(path), errors)), "bib" => hayagriva::io::from_biblatex_str(data)
_ => bail!("unknown bibliography format (must be .yml/.yaml or .bib)"), .map_err(format_biblatex_error)
.within(loaded),
_ => bail!(
loaded.source.span,
"unknown bibliography format (must be .yml/.yaml or .bib)"
),
} }
} else { } else {
// If we just got bytes, we need to guess. If it can be decoded as // If we just got bytes, we need to guess. If it can be decoded as
// hayagriva YAML, we'll use that. // hayagriva YAML, we'll use that.
let haya_err = match hayagriva::io::from_yaml_str(src) { let haya_err = match hayagriva::io::from_yaml_str(data) {
Ok(library) => return Ok(library), Ok(library) => return Ok(library),
Err(err) => err, Err(err) => err,
}; };
// If it can be decoded as BibLaTeX, we use that instead. // If it can be decoded as BibLaTeX, we use that instead.
let bib_errs = match hayagriva::io::from_biblatex_str(src) { let bib_errs = match hayagriva::io::from_biblatex_str(data) {
Ok(library) => return Ok(library), // If the file is almost valid yaml, but contains no `@` character
Err(err) => err, // it will be successfully parsed as an empty BibLaTeX library,
// since BibLaTeX does support arbitrary text outside of entries.
Ok(library) if !library.is_empty() => return Ok(library),
Ok(_) => None,
Err(err) => Some(err),
}; };
// If neither decoded correctly, check whether `:` or `{` appears // If neither decoded correctly, check whether `:` or `{` appears
@ -388,7 +411,7 @@ fn decode_library(source: &DataSource, data: &Bytes) -> StrResult<Library> {
// and emit the more appropriate error. // and emit the more appropriate error.
let mut yaml = 0; let mut yaml = 0;
let mut biblatex = 0; let mut biblatex = 0;
for c in src.chars() { for c in data.chars() {
match c { match c {
':' => yaml += 1, ':' => yaml += 1,
'{' => biblatex += 1, '{' => biblatex += 1,
@ -396,37 +419,33 @@ fn decode_library(source: &DataSource, data: &Bytes) -> StrResult<Library> {
} }
} }
if yaml > biblatex { match bib_errs {
bail!("failed to parse YAML ({haya_err})") Some(bib_errs) if biblatex >= yaml => {
} else { Err(format_biblatex_error(bib_errs)).within(loaded)
Err(format_biblatex_error(src, None, bib_errs)) }
_ => Err(format_yaml_error(haya_err)).within(loaded),
} }
} }
} }
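The fallback described in the comments above reports the error for whichever format the raw bytes look more like, judged by counting ':' against '{'. A small sketch of just that guess (hypothetical helper name, not in the diff):

```rust
// Raw bytes with at least as many '{' as ':' are more likely BibLaTeX than
// Hayagriva YAML, so the BibLaTeX parse error is the more useful one to surface.
fn looks_like_biblatex(src: &str) -> bool {
    let yaml = src.chars().filter(|&c| c == ':').count();
    let biblatex = src.chars().filter(|&c| c == '{').count();
    biblatex >= yaml
}

fn main() {
    assert!(looks_like_biblatex("@book{key, title = {Typst}}"));
    assert!(!looks_like_biblatex("key:\n  type: Book\n  title: Typst"));
}
```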
/// Format a BibLaTeX loading error. /// Format a BibLaTeX loading error.
fn format_biblatex_error( fn format_biblatex_error(errors: Vec<BibLaTeXError>) -> LoadError {
src: &str, // TODO: return multiple errors?
path: Option<&str>, let Some(error) = errors.into_iter().next() else {
errors: Vec<BibLaTeXError>, // TODO: can this even happen, should we just unwrap?
) -> EcoString { return LoadError::new(
let Some(error) = errors.first() else { ReportPos::None,
return match path { "failed to parse BibLaTeX",
Some(path) => eco_format!("failed to parse BibLaTeX file ({path})"), "something went wrong",
None => eco_format!("failed to parse BibLaTeX"), );
};
}; };
let (span, msg) = match error { let (range, msg) = match error {
BibLaTeXError::Parse(error) => (&error.span, error.kind.to_string()), BibLaTeXError::Parse(error) => (error.span, error.kind.to_string()),
BibLaTeXError::Type(error) => (&error.span, error.kind.to_string()), BibLaTeXError::Type(error) => (error.span, error.kind.to_string()),
}; };
let line = src.get(..span.start).unwrap_or_default().lines().count(); LoadError::new(range, "failed to parse BibLaTeX", msg)
match path {
Some(path) => eco_format!("failed to parse BibLaTeX file ({path}:{line}: {msg})"),
None => eco_format!("failed to parse BibLaTeX ({line}: {msg})"),
}
} }
/// A loaded CSL style. /// A loaded CSL style.
@ -442,8 +461,8 @@ impl CslStyle {
let style = match &source { let style = match &source {
CslSource::Named(style) => Self::from_archived(*style), CslSource::Named(style) => Self::from_archived(*style),
CslSource::Normal(source) => { CslSource::Normal(source) => {
let data = Spanned::new(source, span).load(world)?; let loaded = Spanned::new(source, span).load(world)?;
Self::from_data(data).at(span)? Self::from_data(&loaded.data).within(&loaded)?
} }
}; };
Ok(Derived::new(source, style)) Ok(Derived::new(source, style))
@ -464,16 +483,18 @@ impl CslStyle {
/// Load a CSL style from file contents. /// Load a CSL style from file contents.
#[comemo::memoize] #[comemo::memoize]
pub fn from_data(data: Bytes) -> StrResult<CslStyle> { pub fn from_data(bytes: &Bytes) -> LoadResult<CslStyle> {
let text = data.as_str().map_err(FileError::from)?; let text = bytes.as_str()?;
citationberg::IndependentStyle::from_xml(text) citationberg::IndependentStyle::from_xml(text)
.map(|style| { .map(|style| {
Self(Arc::new(ManuallyHash::new( Self(Arc::new(ManuallyHash::new(
style, style,
typst_utils::hash128(&(TypeId::of::<Bytes>(), data)), typst_utils::hash128(&(TypeId::of::<Bytes>(), bytes)),
))) )))
}) })
.map_err(|err| eco_format!("failed to load CSL style ({err})")) .map_err(|err| {
LoadError::new(ReportPos::None, "failed to load CSL style", err)
})
} }
/// Get the underlying independent style. /// Get the underlying independent style.
@ -999,6 +1020,8 @@ impl ElemRenderer<'_> {
(self.routines.eval_string)( (self.routines.eval_string)(
self.routines, self.routines,
self.world, self.world,
// TODO: propagate warnings
Sink::new().track_mut(),
math, math,
self.span, self.span,
EvalMode::Math, EvalMode::Math,


@@ -129,7 +129,7 @@ pub struct EnumElem {
     ///   [Ahead],
     /// )
     /// ```
-    pub start: Smart<usize>,
+    pub start: Smart<u64>,
 
     /// Whether to display the full numbering, including the numbers of
     /// all parent enumerations.
@@ -217,7 +217,7 @@ pub struct EnumElem {
     #[internal]
     #[fold]
     #[ghost]
-    pub parents: SmallVec<[usize; 4]>,
+    pub parents: SmallVec<[u64; 4]>,
 }
 
 #[scope]
@@ -259,10 +259,11 @@ impl Show for Packed<EnumElem> {
             .spanned(self.span());
 
         if tight {
-            let leading = ParElem::leading_in(styles);
-            let spacing =
-                VElem::new(leading.into()).with_weak(true).with_attach(true).pack();
-            realized = spacing + realized;
+            let spacing = self
+                .spacing(styles)
+                .unwrap_or_else(|| ParElem::leading_in(styles).into());
+            let v = VElem::new(spacing.into()).with_weak(true).with_attach(true).pack();
+            realized = v + realized;
         }
 
         Ok(realized)
@@ -274,7 +275,7 @@ impl Show for Packed<EnumElem> {
 pub struct EnumItem {
     /// The item's number.
     #[positional]
-    pub number: Option<usize>,
+    pub number: Option<u64>,
 
     /// The item's body.
     #[required]

View File

@@ -125,6 +125,9 @@ pub struct FigureElem {
     ///
     /// ```example
     /// #set page(height: 200pt)
+    /// #show figure: set place(
+    ///   clearance: 1em,
+    /// )
     ///
     /// = Introduction
     /// #figure(
@@ -457,7 +460,7 @@ impl Outlinable for Packed<FigureElem> {
 /// customize the appearance of captions for all figures or figures of a
 /// specific kind.
 ///
-/// In addition to its `pos` and `body`, the `caption` also provides the
+/// In addition to its `position` and `body`, the `caption` also provides the
 /// figure's `kind`, `supplement`, `counter`, and `numbering` as fields. These
 /// parts can be used in [`where`]($function.where) selectors and show rules to
 /// build a completely custom caption.

View File

@@ -166,10 +166,11 @@ impl Show for Packed<ListElem> {
             .spanned(self.span());
 
         if tight {
-            let leading = ParElem::leading_in(styles);
-            let spacing =
-                VElem::new(leading.into()).with_weak(true).with_attach(true).pack();
-            realized = spacing + realized;
+            let spacing = self
+                .spacing(styles)
+                .unwrap_or_else(|| ParElem::leading_in(styles).into());
+            let v = VElem::new(spacing.into()).with_weak(true).with_attach(true).pack();
+            realized = v + realized;
         }
 
         Ok(realized)

View File

@ -1,7 +1,7 @@
use std::str::FromStr; use std::str::FromStr;
use chinese_number::{ use chinese_number::{
from_usize_to_chinese_ten_thousand as usize_to_chinese, ChineseCase, ChineseVariant, from_u64_to_chinese_ten_thousand as u64_to_chinese, ChineseCase, ChineseVariant,
}; };
use comemo::Tracked; use comemo::Tracked;
use ecow::{eco_format, EcoString, EcoVec}; use ecow::{eco_format, EcoString, EcoVec};
@ -9,7 +9,6 @@ use ecow::{eco_format, EcoString, EcoVec};
use crate::diag::SourceResult; use crate::diag::SourceResult;
use crate::engine::Engine; use crate::engine::Engine;
use crate::foundations::{cast, func, Context, Func, Str, Value}; use crate::foundations::{cast, func, Context, Func, Str, Value};
use crate::text::Case;
/// Applies a numbering to a sequence of numbers. /// Applies a numbering to a sequence of numbers.
/// ///
@ -85,7 +84,7 @@ pub fn numbering(
/// If `numbering` is a pattern and more numbers than counting symbols are /// If `numbering` is a pattern and more numbers than counting symbols are
/// given, the last counting symbol with its prefix is repeated. /// given, the last counting symbol with its prefix is repeated.
#[variadic] #[variadic]
numbers: Vec<usize>, numbers: Vec<u64>,
) -> SourceResult<Value> { ) -> SourceResult<Value> {
numbering.apply(engine, context, &numbers) numbering.apply(engine, context, &numbers)
} }
@ -105,7 +104,7 @@ impl Numbering {
&self, &self,
engine: &mut Engine, engine: &mut Engine,
context: Tracked<Context>, context: Tracked<Context>,
numbers: &[usize], numbers: &[u64],
) -> SourceResult<Value> { ) -> SourceResult<Value> {
Ok(match self { Ok(match self {
Self::Pattern(pattern) => Value::Str(pattern.apply(numbers).into()), Self::Pattern(pattern) => Value::Str(pattern.apply(numbers).into()),
@ -156,7 +155,7 @@ pub struct NumberingPattern {
impl NumberingPattern { impl NumberingPattern {
/// Apply the pattern to the given number. /// Apply the pattern to the given number.
pub fn apply(&self, numbers: &[usize]) -> EcoString { pub fn apply(&self, numbers: &[u64]) -> EcoString {
let mut fmt = EcoString::new(); let mut fmt = EcoString::new();
let mut numbers = numbers.iter(); let mut numbers = numbers.iter();
@ -185,7 +184,7 @@ impl NumberingPattern {
} }
/// Apply only the k-th segment of the pattern to a number. /// Apply only the k-th segment of the pattern to a number.
pub fn apply_kth(&self, k: usize, number: usize) -> EcoString { pub fn apply_kth(&self, k: usize, number: u64) -> EcoString {
let mut fmt = EcoString::new(); let mut fmt = EcoString::new();
if let Some((prefix, _)) = self.pieces.first() { if let Some((prefix, _)) = self.pieces.first() {
fmt.push_str(prefix); fmt.push_str(prefix);
@ -261,9 +260,9 @@ pub enum NumberingKind {
LowerRoman, LowerRoman,
/// Uppercase Roman numerals (I, II, III, etc.). /// Uppercase Roman numerals (I, II, III, etc.).
UpperRoman, UpperRoman,
/// Lowercase Greek numerals (Α, Β, Γ, etc.). /// Lowercase Greek letters (α, β, γ, etc.).
LowerGreek, LowerGreek,
/// Uppercase Greek numerals (α, β, γ, etc.). /// Uppercase Greek letters (Α, Β, Γ, etc.).
UpperGreek, UpperGreek,
/// Paragraph/note-like symbols: *, †, ‡, §, ¶, and ‖. Further items use /// Paragraph/note-like symbols: *, †, ‡, §, ¶, and ‖. Further items use
/// repeated symbols. /// repeated symbols.
@ -379,205 +378,39 @@ impl NumberingKind {
} }
/// Apply the numbering to the given number. /// Apply the numbering to the given number.
pub fn apply(self, n: usize) -> EcoString { pub fn apply(self, n: u64) -> EcoString {
match self { match self {
Self::Arabic => eco_format!("{n}"), Self::Arabic => {
Self::LowerRoman => roman_numeral(n, Case::Lower), numeric(&['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'], n)
Self::UpperRoman => roman_numeral(n, Case::Upper),
Self::LowerGreek => greek_numeral(n, Case::Lower),
Self::UpperGreek => greek_numeral(n, Case::Upper),
Self::Symbol => {
if n == 0 {
return '-'.into();
} }
Self::LowerRoman => additive(
const SYMBOLS: &[char] = &['*', '†', '‡', '§', '¶', '‖']; &[
let symbol = SYMBOLS[(n - 1) % SYMBOLS.len()]; ("", 1000000),
let amount = ((n - 1) / SYMBOLS.len()) + 1; ("", 500000),
std::iter::repeat_n(symbol, amount).collect() ("", 100000),
} ("", 50000),
Self::Hebrew => hebrew_numeral(n), ("", 10000),
("", 5000),
Self::LowerLatin => zeroless( ("i̅v̅", 4000),
[ ("m", 1000),
'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', ("cm", 900),
'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', ("d", 500),
("cd", 400),
("c", 100),
("xc", 90),
("l", 50),
("xl", 40),
("x", 10),
("ix", 9),
("v", 5),
("iv", 4),
("i", 1),
("n", 0),
], ],
n, n,
), ),
Self::UpperLatin => zeroless( Self::UpperRoman => additive(
[ &[
'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N',
'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
],
n,
),
Self::HiraganaAiueo => zeroless(
[
'あ', 'い', 'う', 'え', 'お', 'か', 'き', 'く', 'け', 'こ', 'さ',
'し', 'す', 'せ', 'そ', 'た', 'ち', 'つ', 'て', 'と', 'な', 'に',
'ぬ', 'ね', 'の', 'は', 'ひ', 'ふ', 'へ', 'ほ', 'ま', 'み', 'む',
'め', 'も', 'や', 'ゆ', 'よ', 'ら', 'り', 'る', 'れ', 'ろ', 'わ',
'を', 'ん',
],
n,
),
Self::HiraganaIroha => zeroless(
[
'い', 'ろ', 'は', 'に', 'ほ', 'へ', 'と', 'ち', 'り', 'ぬ', 'る',
'を', 'わ', 'か', 'よ', 'た', 'れ', 'そ', 'つ', 'ね', 'な', 'ら',
'む', 'う', 'ゐ', 'の', 'お', 'く', 'や', 'ま', 'け', 'ふ', 'こ',
'え', 'て', 'あ', 'さ', 'き', 'ゆ', 'め', 'み', 'し', 'ゑ', 'ひ',
'も', 'せ', 'す',
],
n,
),
Self::KatakanaAiueo => zeroless(
[
'ア', 'イ', 'ウ', 'エ', 'オ', 'カ', 'キ', 'ク', 'ケ', 'コ', 'サ',
'シ', 'ス', 'セ', 'ソ', 'タ', 'チ', 'ツ', 'テ', 'ト', 'ナ', 'ニ',
'ヌ', 'ネ', '', 'ハ', 'ヒ', 'フ', 'ヘ', 'ホ', 'マ', 'ミ', 'ム',
'メ', 'モ', 'ヤ', 'ユ', 'ヨ', 'ラ', 'リ', 'ル', 'レ', 'ロ', 'ワ',
'ヲ', 'ン',
],
n,
),
Self::KatakanaIroha => zeroless(
[
'イ', 'ロ', 'ハ', 'ニ', 'ホ', 'ヘ', 'ト', 'チ', 'リ', 'ヌ', 'ル',
'ヲ', 'ワ', 'カ', 'ヨ', 'タ', 'レ', 'ソ', 'ツ', 'ネ', 'ナ', 'ラ',
'ム', 'ウ', 'ヰ', '', 'オ', 'ク', 'ヤ', 'マ', 'ケ', 'フ', 'コ',
'エ', 'テ', 'ア', 'サ', 'キ', 'ユ', 'メ', 'ミ', 'シ', 'ヱ', 'ヒ',
'モ', 'セ', 'ス',
],
n,
),
Self::KoreanJamo => zeroless(
[
'ㄱ', 'ㄴ', 'ㄷ', 'ㄹ', 'ㅁ', 'ㅂ', 'ㅅ', 'ㅇ', 'ㅈ', 'ㅊ', 'ㅋ',
'ㅌ', 'ㅍ', 'ㅎ',
],
n,
),
Self::KoreanSyllable => zeroless(
[
'가', '나', '다', '라', '마', '바', '사', '아', '자', '차', '카',
'타', '파', '하',
],
n,
),
Self::BengaliLetter => zeroless(
[
'ক', 'খ', 'গ', 'ঘ', 'ঙ', 'চ', 'ছ', 'জ', 'ঝ', 'ঞ', 'ট', 'ঠ', 'ড', 'ঢ',
'ণ', 'ত', 'থ', 'দ', 'ধ', 'ন', 'প', 'ফ', 'ব', 'ভ', 'ম', 'য', 'র', 'ল',
'শ', 'ষ', 'স', 'হ',
],
n,
),
Self::CircledNumber => zeroless(
[
'①', '②', '③', '④', '⑤', '⑥', '⑦', '⑧', '⑨', '⑩', '⑪', '⑫', '⑬', '⑭',
'⑮', '⑯', '⑰', '⑱', '⑲', '⑳', '㉑', '㉒', '㉓', '㉔', '㉕', '㉖',
'㉗', '㉘', '㉙', '㉚', '㉛', '㉜', '㉝', '㉞', '㉟', '㊱', '㊲',
'㊳', '㊴', '㊵', '㊶', '㊷', '㊸', '㊹', '㊺', '㊻', '㊼', '㊽',
'㊾', '㊿',
],
n,
),
Self::DoubleCircledNumber => {
zeroless(['⓵', '⓶', '⓷', '⓸', '⓹', '⓺', '⓻', '⓼', '⓽', '⓾'], n)
}
Self::LowerSimplifiedChinese => {
usize_to_chinese(ChineseVariant::Simple, ChineseCase::Lower, n).into()
}
Self::UpperSimplifiedChinese => {
usize_to_chinese(ChineseVariant::Simple, ChineseCase::Upper, n).into()
}
Self::LowerTraditionalChinese => {
usize_to_chinese(ChineseVariant::Traditional, ChineseCase::Lower, n)
.into()
}
Self::UpperTraditionalChinese => {
usize_to_chinese(ChineseVariant::Traditional, ChineseCase::Upper, n)
.into()
}
Self::EasternArabic => decimal('\u{0660}', n),
Self::EasternArabicPersian => decimal('\u{06F0}', n),
Self::DevanagariNumber => decimal('\u{0966}', n),
Self::BengaliNumber => decimal('\u{09E6}', n),
}
}
}
/// Stringify an integer to a Hebrew number.
fn hebrew_numeral(mut n: usize) -> EcoString {
if n == 0 {
return '-'.into();
}
let mut fmt = EcoString::new();
'outer: for (name, value) in [
('ת', 400),
('ש', 300),
('ר', 200),
('ק', 100),
('צ', 90),
('פ', 80),
('ע', 70),
('ס', 60),
('נ', 50),
('מ', 40),
('ל', 30),
('כ', 20),
('י', 10),
('ט', 9),
('ח', 8),
('ז', 7),
('ו', 6),
('ה', 5),
('ד', 4),
('ג', 3),
('ב', 2),
('א', 1),
] {
while n >= value {
match n {
15 => fmt.push_str("ט״ו"),
16 => fmt.push_str("ט״ז"),
_ => {
let append_geresh = n == value && fmt.is_empty();
if n == value && !fmt.is_empty() {
fmt.push('״');
}
fmt.push(name);
if append_geresh {
fmt.push('׳');
}
n -= value;
continue;
}
}
break 'outer;
}
}
fmt
}
/// Stringify an integer to a Roman numeral.
fn roman_numeral(mut n: usize, case: Case) -> EcoString {
if n == 0 {
return match case {
Case::Lower => 'n'.into(),
Case::Upper => 'N'.into(),
};
}
// Adapted from Yann Villessuzanne's roman.rs under the
// Unlicense, at https://github.com/linfir/roman.rs/
let mut fmt = EcoString::new();
for &(name, value) in &[
("", 1000000), ("", 1000000),
("", 500000), ("", 500000),
("", 100000), ("", 100000),
@ -598,213 +431,394 @@ fn roman_numeral(mut n: usize, case: Case) -> EcoString {
("V", 5), ("V", 5),
("IV", 4), ("IV", 4),
("I", 1), ("I", 1),
] { ("N", 0),
while n >= value { ],
n -= value; n,
for c in name.chars() { ),
match case { Self::LowerGreek => additive(
Case::Lower => fmt.extend(c.to_lowercase()), &[
Case::Upper => fmt.push(c), ("͵θ", 9000),
} ("͵η", 8000),
} ("͵ζ", 7000),
} ("͵ϛ", 6000),
("͵ε", 5000),
("͵δ", 4000),
("͵γ", 3000),
("͵β", 2000),
("͵α", 1000),
("ϡ", 900),
("ω", 800),
("ψ", 700),
("χ", 600),
("φ", 500),
("υ", 400),
("τ", 300),
("σ", 200),
("ρ", 100),
("ϟ", 90),
("π", 80),
("ο", 70),
("ξ", 60),
("ν", 50),
("μ", 40),
("λ", 30),
("κ", 20),
("ι", 10),
("θ", 9),
("η", 8),
("ζ", 7),
("ϛ", 6),
("ε", 5),
("δ", 4),
("γ", 3),
("β", 2),
("α", 1),
("𐆊", 0),
],
n,
),
Self::UpperGreek => additive(
&[
("͵Θ", 9000),
("͵Η", 8000),
("͵Ζ", 7000),
("͵Ϛ", 6000),
("͵Ε", 5000),
("͵Δ", 4000),
("͵Γ", 3000),
("͵Β", 2000),
("͵Α", 1000),
("Ϡ", 900),
("Ω", 800),
("Ψ", 700),
("Χ", 600),
("Φ", 500),
("Υ", 400),
("Τ", 300),
("Σ", 200),
("Ρ", 100),
("Ϟ", 90),
("Π", 80),
("Ο", 70),
("Ξ", 60),
("Ν", 50),
("Μ", 40),
("Λ", 30),
("Κ", 20),
("Ι", 10),
("Θ", 9),
("Η", 8),
("Ζ", 7),
("Ϛ", 6),
("Ε", 5),
("Δ", 4),
("Γ", 3),
("Β", 2),
("Α", 1),
("𐆊", 0),
],
n,
),
Self::Hebrew => additive(
&[
("ת", 400),
("ש", 300),
("ר", 200),
("ק", 100),
("צ", 90),
("פ", 80),
("ע", 70),
("ס", 60),
("נ", 50),
("מ", 40),
("ל", 30),
("כ", 20),
("יט", 19),
("יח", 18),
("יז", 17),
("טז", 16),
("טו", 15),
("י", 10),
("ט", 9),
("ח", 8),
("ז", 7),
("ו", 6),
("ה", 5),
("ד", 4),
("ג", 3),
("ב", 2),
("א", 1),
("-", 0),
],
n,
),
Self::LowerLatin => alphabetic(
&[
'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n',
'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',
],
n,
),
Self::UpperLatin => alphabetic(
&[
'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N',
'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
],
n,
),
Self::HiraganaAiueo => alphabetic(
&[
'あ', 'い', 'う', 'え', 'お', 'か', 'き', 'く', 'け', 'こ', 'さ',
'し', 'す', 'せ', 'そ', 'た', 'ち', 'つ', 'て', 'と', 'な', 'に',
'ぬ', 'ね', 'の', 'は', 'ひ', 'ふ', 'へ', 'ほ', 'ま', 'み', 'む',
'め', 'も', 'や', 'ゆ', 'よ', 'ら', 'り', 'る', 'れ', 'ろ', 'わ',
'を', 'ん',
],
n,
),
Self::HiraganaIroha => alphabetic(
&[
'い', 'ろ', 'は', 'に', 'ほ', 'へ', 'と', 'ち', 'り', 'ぬ', 'る',
'を', 'わ', 'か', 'よ', 'た', 'れ', 'そ', 'つ', 'ね', 'な', 'ら',
'む', 'う', 'ゐ', 'の', 'お', 'く', 'や', 'ま', 'け', 'ふ', 'こ',
'え', 'て', 'あ', 'さ', 'き', 'ゆ', 'め', 'み', 'し', 'ゑ', 'ひ',
'も', 'せ', 'す',
],
n,
),
Self::KatakanaAiueo => alphabetic(
&[
'ア', 'イ', 'ウ', 'エ', 'オ', 'カ', 'キ', 'ク', 'ケ', 'コ', 'サ',
'シ', 'ス', 'セ', 'ソ', 'タ', 'チ', 'ツ', 'テ', 'ト', 'ナ', 'ニ',
'ヌ', 'ネ', '', 'ハ', 'ヒ', 'フ', 'ヘ', 'ホ', 'マ', 'ミ', 'ム',
'メ', 'モ', 'ヤ', 'ユ', 'ヨ', 'ラ', 'リ', 'ル', 'レ', 'ロ', 'ワ',
'ヲ', 'ン',
],
n,
),
Self::KatakanaIroha => alphabetic(
&[
'イ', 'ロ', 'ハ', 'ニ', 'ホ', 'ヘ', 'ト', 'チ', 'リ', 'ヌ', 'ル',
'ヲ', 'ワ', 'カ', 'ヨ', 'タ', 'レ', 'ソ', 'ツ', 'ネ', 'ナ', 'ラ',
'ム', 'ウ', 'ヰ', '', 'オ', 'ク', 'ヤ', 'マ', 'ケ', 'フ', 'コ',
'エ', 'テ', 'ア', 'サ', 'キ', 'ユ', 'メ', 'ミ', 'シ', 'ヱ', 'ヒ',
'モ', 'セ', 'ス',
],
n,
),
Self::KoreanJamo => alphabetic(
&[
'ㄱ', 'ㄴ', 'ㄷ', 'ㄹ', 'ㅁ', 'ㅂ', 'ㅅ', 'ㅇ', 'ㅈ', 'ㅊ', 'ㅋ',
'ㅌ', 'ㅍ', 'ㅎ',
],
n,
),
Self::KoreanSyllable => alphabetic(
&[
'가', '나', '다', '라', '마', '바', '사', '아', '자', '차', '카',
'타', '파', '하',
],
n,
),
Self::BengaliLetter => alphabetic(
&[
'ক', 'খ', 'গ', 'ঘ', 'ঙ', 'চ', 'ছ', 'জ', 'ঝ', 'ঞ', 'ট', 'ঠ', 'ড', 'ঢ',
'ণ', 'ত', 'থ', 'দ', 'ধ', 'ন', 'প', 'ফ', 'ব', 'ভ', 'ম', 'য', 'র', 'ল',
'শ', 'ষ', 'স', 'হ',
],
n,
),
Self::CircledNumber => fixed(
&[
'⓪', '①', '②', '③', '④', '⑤', '⑥', '⑦', '⑧', '⑨', '⑩', '⑪', '⑫', '⑬',
'⑭', '⑮', '⑯', '⑰', '⑱', '⑲', '⑳', '㉑', '㉒', '㉓', '㉔', '㉕',
'㉖', '㉗', '㉘', '㉙', '㉚', '㉛', '㉜', '㉝', '㉞', '㉟', '㊱',
'㊲', '㊳', '㊴', '㊵', '㊶', '㊷', '㊸', '㊹', '㊺', '㊻', '㊼',
'㊽', '㊾', '㊿',
],
n,
),
Self::DoubleCircledNumber => {
fixed(&['0', '⓵', '⓶', '⓷', '⓸', '⓹', '⓺', '⓻', '⓼', '⓽', '⓾'], n)
} }
fmt Self::LowerSimplifiedChinese => {
u64_to_chinese(ChineseVariant::Simple, ChineseCase::Lower, n).into()
}
Self::UpperSimplifiedChinese => {
u64_to_chinese(ChineseVariant::Simple, ChineseCase::Upper, n).into()
}
Self::LowerTraditionalChinese => {
u64_to_chinese(ChineseVariant::Traditional, ChineseCase::Lower, n).into()
}
Self::UpperTraditionalChinese => {
u64_to_chinese(ChineseVariant::Traditional, ChineseCase::Upper, n).into()
}
Self::EasternArabic => {
numeric(&['٠', '١', '٢', '٣', '٤', '٥', '٦', '٧', '٨', '٩'], n)
}
Self::EasternArabicPersian => {
numeric(&['۰', '۱', '۲', '۳', '۴', '۵', '۶', '۷', '۸', '۹'], n)
}
Self::DevanagariNumber => {
numeric(&['', '१', '२', '३', '४', '५', '६', '७', '८', '९'], n)
}
Self::BengaliNumber => {
numeric(&['', '১', '২', '৩', '', '৫', '৬', '', '৮', '৯'], n)
}
Self::Symbol => symbolic(&['*', '†', '‡', '§', '¶', '‖'], n),
}
}
} }
/// Stringify an integer to Greek numbers. /// Stringify a number using symbols representing values. The decimal
/// representation of the number is recovered by summing over the values of the
/// symbols present.
/// ///
/// Greek numbers use the Greek Alphabet to represent numbers; it is based on 10 /// Consider the situation where ['I': 1, 'IV': 4, 'V': 5],
/// (decimal). Here we implement the single digit M power representation from
/// [The Greek Number Converter][convert] and also described in
/// [Greek Numbers][numbers].
///
/// [converter]: https://www.russellcottrell.com/greek/utilities/GreekNumberConverter.htm
/// [numbers]: https://mathshistory.st-andrews.ac.uk/HistTopics/Greek_numbers/
fn greek_numeral(n: usize, case: Case) -> EcoString {
let thousands = [
["͵α", "͵Α"],
["͵β", "͵Β"],
["͵γ", "͵Γ"],
["͵δ", "͵Δ"],
["͵ε", "͵Ε"],
["͵ϛ", "͵Ϛ"],
["͵ζ", "͵Ζ"],
["͵η", "͵Η"],
["͵θ", "͵Θ"],
];
let hundreds = [
["ρ", "Ρ"],
["σ", "Σ"],
["τ", "Τ"],
["υ", "Υ"],
["φ", "Φ"],
["χ", "Χ"],
["ψ", "Ψ"],
["ω", "Ω"],
["ϡ", "Ϡ"],
];
let tens = [
["ι", "Ι"],
["κ", "Κ"],
["λ", "Λ"],
["μ", "Μ"],
["ν", "Ν"],
["ξ", "Ξ"],
["ο", "Ο"],
["π", "Π"],
["ϙ", "Ϟ"],
];
let ones = [
["α", "Α"],
["β", "Β"],
["γ", "Γ"],
["δ", "Δ"],
["ε", "Ε"],
["ϛ", "Ϛ"],
["ζ", "Ζ"],
["η", "Η"],
["θ", "Θ"],
];
if n == 0 {
// Greek Zero Sign
return '𐆊'.into();
}
let mut fmt = EcoString::new();
let case = match case {
Case::Lower => 0,
Case::Upper => 1,
};
// Extract a list of decimal digits from the number
let mut decimal_digits: Vec<usize> = Vec::new();
let mut n = n;
while n > 0 {
decimal_digits.push(n % 10);
n /= 10;
}
// Pad the digits with leading zeros to ensure we can form groups of 4
while decimal_digits.len() % 4 != 0 {
decimal_digits.push(0);
}
decimal_digits.reverse();
let mut m_power = decimal_digits.len() / 4;
// M are used to represent 10000, M_power = 2 means 10000^2 = 10000 0000
// The prefix of M is also made of Greek numerals but only be single digits, so it is 9 at max. This enables us
// to represent up to (10000)^(9 + 1) - 1 = 10^40 -1 (9,999,999,999,999,999,999,999,999,999,999,999,999,999)
let get_m_prefix = |m_power: usize| {
if m_power == 0 {
None
} else {
assert!(m_power <= 9);
// the prefix of M is a single digit lowercase
Some(ones[m_power - 1][0])
}
};
let mut previous_has_number = false;
for chunk in decimal_digits.chunks_exact(4) {
// chunk must be exact 4 item
assert_eq!(chunk.len(), 4);
m_power = m_power.saturating_sub(1);
// `th`ousan, `h`undred, `t`en and `o`ne
let (th, h, t, o) = (chunk[0], chunk[1], chunk[2], chunk[3]);
if th + h + t + o == 0 {
continue;
}
if previous_has_number {
fmt.push_str(", ");
}
if let Some(m_prefix) = get_m_prefix(m_power) {
fmt.push_str(m_prefix);
fmt.push_str("Μ");
}
if th != 0 {
let thousand_digit = thousands[th - 1][case];
fmt.push_str(thousand_digit);
}
if h != 0 {
let hundred_digit = hundreds[h - 1][case];
fmt.push_str(hundred_digit);
}
if t != 0 {
let ten_digit = tens[t - 1][case];
fmt.push_str(ten_digit);
}
if o != 0 {
let one_digit = ones[o - 1][case];
fmt.push_str(one_digit);
}
// if we do not have thousan, we need to append 'ʹ' at the end.
if th == 0 {
fmt.push_str("ʹ");
}
previous_has_number = true;
}
fmt
}
/// Stringify a number using a base-N counting system with no zero digit.
///
/// This is best explained by example. Suppose our digits are 'A', 'B', and 'C'.
/// We would get the following:
/// ///
/// ```text /// ```text
/// 1 => "A" /// 1 => 'I'
/// 2 => "B" /// 2 => 'II'
/// 3 => "C" /// 3 => 'III'
/// 4 => "AA" /// 4 => 'IV'
/// 5 => "AB" /// 5 => 'V'
/// 6 => "AC" /// 6 => 'VI'
/// 7 => "BA" /// 7 => 'VII'
/// 8 => "BB" /// 8 => 'VIII'
/// 9 => "BC"
/// 10 => "CA"
/// 11 => "CB"
/// 12 => "CC"
/// 13 => "AAA"
/// etc.
/// ``` /// ```
/// ///
/// You might be familiar with this scheme from the way spreadsheet software /// where this is the start of the familiar Roman numeral system.
/// tends to label its columns. fn additive(symbols: &[(&str, u64)], mut n: u64) -> EcoString {
fn zeroless<const N_DIGITS: usize>( if n == 0 {
alphabet: [char; N_DIGITS], if let Some(&(symbol, 0)) = symbols.last() {
mut n: usize, return symbol.into();
) -> EcoString { }
return '0'.into();
}
let mut s = EcoString::new();
for (symbol, weight) in symbols {
if *weight == 0 || *weight > n {
continue;
}
let reps = n / weight;
for _ in 0..reps {
s.push_str(symbol);
}
n -= weight * reps;
if n == 0 {
return s;
}
}
s
}
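The new `additive` helper renders a number by greedily consuming weighted symbols, which is how the Roman, Greek, and Hebrew tables above are evaluated. A minimal standalone sketch of the same algorithm, using `String` instead of `EcoString` and a trimmed Roman table:

```rust
/// Greedy additive numbering, as used for Roman, Greek, and Hebrew numerals.
/// Standalone sketch with a reduced symbol table; not Typst's internal API.
fn additive(symbols: &[(&str, u64)], mut n: u64) -> String {
    if n == 0 {
        // A trailing zero-weight entry, if present, is the zero symbol.
        if let Some(&(symbol, 0)) = symbols.last() {
            return symbol.into();
        }
        return "0".into();
    }
    let mut s = String::new();
    for &(symbol, weight) in symbols {
        if weight == 0 || weight > n {
            continue;
        }
        for _ in 0..n / weight {
            s.push_str(symbol);
        }
        n %= weight;
        if n == 0 {
            break;
        }
    }
    s
}

fn main() {
    let roman = [("x", 10), ("ix", 9), ("v", 5), ("iv", 4), ("i", 1), ("n", 0)];
    assert_eq!(additive(&roman, 0), "n");
    assert_eq!(additive(&roman, 14), "xiv");
    assert_eq!(additive(&roman, 39), "xxxix");
}
```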
/// Stringify a number using a base-n (where n is the number of provided
/// symbols) system without a zero symbol.
///
/// Consider the situation where ['A', 'B', 'C'] are the provided symbols,
///
/// ```text
/// 1 => 'A'
/// 2 => 'B'
/// 3 => 'C'
/// 4 => 'AA'
/// 5 => 'AB'
/// 6 => 'AC'
/// 7 => 'BA'
/// ...
/// ```
///
/// This system is commonly used in spreadsheet software.
fn alphabetic(symbols: &[char], mut n: u64) -> EcoString {
let n_digits = symbols.len() as u64;
if n == 0 { if n == 0 {
return '-'.into(); return '-'.into();
} }
let mut cs = EcoString::new(); let mut s = EcoString::new();
while n > 0 { while n != 0 {
n -= 1; n -= 1;
cs.push(alphabet[n % N_DIGITS]); s.push(symbols[(n % n_digits) as usize]);
n /= N_DIGITS; n /= n_digits;
} }
cs.chars().rev().collect() s.chars().rev().collect()
} }
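`alphabetic` implements bijective base-n numbering without a zero digit, the scheme spreadsheet columns use. A standalone sketch with the Latin alphabet as the symbol set:

```rust
/// Bijective base-n numbering without a zero digit, as used for spreadsheet
/// column labels. Standalone sketch mirroring the `alphabetic` helper above.
fn alphabetic(symbols: &[char], mut n: u64) -> String {
    let base = symbols.len() as u64;
    if n == 0 {
        return "-".into();
    }
    let mut out = Vec::new();
    while n != 0 {
        n -= 1;
        out.push(symbols[(n % base) as usize]);
        n /= base;
    }
    out.into_iter().rev().collect()
}

fn main() {
    let latin: Vec<char> = ('a'..='z').collect();
    assert_eq!(alphabetic(&latin, 1), "a");
    assert_eq!(alphabetic(&latin, 26), "z");
    assert_eq!(alphabetic(&latin, 27), "aa");
    assert_eq!(alphabetic(&latin, 28), "ab");
}
```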
/// Stringify a number using a base-10 counting system with a zero digit. /// Stringify a number using the symbols provided, defaulting to the arabic
/// representation when the number is greater than the number of symbols.
/// ///
/// This function assumes that the digits occupy contiguous codepoints. /// Consider the situation where ['0', 'A', 'B', 'C'] are the provided symbols,
fn decimal(start: char, mut n: usize) -> EcoString { ///
if n == 0 { /// ```text
return start.into(); /// 0 => '0'
/// 1 => 'A'
/// 2 => 'B'
/// 3 => 'C'
/// 4 => '4'
/// ...
/// n => 'n'
/// ```
fn fixed(symbols: &[char], n: u64) -> EcoString {
let n_digits = symbols.len() as u64;
if n < n_digits {
return symbols[(n) as usize].into();
} }
let mut cs = EcoString::new(); eco_format!("{n}")
while n > 0 { }
cs.push(char::from_u32((start as u32) + ((n % 10) as u32)).unwrap());
n /= 10; /// Stringify a number using a base-n (where n is the number of provided
} /// symbols) system with a zero symbol.
cs.chars().rev().collect() ///
/// Consider the situation where ['0', '1', '2'] are the provided symbols,
///
/// ```text
/// 0 => '0'
/// 1 => '1'
/// 2 => '2'
/// 3 => '10'
/// 4 => '11'
/// 5 => '12'
/// 6 => '20'
/// ...
/// ```
///
/// which is the familiar trinary counting system.
fn numeric(symbols: &[char], mut n: u64) -> EcoString {
let n_digits = symbols.len() as u64;
if n == 0 {
return symbols[0].into();
}
let mut s = EcoString::new();
while n != 0 {
s.push(symbols[(n % n_digits) as usize]);
n /= n_digits;
}
s.chars().rev().collect()
}
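`numeric` is ordinary positional base-n with a zero digit; only the digit glyphs change between scripts. A standalone sketch reusing two of the digit tables introduced above:

```rust
/// Positional base-n numbering with a zero digit. Standalone sketch of the
/// `numeric` helper; the digit tables are the ones added in this change.
fn numeric(symbols: &[char], mut n: u64) -> String {
    let base = symbols.len() as u64;
    if n == 0 {
        return symbols[0].into();
    }
    let mut digits = Vec::new();
    while n != 0 {
        digits.push(symbols[(n % base) as usize]);
        n /= base;
    }
    digits.into_iter().rev().collect()
}

fn main() {
    let arabic = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'];
    let eastern = ['٠', '١', '٢', '٣', '٤', '٥', '٦', '٧', '٨', '٩'];
    assert_eq!(numeric(&arabic, 2025), "2025");
    assert_eq!(numeric(&eastern, 105), "١٠٥");
}
```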
/// Stringify a number using repeating symbols.
///
/// Consider the situation where ['A', 'B', 'C'] are the provided symbols,
///
/// ```text
/// 0 => '-'
/// 1 => 'A'
/// 2 => 'B'
/// 3 => 'C'
/// 4 => 'AA'
/// 5 => 'BB'
/// 6 => 'CC'
/// 7 => 'AAA'
/// ...
/// ```
fn symbolic(symbols: &[char], n: u64) -> EcoString {
let n_digits = symbols.len() as u64;
if n == 0 {
return '-'.into();
}
EcoString::from(symbols[((n - 1) % n_digits) as usize])
.repeat((n.div_ceil(n_digits)) as usize)
} }

View File

@@ -21,9 +21,10 @@ use crate::text::TextElem;
 ///
 /// The default, a `{"normal"}` reference, produces a textual reference to a
 /// label. For example, a reference to a heading will yield an appropriate
-/// string such as "Section 1" for a reference to the first heading. The
-/// references are also links to the respective element. Reference syntax can
-/// also be used to [cite] from a bibliography.
+/// string such as "Section 1" for a reference to the first heading. The word
+/// "Section" depends on the [`lang`]($text.lang) setting and is localized
+/// accordingly. The references are also links to the respective element.
+/// Reference syntax can also be used to [cite] from a bibliography.
 ///
 /// As the default form requires a supplement and numbering, the label must be
 /// attached to a _referenceable element_. Referenceable elements include

View File

@ -1,4 +1,4 @@
use std::num::NonZeroUsize; use std::num::{NonZeroU32, NonZeroUsize};
use std::sync::Arc; use std::sync::Arc;
use typst_utils::NonZeroExt; use typst_utils::NonZeroExt;
@ -292,16 +292,61 @@ fn show_cellgrid_html(grid: CellGrid, styles: StyleChain) -> Content {
elem(tag::tr, Content::sequence(row)) elem(tag::tr, Content::sequence(row))
}; };
// TODO(subfooters): similarly to headers, take consecutive footers from
// the end for 'tfoot'.
let footer = grid.footer.map(|ft| { let footer = grid.footer.map(|ft| {
let rows = rows.drain(ft.unwrap().start..); let rows = rows.drain(ft.start..);
elem(tag::tfoot, Content::sequence(rows.map(|row| tr(tag::td, row)))) elem(tag::tfoot, Content::sequence(rows.map(|row| tr(tag::td, row))))
}); });
let header = grid.header.map(|hd| {
let rows = rows.drain(..hd.unwrap().end);
elem(tag::thead, Content::sequence(rows.map(|row| tr(tag::th, row))))
});
let mut body = Content::sequence(rows.into_iter().map(|row| tr(tag::td, row))); // Store all consecutive headers at the start in 'thead'. All remaining
// headers are just 'th' rows across the table body.
let mut consecutive_header_end = 0;
let first_mid_table_header = grid
.headers
.iter()
.take_while(|hd| {
let is_consecutive = hd.range.start == consecutive_header_end;
consecutive_header_end = hd.range.end;
is_consecutive
})
.count();
let (y_offset, header) = if first_mid_table_header > 0 {
let removed_header_rows =
grid.headers.get(first_mid_table_header - 1).unwrap().range.end;
let rows = rows.drain(..removed_header_rows);
(
removed_header_rows,
Some(elem(tag::thead, Content::sequence(rows.map(|row| tr(tag::th, row))))),
)
} else {
(0, None)
};
// TODO: Consider improving accessibility properties of multi-level headers
// inside tables in the future, e.g. indicating which columns they are
// relative to and so on. See also:
// https://www.w3.org/WAI/tutorials/tables/multi-level/
let mut next_header = first_mid_table_header;
let mut body =
Content::sequence(rows.into_iter().enumerate().map(|(relative_y, row)| {
let y = relative_y + y_offset;
if let Some(current_header) =
grid.headers.get(next_header).filter(|h| h.range.contains(&y))
{
if y + 1 == current_header.range.end {
next_header += 1;
}
tr(tag::th, row)
} else {
tr(tag::td, row)
}
}));
if header.is_some() || footer.is_some() { if header.is_some() || footer.is_some() {
body = elem(tag::tbody, body); body = elem(tag::tbody, body);
} }
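The HTML table export now puts only the gapless block of headers at the top of the grid into `thead`; any later header becomes `th` rows inside the body. A standalone sketch of the consecutive-block count, using plain `Range<usize>` values in place of the grid's header type:

```rust
use std::ops::Range;

/// Count how many header ranges form one gapless block starting at row 0.
/// Standalone sketch of the consecutive-header check; `headers` stands in
/// for the grid's header list.
fn leading_consecutive(headers: &[Range<usize>]) -> usize {
    let mut end = 0;
    headers
        .iter()
        .take_while(|range| {
            let consecutive = range.start == end;
            end = range.end;
            consecutive
        })
        .count()
}

fn main() {
    // Rows 0..2 and 2..3 are consecutive; the header at 5..6 sits mid-table.
    let headers = vec![0..2, 2..3, 5..6];
    assert_eq!(leading_consecutive(&headers), 2);
}
```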
@ -492,6 +537,17 @@ pub struct TableHeader {
#[default(true)] #[default(true)]
pub repeat: bool, pub repeat: bool,
/// The level of the header. Must not be zero.
///
/// This allows repeating multiple headers at once. Headers with different
/// levels can repeat together, as long as they have ascending levels.
///
/// Notably, when a header with a lower level starts repeating, all higher
/// or equal level headers stop repeating (they are "replaced" by the new
/// header).
#[default(NonZeroU32::ONE)]
pub level: NonZeroU32,
/// The cells and lines within the header. /// The cells and lines within the header.
#[variadic] #[variadic]
pub children: Vec<TableItem>, pub children: Vec<TableItem>,
@ -770,7 +826,14 @@ impl Show for Packed<TableCell> {
impl Default for Packed<TableCell> { impl Default for Packed<TableCell> {
fn default() -> Self { fn default() -> Self {
Packed::new(TableCell::new(Content::default())) Packed::new(
// Explicitly set colspan and rowspan to ensure they won't be
// overridden by set rules (default cells are created after
// colspans and rowspans are processed in the resolver)
TableCell::new(Content::default())
.with_colspan(NonZeroUsize::ONE)
.with_rowspan(NonZeroUsize::ONE),
)
} }
} }

View File

@@ -189,13 +189,15 @@ impl Show for Packed<TermsElem> {
             .styled(TermsElem::set_within(true));
 
         if tight {
-            let leading = ParElem::leading_in(styles);
-            let spacing = VElem::new(leading.into())
+            let spacing = self
+                .spacing(styles)
+                .unwrap_or_else(|| ParElem::leading_in(styles).into());
+            let v = VElem::new(spacing.into())
                 .with_weak(true)
                 .with_attach(true)
                 .pack()
                 .spanned(span);
-            realized = spacing + realized;
+            realized = v + realized;
         }
 
         Ok(realized)

View File

@ -55,6 +55,7 @@ routines! {
fn eval_string( fn eval_string(
routines: &Routines, routines: &Routines,
world: Tracked<dyn World + '_>, world: Tracked<dyn World + '_>,
sink: TrackedMut<Sink>,
string: &str, string: &str,
span: Span, span: Span,
mode: EvalMode, mode: EvalMode,

View File

@ -373,6 +373,7 @@ pub struct Decoration {
/// A kind of decorative line. /// A kind of decorative line.
#[derive(Debug, Clone, Eq, PartialEq, Hash)] #[derive(Debug, Clone, Eq, PartialEq, Hash)]
#[allow(clippy::large_enum_variant)]
pub enum DecoLine { pub enum DecoLine {
Underline { Underline {
stroke: Stroke<Abs>, stroke: Stroke<Abs>,

View File

@ -194,6 +194,8 @@ bitflags::bitflags! {
const MONOSPACE = 1 << 0; const MONOSPACE = 1 << 0;
/// Glyphs have short strokes at their stems. /// Glyphs have short strokes at their stems.
const SERIF = 1 << 1; const SERIF = 1 << 1;
/// Font face has a MATH table
const MATH = 1 << 2;
} }
} }
@ -272,6 +274,7 @@ impl FontInfo {
let mut flags = FontFlags::empty(); let mut flags = FontFlags::empty();
flags.set(FontFlags::MONOSPACE, ttf.is_monospaced()); flags.set(FontFlags::MONOSPACE, ttf.is_monospaced());
flags.set(FontFlags::MATH, ttf.tables().math.is_some());
// Determine whether this is a serif or sans-serif font. // Determine whether this is a serif or sans-serif font.
if let Some(panose) = ttf if let Some(panose) = ttf

View File

@ -106,13 +106,26 @@ impl Font {
} }
/// Look up the horizontal advance width of a glyph. /// Look up the horizontal advance width of a glyph.
pub fn advance(&self, glyph: u16) -> Option<Em> { pub fn x_advance(&self, glyph: u16) -> Option<Em> {
self.0 self.0
.ttf .ttf
.glyph_hor_advance(GlyphId(glyph)) .glyph_hor_advance(GlyphId(glyph))
.map(|units| self.to_em(units)) .map(|units| self.to_em(units))
} }
/// Look up the vertical advance width of a glyph.
pub fn y_advance(&self, glyph: u16) -> Option<Em> {
self.0
.ttf
.glyph_ver_advance(GlyphId(glyph))
.map(|units| self.to_em(units))
}
/// Look up the width of a space.
pub fn space_width(&self) -> Option<Em> {
self.0.ttf.glyph_index(' ').and_then(|id| self.x_advance(id.0))
}
/// Lookup a name by id. /// Lookup a name by id.
pub fn find_name(&self, id: u16) -> Option<String> { pub fn find_name(&self, id: u16) -> Option<String> {
find_name(&self.0.ttf, id) find_name(&self.0.ttf, id)
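The new `x_advance`/`y_advance`/`space_width` helpers are thin wrappers over ttf-parser lookups. A rough standalone sketch of the same calls on a `ttf_parser::Face` (the font path is a placeholder, and advances are left in raw font units rather than converted to em values as `to_em` does):

```rust
use ttf_parser::Face;

// Standalone sketch of the lookups behind the new `x_advance`/`space_width`
// helpers; divide by `units_per_em` to obtain em-relative values.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let data = std::fs::read("/path/to/font.ttf")?; // placeholder path
    let face = Face::parse(&data, 0)?;

    if let Some(space) = face.glyph_index(' ') {
        let advance = face.glyph_hor_advance(space);
        println!("space advance: {advance:?} font units");
    }
    Ok(())
}
```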

View File

@ -35,6 +35,11 @@ impl TextItem {
pub fn width(&self) -> Abs { pub fn width(&self) -> Abs {
self.glyphs.iter().map(|g| g.x_advance).sum::<Em>().at(self.size) self.glyphs.iter().map(|g| g.x_advance).sum::<Em>().at(self.size)
} }
/// The height of the text run.
pub fn height(&self) -> Abs {
self.glyphs.iter().map(|g| g.y_advance).sum::<Em>().at(self.size)
}
} }
impl Debug for TextItem { impl Debug for TextItem {
@ -54,6 +59,10 @@ pub struct Glyph {
pub x_advance: Em, pub x_advance: Em,
/// The horizontal offset of the glyph. /// The horizontal offset of the glyph.
pub x_offset: Em, pub x_offset: Em,
/// The advance height (Y-up) of the glyph.
pub y_advance: Em,
/// The vertical offset (Y-up) of the glyph.
pub y_offset: Em,
/// The range of the glyph in its item's text. The range's length may /// The range of the glyph in its item's text. The range's length may
/// be more than one due to multi-byte UTF-8 encoding or ligatures. /// be more than one due to multi-byte UTF-8 encoding or ligatures.
pub range: Range<u16>, pub range: Range<u16>,
@ -115,4 +124,13 @@ impl<'a> TextItemView<'a> {
.sum::<Em>() .sum::<Em>()
.at(self.item.size) .at(self.item.size)
} }
/// The total height of this text slice
pub fn height(&self) -> Abs {
self.glyphs()
.iter()
.map(|g| g.y_advance)
.sum::<Em>()
.at(self.item.size)
}
} }

View File

@ -14,7 +14,7 @@ macro_rules! translation {
}; };
} }
const TRANSLATIONS: [(&str, &str); 38] = [ const TRANSLATIONS: [(&str, &str); 40] = [
translation!("ar"), translation!("ar"),
translation!("bg"), translation!("bg"),
translation!("ca"), translation!("ca"),
@ -31,10 +31,12 @@ const TRANSLATIONS: [(&str, &str); 38] = [
translation!("el"), translation!("el"),
translation!("he"), translation!("he"),
translation!("hu"), translation!("hu"),
translation!("id"),
translation!("is"), translation!("is"),
translation!("it"), translation!("it"),
translation!("ja"), translation!("ja"),
translation!("la"), translation!("la"),
translation!("lv"),
translation!("nb"), translation!("nb"),
translation!("nl"), translation!("nl"),
translation!("nn"), translation!("nn"),
@ -82,9 +84,11 @@ impl Lang {
pub const HEBREW: Self = Self(*b"he ", 2); pub const HEBREW: Self = Self(*b"he ", 2);
pub const HUNGARIAN: Self = Self(*b"hu ", 2); pub const HUNGARIAN: Self = Self(*b"hu ", 2);
pub const ICELANDIC: Self = Self(*b"is ", 2); pub const ICELANDIC: Self = Self(*b"is ", 2);
pub const INDONESIAN: Self = Self(*b"id ", 2);
pub const ITALIAN: Self = Self(*b"it ", 2); pub const ITALIAN: Self = Self(*b"it ", 2);
pub const JAPANESE: Self = Self(*b"ja ", 2); pub const JAPANESE: Self = Self(*b"ja ", 2);
pub const LATIN: Self = Self(*b"la ", 2); pub const LATIN: Self = Self(*b"la ", 2);
pub const LATVIAN: Self = Self(*b"lv ", 2);
pub const LOWER_SORBIAN: Self = Self(*b"dsb", 3); pub const LOWER_SORBIAN: Self = Self(*b"dsb", 3);
pub const NYNORSK: Self = Self(*b"nn ", 2); pub const NYNORSK: Self = Self(*b"nn ", 2);
pub const POLISH: Self = Self(*b"pl ", 2); pub const POLISH: Self = Self(*b"pl ", 2);

View File

@ -30,6 +30,7 @@ pub use self::space::*;
use std::fmt::{self, Debug, Formatter}; use std::fmt::{self, Debug, Formatter};
use std::hash::Hash; use std::hash::Hash;
use std::str::FromStr;
use std::sync::LazyLock; use std::sync::LazyLock;
use ecow::{eco_format, EcoString}; use ecow::{eco_format, EcoString};
@ -42,7 +43,7 @@ use ttf_parser::Tag;
use typst_syntax::Spanned; use typst_syntax::Spanned;
use typst_utils::singleton; use typst_utils::singleton;
use crate::diag::{bail, warning, HintedStrResult, SourceResult}; use crate::diag::{bail, warning, HintedStrResult, SourceResult, StrResult};
use crate::engine::Engine; use crate::engine::Engine;
use crate::foundations::{ use crate::foundations::{
cast, dict, elem, Args, Array, Cast, Construct, Content, Dict, Fold, IntoValue, cast, dict, elem, Args, Array, Cast, Construct, Content, Dict, Fold, IntoValue,
@ -348,15 +349,17 @@ pub struct TextElem {
/// This can make justification visually more pleasing. /// This can make justification visually more pleasing.
/// ///
/// ```example /// ```example
/// #set page(width: 220pt)
///
/// #set par(justify: true) /// #set par(justify: true)
/// This justified text has a hyphen in /// This justified text has a hyphen in
/// the paragraph's first line. Hanging /// the paragraph's second line. Hanging
/// the hyphen slightly into the margin /// the hyphen slightly into the margin
/// results in a clearer paragraph edge. /// results in a clearer paragraph edge.
/// ///
/// #set text(overhang: false) /// #set text(overhang: false)
/// This justified text has a hyphen in /// This justified text has a hyphen in
/// the paragraph's first line. Hanging /// the paragraph's second line. Hanging
/// the hyphen slightly into the margin /// the hyphen slightly into the margin
/// results in a clearer paragraph edge. /// results in a clearer paragraph edge.
/// ``` /// ```
@ -891,9 +894,21 @@ cast! {
} }
/// Font family fallback list. /// Font family fallback list.
///
/// Must contain at least one font.
#[derive(Debug, Default, Clone, PartialEq, Hash)] #[derive(Debug, Default, Clone, PartialEq, Hash)]
pub struct FontList(pub Vec<FontFamily>); pub struct FontList(pub Vec<FontFamily>);
impl FontList {
pub fn new(fonts: Vec<FontFamily>) -> StrResult<Self> {
if fonts.is_empty() {
bail!("font fallback list must not be empty")
} else {
Ok(Self(fonts))
}
}
}
impl<'a> IntoIterator for &'a FontList { impl<'a> IntoIterator for &'a FontList {
type IntoIter = std::slice::Iter<'a, FontFamily>; type IntoIter = std::slice::Iter<'a, FontFamily>;
type Item = &'a FontFamily; type Item = &'a FontFamily;
@ -911,7 +926,7 @@ cast! {
self.0.into_value() self.0.into_value()
}, },
family: FontFamily => Self(vec![family]), family: FontFamily => Self(vec![family]),
values: Array => Self(values.into_iter().map(|v| v.cast()).collect::<HintedStrResult<_>>()?), values: Array => Self::new(values.into_iter().map(|v| v.cast()).collect::<HintedStrResult<_>>()?)?,
} }
/// Resolve a prioritized iterator over the font families. /// Resolve a prioritized iterator over the font families.
@ -1269,6 +1284,12 @@ pub fn features(styles: StyleChain) -> Vec<Feature> {
feat(b"frac", 1); feat(b"frac", 1);
} }
match EquationElem::size_in(styles) {
MathSize::Script => feat(b"ssty", 1),
MathSize::ScriptScript => feat(b"ssty", 2),
_ => {}
}
for (tag, value) in TextElem::features_in(styles).0 { for (tag, value) in TextElem::features_in(styles).0 {
tags.push(Feature::new(tag, value, ..)) tags.push(Feature::new(tag, value, ..))
} }
@ -1276,6 +1297,17 @@ pub fn features(styles: StyleChain) -> Vec<Feature> {
tags tags
} }
/// Process the language and region of a style chain into a
/// rustybuzz-compatible BCP 47 language.
pub fn language(styles: StyleChain) -> rustybuzz::Language {
let mut bcp: EcoString = TextElem::lang_in(styles).as_str().into();
if let Some(region) = TextElem::region_in(styles) {
bcp.push('-');
bcp.push_str(region.as_str());
}
rustybuzz::Language::from_str(&bcp).unwrap()
}
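The added `language` helper just joins the language code and the optional region with a hyphen to form a BCP 47 tag before handing it to rustybuzz. A trivial standalone sketch of that string construction:

```rust
/// Build a BCP 47 tag from a language code and an optional region, as the
/// new `language` helper does before passing it to rustybuzz. Standalone sketch.
fn bcp47(lang: &str, region: Option<&str>) -> String {
    let mut tag = String::from(lang);
    if let Some(region) = region {
        tag.push('-');
        tag.push_str(region);
    }
    tag
}

fn main() {
    assert_eq!(bcp47("de", Some("CH")), "de-CH");
    assert_eq!(bcp47("ja", None), "ja");
}
```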
/// A toggle that turns on and off alternatingly if folded. /// A toggle that turns on and off alternatingly if folded.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct ItalicToggle(pub bool); pub struct ItalicToggle(pub bool);

View File

@ -3,15 +3,17 @@ use std::ops::Range;
use std::sync::{Arc, LazyLock}; use std::sync::{Arc, LazyLock};
use comemo::Tracked; use comemo::Tracked;
use ecow::{eco_format, EcoString, EcoVec}; use ecow::{EcoString, EcoVec};
use syntect::highlighting as synt; use syntect::highlighting::{self as synt};
use syntect::parsing::{SyntaxDefinition, SyntaxSet, SyntaxSetBuilder}; use syntect::parsing::{ParseSyntaxError, SyntaxDefinition, SyntaxSet, SyntaxSetBuilder};
use typst_syntax::{split_newlines, LinkedNode, Span, Spanned}; use typst_syntax::{split_newlines, LinkedNode, Span, Spanned};
use typst_utils::ManuallyHash; use typst_utils::ManuallyHash;
use unicode_segmentation::UnicodeSegmentation; use unicode_segmentation::UnicodeSegmentation;
use super::Lang; use super::Lang;
use crate::diag::{At, FileError, SourceResult, StrResult}; use crate::diag::{
LineCol, LoadError, LoadResult, LoadedWithin, ReportPos, SourceResult,
};
use crate::engine::Engine; use crate::engine::Engine;
use crate::foundations::{ use crate::foundations::{
cast, elem, scope, Bytes, Content, Derived, NativeElement, OneOrMultiple, Packed, cast, elem, scope, Bytes, Content, Derived, NativeElement, OneOrMultiple, Packed,
@ -539,40 +541,29 @@ impl RawSyntax {
world: Tracked<dyn World + '_>, world: Tracked<dyn World + '_>,
sources: Spanned<OneOrMultiple<DataSource>>, sources: Spanned<OneOrMultiple<DataSource>>,
) -> SourceResult<Derived<OneOrMultiple<DataSource>, Vec<RawSyntax>>> { ) -> SourceResult<Derived<OneOrMultiple<DataSource>, Vec<RawSyntax>>> {
let data = sources.load(world)?; let loaded = sources.load(world)?;
let list = sources let list = loaded
.v
.0
.iter() .iter()
.zip(&data) .map(|data| Self::decode(&data.data).within(data))
.map(|(source, data)| Self::decode(source, data)) .collect::<SourceResult<_>>()?;
.collect::<StrResult<_>>()
.at(sources.span)?;
Ok(Derived::new(sources.v, list)) Ok(Derived::new(sources.v, list))
} }
/// Decode a syntax from a loaded source. /// Decode a syntax from a loaded source.
#[comemo::memoize] #[comemo::memoize]
#[typst_macros::time(name = "load syntaxes")] #[typst_macros::time(name = "load syntaxes")]
fn decode(source: &DataSource, data: &Bytes) -> StrResult<RawSyntax> { fn decode(bytes: &Bytes) -> LoadResult<RawSyntax> {
let src = data.as_str().map_err(FileError::from)?; let str = bytes.as_str()?;
let syntax = SyntaxDefinition::load_from_str(src, false, None).map_err(
|err| match source { let syntax = SyntaxDefinition::load_from_str(str, false, None)
DataSource::Path(path) => { .map_err(format_syntax_error)?;
eco_format!("failed to parse syntax file `{path}` ({err})")
}
DataSource::Bytes(_) => {
eco_format!("failed to parse syntax ({err})")
}
},
)?;
let mut builder = SyntaxSetBuilder::new(); let mut builder = SyntaxSetBuilder::new();
builder.add(syntax); builder.add(syntax);
Ok(RawSyntax(Arc::new(ManuallyHash::new( Ok(RawSyntax(Arc::new(ManuallyHash::new(
builder.build(), builder.build(),
typst_utils::hash128(data), typst_utils::hash128(bytes),
)))) ))))
} }
@ -582,6 +573,24 @@ impl RawSyntax {
} }
} }
fn format_syntax_error(error: ParseSyntaxError) -> LoadError {
let pos = syntax_error_pos(&error);
LoadError::new(pos, "failed to parse syntax", error)
}
fn syntax_error_pos(error: &ParseSyntaxError) -> ReportPos {
match error {
ParseSyntaxError::InvalidYaml(scan_error) => {
let m = scan_error.marker();
ReportPos::full(
m.index()..m.index(),
LineCol::one_based(m.line(), m.col() + 1),
)
}
_ => ReportPos::None,
}
}
/// A loaded syntect theme. /// A loaded syntect theme.
#[derive(Debug, Clone, PartialEq, Hash)] #[derive(Debug, Clone, PartialEq, Hash)]
pub struct RawTheme(Arc<ManuallyHash<synt::Theme>>); pub struct RawTheme(Arc<ManuallyHash<synt::Theme>>);
@ -592,18 +601,18 @@ impl RawTheme {
world: Tracked<dyn World + '_>, world: Tracked<dyn World + '_>,
source: Spanned<DataSource>, source: Spanned<DataSource>,
) -> SourceResult<Derived<DataSource, Self>> { ) -> SourceResult<Derived<DataSource, Self>> {
let data = source.load(world)?; let loaded = source.load(world)?;
let theme = Self::decode(&data).at(source.span)?; let theme = Self::decode(&loaded.data).within(&loaded)?;
Ok(Derived::new(source.v, theme)) Ok(Derived::new(source.v, theme))
} }
/// Decode a theme from bytes. /// Decode a theme from bytes.
#[comemo::memoize] #[comemo::memoize]
fn decode(data: &Bytes) -> StrResult<RawTheme> { fn decode(bytes: &Bytes) -> LoadResult<RawTheme> {
let mut cursor = std::io::Cursor::new(data.as_slice()); let mut cursor = std::io::Cursor::new(bytes.as_slice());
let theme = synt::ThemeSet::load_from_reader(&mut cursor) let theme =
.map_err(|err| eco_format!("failed to parse theme ({err})"))?; synt::ThemeSet::load_from_reader(&mut cursor).map_err(format_theme_error)?;
Ok(RawTheme(Arc::new(ManuallyHash::new(theme, typst_utils::hash128(data))))) Ok(RawTheme(Arc::new(ManuallyHash::new(theme, typst_utils::hash128(bytes)))))
} }
/// Get the underlying syntect theme. /// Get the underlying syntect theme.
@ -612,6 +621,14 @@ impl RawTheme {
} }
} }
fn format_theme_error(error: syntect::LoadingError) -> LoadError {
let pos = match &error {
syntect::LoadingError::ParseSyntax(err, _) => syntax_error_pos(err),
_ => ReportPos::None,
};
LoadError::new(pos, "failed to parse theme", error)
}
/// A highlighted line of raw text. /// A highlighted line of raw text.
/// ///
/// This is a helper element that is synthesized by [`raw`] elements. /// This is a helper element that is synthesized by [`raw`] elements.

View File

@ -237,8 +237,8 @@ impl<'s> SmartQuotes<'s> {
"cs" | "da" | "de" | "sk" | "sl" if alternative => ("", "", "»", "«"), "cs" | "da" | "de" | "sk" | "sl" if alternative => ("", "", "»", "«"),
"cs" | "de" | "et" | "is" | "lt" | "lv" | "sk" | "sl" => low_high, "cs" | "de" | "et" | "is" | "lt" | "lv" | "sk" | "sl" => low_high,
"da" => ("", "", "", ""), "da" => ("", "", "", ""),
"fr" | "ru" if alternative => default, "fr" if alternative => default,
"fr" => ("\u{00A0}", "\u{00A0}", "«\u{00A0}", "\u{00A0}»"), "fr" => ("", "", "«\u{202F}", "\u{202F}»"),
"fi" | "sv" if alternative => ("", "", "»", "»"), "fi" | "sv" if alternative => ("", "", "»", "»"),
"bs" | "fi" | "sv" => ("", "", "", ""), "bs" | "fi" | "sv" => ("", "", "", ""),
"it" if alternative => default, "it" if alternative => default,
@ -247,7 +247,9 @@ impl<'s> SmartQuotes<'s> {
"es" if matches!(region, Some("ES") | None) => ("", "", "«", "»"), "es" if matches!(region, Some("ES") | None) => ("", "", "«", "»"),
"hu" | "pl" | "ro" => ("", "", "", ""), "hu" | "pl" | "ro" => ("", "", "", ""),
"no" | "nb" | "nn" if alternative => low_high, "no" | "nb" | "nn" if alternative => low_high,
"ru" | "no" | "nb" | "nn" | "uk" => ("", "", "«", "»"), "no" | "nb" | "nn" => ("", "", "«", "»"),
"ru" => ("", "", "«", "»"),
"uk" => ("", "", "«", "»"),
"el" => ("", "", "«", "»"), "el" => ("", "", "«", "»"),
"he" => ("", "", "", ""), "he" => ("", "", "", ""),
"hr" => ("", "", "", ""), "hr" => ("", "", "", ""),

View File

@ -148,11 +148,11 @@ static TO_SRGB: LazyLock<qcms::Transform> = LazyLock::new(|| {
/// | `magma` | A black to purple to yellow color map. | /// | `magma` | A black to purple to yellow color map. |
/// | `plasma` | A purple to pink to yellow color map. | /// | `plasma` | A purple to pink to yellow color map. |
/// | `rocket` | A black to red to white color map. | /// | `rocket` | A black to red to white color map. |
/// | `mako` | A black to teal to yellow color map. | /// | `mako` | A black to teal to white color map. |
/// | `vlag` | A light blue to white to red color map. | /// | `vlag` | A light blue to white to red color map. |
/// | `icefire` | A light teal to black to yellow color map. | /// | `icefire` | A light teal to black to orange color map. |
/// | `flare` | A orange to purple color map that is perceptually uniform. | /// | `flare` | A orange to purple color map that is perceptually uniform. |
/// | `crest` | A blue to white to red color map. | /// | `crest` | A light green to blue color map. |
/// ///
/// Some popular presets are not included because they are not available under a /// Some popular presets are not included because they are not available under a
/// free licence. Others, like /// free licence. Others, like

View File

@ -10,6 +10,8 @@ use crate::foundations::{
use crate::layout::{Abs, Axes, BlockElem, Length, Point, Rel, Size}; use crate::layout::{Abs, Axes, BlockElem, Length, Point, Rel, Size};
use crate::visualize::{FillRule, Paint, Stroke}; use crate::visualize::{FillRule, Paint, Stroke};
use super::FixedStroke;
/// A curve consisting of movements, lines, and Bézier segments. /// A curve consisting of movements, lines, and Bézier segments.
/// ///
/// At any point in time, there is a conceptual pen or cursor. /// At any point in time, there is a conceptual pen or cursor.
@ -530,3 +532,65 @@ impl Curve {
Size::new(max_x - min_x, max_y - min_y) Size::new(max_x - min_x, max_y - min_y)
} }
} }
impl Curve {
fn to_kurbo(&self) -> impl Iterator<Item = kurbo::PathEl> + '_ {
use kurbo::PathEl;
self.0.iter().map(|item| match *item {
CurveItem::Move(point) => PathEl::MoveTo(point_to_kurbo(point)),
CurveItem::Line(point) => PathEl::LineTo(point_to_kurbo(point)),
CurveItem::Cubic(point, point1, point2) => PathEl::CurveTo(
point_to_kurbo(point),
point_to_kurbo(point1),
point_to_kurbo(point2),
),
CurveItem::Close => PathEl::ClosePath,
})
}
/// When this curve is interpreted as a clip mask, would it contain `point`?
pub fn contains(&self, fill_rule: FillRule, needle: Point) -> bool {
let kurbo = kurbo::BezPath::from_vec(self.to_kurbo().collect());
let windings = kurbo::Shape::winding(&kurbo, point_to_kurbo(needle));
match fill_rule {
FillRule::NonZero => windings != 0,
FillRule::EvenOdd => windings % 2 != 0,
}
}
/// When this curve is stroked with `stroke`, would the stroke contain
/// `point`?
pub fn stroke_contains(&self, stroke: &FixedStroke, needle: Point) -> bool {
let width = stroke.thickness.to_raw();
let cap = match stroke.cap {
super::LineCap::Butt => kurbo::Cap::Butt,
super::LineCap::Round => kurbo::Cap::Round,
super::LineCap::Square => kurbo::Cap::Square,
};
let join = match stroke.join {
super::LineJoin::Miter => kurbo::Join::Miter,
super::LineJoin::Round => kurbo::Join::Round,
super::LineJoin::Bevel => kurbo::Join::Bevel,
};
let miter_limit = stroke.miter_limit.get();
let mut style = kurbo::Stroke::new(width)
.with_caps(cap)
.with_join(join)
.with_miter_limit(miter_limit);
if let Some(dash) = &stroke.dash {
style = style.with_dashes(
dash.phase.to_raw(),
dash.array.iter().copied().map(Abs::to_raw),
);
}
let opts = kurbo::StrokeOpts::default();
let tolerance = 0.01;
let expanded = kurbo::stroke(self.to_kurbo(), &style, &opts, tolerance);
kurbo::Shape::contains(&expanded, point_to_kurbo(needle))
}
}
fn point_to_kurbo(point: Point) -> kurbo::Point {
kurbo::Point::new(point.x.to_raw(), point.y.to_raw())
}
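`Curve::contains` converts the curve to a `kurbo::BezPath` and decides containment from the winding number, interpreted according to the fill rule. A small standalone kurbo sketch of the same hit test, with a unit square standing in for the converted curve:

```rust
use kurbo::{BezPath, Point, Shape};

// Standalone sketch of the winding-number hit test used by `Curve::contains`.
fn main() {
    let mut path = BezPath::new();
    path.move_to(Point::new(0.0, 0.0));
    path.line_to(Point::new(1.0, 0.0));
    path.line_to(Point::new(1.0, 1.0));
    path.line_to(Point::new(0.0, 1.0));
    path.close_path();

    let inside = Point::new(0.5, 0.5);
    let outside = Point::new(2.0, 0.5);

    // Non-zero fill rule: any non-zero winding counts as inside.
    assert_ne!(path.winding(inside), 0);
    assert_eq!(path.winding(outside), 0);
}
```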

View File

@ -120,12 +120,12 @@ use crate::visualize::{Color, ColorSpace, WeightedColor};
/// #let spaces = ( /// #let spaces = (
/// ("Oklab", color.oklab), /// ("Oklab", color.oklab),
/// ("Oklch", color.oklch), /// ("Oklch", color.oklch),
/// ("linear-RGB", color.linear-rgb),
/// ("sRGB", color.rgb), /// ("sRGB", color.rgb),
/// ("linear-RGB", color.linear-rgb),
/// ("CMYK", color.cmyk), /// ("CMYK", color.cmyk),
/// ("Grayscale", color.luma),
/// ("HSL", color.hsl), /// ("HSL", color.hsl),
/// ("HSV", color.hsv), /// ("HSV", color.hsv),
/// ("Grayscale", color.luma),
/// ) /// )
/// ///
/// #for (name, space) in spaces { /// #for (name, space) in spaces {
@ -549,7 +549,7 @@ impl Gradient {
} }
/// Repeats this gradient a given number of times, optionally mirroring it /// Repeats this gradient a given number of times, optionally mirroring it
/// at each repetition. /// at every second repetition.
/// ///
/// ```example /// ```example
/// #circle( /// #circle(
@ -564,7 +564,17 @@ impl Gradient {
&self, &self,
/// The number of times to repeat the gradient. /// The number of times to repeat the gradient.
repetitions: Spanned<usize>, repetitions: Spanned<usize>,
/// Whether to mirror the gradient at each repetition. /// Whether to mirror the gradient at every second repetition, i.e.,
/// the first instance (and all odd ones) stays unchanged.
///
/// ```example
/// #circle(
/// radius: 40pt,
/// fill: gradient
/// .conic(green, black)
/// .repeat(2, mirror: true)
/// )
/// ```
#[named] #[named]
#[default(false)] #[default(false)]
mirror: bool, mirror: bool,

View File

@ -22,7 +22,7 @@ use crate::foundations::{
Smart, StyleChain, Smart, StyleChain,
}; };
use crate::layout::{BlockElem, Length, Rel, Sizing}; use crate::layout::{BlockElem, Length, Rel, Sizing};
use crate::loading::{DataSource, Load, Readable}; use crate::loading::{DataSource, Load, LoadSource, Loaded, Readable};
use crate::model::Figurable; use crate::model::Figurable;
use crate::text::LocalName; use crate::text::LocalName;
@ -65,10 +65,10 @@ pub struct ImageElem {
#[required] #[required]
#[parse( #[parse(
let source = args.expect::<Spanned<DataSource>>("source")?; let source = args.expect::<Spanned<DataSource>>("source")?;
let data = source.load(engine.world)?; let loaded = source.load(engine.world)?;
Derived::new(source.v, data) Derived::new(source.v, loaded)
)] )]
pub source: Derived<DataSource, Bytes>, pub source: Derived<DataSource, Loaded>,
/// The image's format. /// The image's format.
/// ///
@ -77,8 +77,8 @@ pub struct ImageElem {
/// [`source`]($image.source) (even then, Typst will try to figure out the /// [`source`]($image.source) (even then, Typst will try to figure out the
/// format automatically, but that's not always possible). /// format automatically, but that's not always possible).
/// ///
/// Supported formats are `{"png"}`, `{"jpg"}`, `{"gif"}`, `{"svg"}` as well /// Supported formats are `{"png"}`, `{"jpg"}`, `{"gif"}`, `{"svg"}`,
/// as raw pixel data. Embedding PDFs as images is /// `{"webp"}` as well as raw pixel data. Embedding PDFs as images is
/// [not currently supported](https://github.com/typst/typst/issues/145). /// [not currently supported](https://github.com/typst/typst/issues/145).
/// ///
/// When providing raw pixel data as the `source`, you must specify a /// When providing raw pixel data as the `source`, you must specify a
@ -154,8 +154,8 @@ pub struct ImageElem {
/// to `{auto}`, Typst will try to extract an ICC profile from the image. /// to `{auto}`, Typst will try to extract an ICC profile from the image.
#[parse(match args.named::<Spanned<Smart<DataSource>>>("icc")? { #[parse(match args.named::<Spanned<Smart<DataSource>>>("icc")? {
Some(Spanned { v: Smart::Custom(source), span }) => Some(Smart::Custom({ Some(Spanned { v: Smart::Custom(source), span }) => Some(Smart::Custom({
let data = Spanned::new(&source, span).load(engine.world)?; let loaded = Spanned::new(&source, span).load(engine.world)?;
Derived::new(source, data) Derived::new(source, loaded.data)
})), })),
Some(Spanned { v: Smart::Auto, .. }) => Some(Smart::Auto), Some(Spanned { v: Smart::Auto, .. }) => Some(Smart::Auto),
None => None, None => None,
@ -173,7 +173,7 @@ impl ImageElem {
pub fn decode( pub fn decode(
span: Span, span: Span,
/// The data to decode as an image. Can be a string for SVGs. /// The data to decode as an image. Can be a string for SVGs.
data: Readable, data: Spanned<Readable>,
/// The image's format. Detected automatically by default. /// The image's format. Detected automatically by default.
#[named] #[named]
format: Option<Smart<ImageFormat>>, format: Option<Smart<ImageFormat>>,
@ -193,8 +193,10 @@ impl ImageElem {
#[named] #[named]
scaling: Option<Smart<ImageScaling>>, scaling: Option<Smart<ImageScaling>>,
) -> StrResult<Content> { ) -> StrResult<Content> {
let bytes = data.into_bytes(); let bytes = data.v.into_bytes();
let source = Derived::new(DataSource::Bytes(bytes.clone()), bytes); let loaded =
Loaded::new(Spanned::new(LoadSource::Bytes, data.span), bytes.clone());
let source = Derived::new(DataSource::Bytes(bytes), loaded);
let mut elem = ImageElem::new(source); let mut elem = ImageElem::new(source);
if let Some(format) = format { if let Some(format) = format {
elem.push_format(format); elem.push_format(format);

View File

@ -3,17 +3,17 @@ use std::hash::{Hash, Hasher};
use std::io; use std::io;
use std::sync::Arc; use std::sync::Arc;
use crate::diag::{bail, StrResult};
use crate::foundations::{cast, dict, Bytes, Cast, Dict, Smart, Value};
use ecow::{eco_format, EcoString}; use ecow::{eco_format, EcoString};
use image::codecs::gif::GifDecoder; use image::codecs::gif::GifDecoder;
use image::codecs::jpeg::JpegDecoder; use image::codecs::jpeg::JpegDecoder;
use image::codecs::png::PngDecoder; use image::codecs::png::PngDecoder;
use image::codecs::webp::WebPDecoder;
use image::{ use image::{
guess_format, DynamicImage, ImageBuffer, ImageDecoder, ImageResult, Limits, Pixel, guess_format, DynamicImage, ImageBuffer, ImageDecoder, ImageResult, Limits, Pixel,
}; };
use crate::diag::{bail, StrResult};
use crate::foundations::{cast, dict, Bytes, Cast, Dict, Smart, Value};
/// A decoded raster image. /// A decoded raster image.
#[derive(Clone, Hash)] #[derive(Clone, Hash)]
pub struct RasterImage(Arc<Repr>); pub struct RasterImage(Arc<Repr>);
@ -22,7 +22,8 @@ pub struct RasterImage(Arc<Repr>);
struct Repr { struct Repr {
data: Bytes, data: Bytes,
format: RasterFormat, format: RasterFormat,
dynamic: image::DynamicImage, dynamic: Arc<DynamicImage>,
exif_rotation: Option<u32>,
icc: Option<Bytes>, icc: Option<Bytes>,
dpi: Option<f64>, dpi: Option<f64>,
} }
@ -50,6 +51,8 @@ impl RasterImage {
format: RasterFormat, format: RasterFormat,
icc: Smart<Bytes>, icc: Smart<Bytes>,
) -> StrResult<RasterImage> { ) -> StrResult<RasterImage> {
let mut exif_rot = None;
let (dynamic, icc, dpi) = match format { let (dynamic, icc, dpi) = match format {
RasterFormat::Exchange(format) => { RasterFormat::Exchange(format) => {
fn decode<T: ImageDecoder>( fn decode<T: ImageDecoder>(
@ -75,6 +78,7 @@ impl RasterImage {
ExchangeFormat::Jpg => decode(JpegDecoder::new(cursor), icc), ExchangeFormat::Jpg => decode(JpegDecoder::new(cursor), icc),
ExchangeFormat::Png => decode(PngDecoder::new(cursor), icc), ExchangeFormat::Png => decode(PngDecoder::new(cursor), icc),
ExchangeFormat::Gif => decode(GifDecoder::new(cursor), icc), ExchangeFormat::Gif => decode(GifDecoder::new(cursor), icc),
ExchangeFormat::Webp => decode(WebPDecoder::new(cursor), icc),
} }
.map_err(format_image_error)?; .map_err(format_image_error)?;
@ -85,6 +89,7 @@ impl RasterImage {
// Apply rotation from EXIF metadata. // Apply rotation from EXIF metadata.
if let Some(rotation) = exif.as_ref().and_then(exif_rotation) { if let Some(rotation) = exif.as_ref().and_then(exif_rotation) {
apply_rotation(&mut dynamic, rotation); apply_rotation(&mut dynamic, rotation);
exif_rot = Some(rotation);
} }
// Extract pixel density. // Extract pixel density.
@ -136,7 +141,14 @@ impl RasterImage {
} }
}; };
Ok(Self(Arc::new(Repr { data, format, dynamic, icc, dpi }))) Ok(Self(Arc::new(Repr {
data,
format,
exif_rotation: exif_rot,
dynamic: Arc::new(dynamic),
icc,
dpi,
})))
} }
/// The raw image data. /// The raw image data.
@ -159,6 +171,11 @@ impl RasterImage {
self.dynamic().height() self.dynamic().height()
} }
/// The rotation stored in the image's EXIF metadata, if any.
pub fn exif_rotation(&self) -> Option<u32> {
self.0.exif_rotation
}
/// The image's pixel density in pixels per inch, if known. /// The image's pixel density in pixels per inch, if known.
/// ///
/// This is guaranteed to be positive. /// This is guaranteed to be positive.
@ -167,7 +184,7 @@ impl RasterImage {
} }
/// Access the underlying dynamic image. /// Access the underlying dynamic image.
pub fn dynamic(&self) -> &image::DynamicImage { pub fn dynamic(&self) -> &Arc<DynamicImage> {
&self.0.dynamic &self.0.dynamic
} }
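Returning `&Arc<DynamicImage>` lets callers that need to keep the decoded pixels (an export backend, say) clone the `Arc` cheaply instead of copying the whole pixel buffer. A one-line sketch of the intent:

use std::sync::Arc;

use image::DynamicImage;

// Cloning the Arc copies a pointer and bumps a reference count; the
// pixel data itself is shared, not duplicated.
fn keep_pixels(dynamic: &Arc<DynamicImage>) -> Arc<DynamicImage> {
    Arc::clone(dynamic)
}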
@ -227,6 +244,8 @@ pub enum ExchangeFormat {
/// Raster format that is typically used for short animated clips. Typst can /// Raster format that is typically used for short animated clips. Typst can
/// load GIFs, but they will become static. /// load GIFs, but they will become static.
Gif, Gif,
/// Raster format that supports both lossy and lossless compression.
Webp,
} }
impl ExchangeFormat { impl ExchangeFormat {
@ -242,6 +261,7 @@ impl From<ExchangeFormat> for image::ImageFormat {
ExchangeFormat::Png => image::ImageFormat::Png, ExchangeFormat::Png => image::ImageFormat::Png,
ExchangeFormat::Jpg => image::ImageFormat::Jpeg, ExchangeFormat::Jpg => image::ImageFormat::Jpeg,
ExchangeFormat::Gif => image::ImageFormat::Gif, ExchangeFormat::Gif => image::ImageFormat::Gif,
ExchangeFormat::Webp => image::ImageFormat::WebP,
} }
} }
} }
@ -254,6 +274,7 @@ impl TryFrom<image::ImageFormat> for ExchangeFormat {
image::ImageFormat::Png => ExchangeFormat::Png, image::ImageFormat::Png => ExchangeFormat::Png,
image::ImageFormat::Jpeg => ExchangeFormat::Jpg, image::ImageFormat::Jpeg => ExchangeFormat::Jpg,
image::ImageFormat::Gif => ExchangeFormat::Gif, image::ImageFormat::Gif => ExchangeFormat::Gif,
image::ImageFormat::WebP => ExchangeFormat::Webp,
_ => bail!("format not yet supported"), _ => bail!("format not yet supported"),
}) })
} }
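These two impls round-trip between `ExchangeFormat` and `image::ImageFormat`, now including WebP. A hedged sketch of how automatic detection can feed into them via `image::guess_format` (imported at the top of this file); the `detect` helper is illustrative only:

use image::ImageFormat;

// Detect a format from magic bytes and keep it only when it is one of
// the formats handled by the TryFrom impl above.
fn detect(data: &[u8]) -> Option<ImageFormat> {
    let format = image::guess_format(data).ok()?;
    matches!(
        format,
        ImageFormat::Png | ImageFormat::Jpeg | ImageFormat::Gif | ImageFormat::WebP
    )
    .then_some(format)
}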
@ -325,12 +346,12 @@ fn apply_rotation(image: &mut DynamicImage, rotation: u32) {
ops::flip_horizontal_in_place(image); ops::flip_horizontal_in_place(image);
*image = image.rotate270(); *image = image.rotate270();
} }
6 => *image = image.rotate90(), 6 => *image = image.rotate270(),
7 => { 7 => {
ops::flip_horizontal_in_place(image); ops::flip_horizontal_in_place(image);
*image = image.rotate90(); *image = image.rotate90();
} }
8 => *image = image.rotate270(), 8 => *image = image.rotate90(),
_ => {} _ => {}
} }
} }

View File

@ -3,10 +3,9 @@ use std::hash::{Hash, Hasher};
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use comemo::Tracked; use comemo::Tracked;
use ecow::EcoString;
use siphasher::sip128::{Hasher128, SipHasher13}; use siphasher::sip128::{Hasher128, SipHasher13};
use crate::diag::{format_xml_like_error, StrResult}; use crate::diag::{format_xml_like_error, LoadError, LoadResult, ReportPos};
use crate::foundations::Bytes; use crate::foundations::Bytes;
use crate::layout::Axes; use crate::layout::Axes;
use crate::text::{ use crate::text::{
@ -30,7 +29,7 @@ impl SvgImage {
/// Decode an SVG image without fonts. /// Decode an SVG image without fonts.
#[comemo::memoize] #[comemo::memoize]
#[typst_macros::time(name = "load svg")] #[typst_macros::time(name = "load svg")]
pub fn new(data: Bytes) -> StrResult<SvgImage> { pub fn new(data: Bytes) -> LoadResult<SvgImage> {
let tree = let tree =
usvg::Tree::from_data(&data, &base_options()).map_err(format_usvg_error)?; usvg::Tree::from_data(&data, &base_options()).map_err(format_usvg_error)?;
Ok(Self(Arc::new(Repr { data, size: tree_size(&tree), font_hash: 0, tree }))) Ok(Self(Arc::new(Repr { data, size: tree_size(&tree), font_hash: 0, tree })))
@ -43,7 +42,7 @@ impl SvgImage {
data: Bytes, data: Bytes,
world: Tracked<dyn World + '_>, world: Tracked<dyn World + '_>,
families: &[&str], families: &[&str],
) -> StrResult<SvgImage> { ) -> LoadResult<SvgImage> {
let book = world.book(); let book = world.book();
let resolver = Mutex::new(FontResolver::new(world, book, families)); let resolver = Mutex::new(FontResolver::new(world, book, families));
let tree = usvg::Tree::from_data( let tree = usvg::Tree::from_data(
@ -125,16 +124,15 @@ fn tree_size(tree: &usvg::Tree) -> Axes<f64> {
} }
/// Format the user-facing SVG decoding error message. /// Format the user-facing SVG decoding error message.
fn format_usvg_error(error: usvg::Error) -> EcoString { fn format_usvg_error(error: usvg::Error) -> LoadError {
match error { let error = match error {
usvg::Error::NotAnUtf8Str => "file is not valid utf-8".into(), usvg::Error::NotAnUtf8Str => "file is not valid utf-8",
usvg::Error::MalformedGZip => "file is not compressed correctly".into(), usvg::Error::MalformedGZip => "file is not compressed correctly",
usvg::Error::ElementsLimitReached => "file is too large".into(), usvg::Error::ElementsLimitReached => "file is too large",
usvg::Error::InvalidSize => { usvg::Error::InvalidSize => "width, height, or viewbox is invalid",
"failed to parse SVG (width, height, or viewbox is invalid)".into() usvg::Error::ParsingFailed(error) => return format_xml_like_error("SVG", error),
} };
usvg::Error::ParsingFailed(error) => format_xml_like_error("SVG", error), LoadError::new(ReportPos::None, "failed to parse SVG", error)
}
} }
/// Provides Typst's fonts to usvg. /// Provides Typst's fonts to usvg.
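For reference, a minimal sketch of the decode path whose errors are mapped above, assuming a recent usvg API (`Tree::from_data` with default `Options`). It is not the project's loader, only an illustration of where each `usvg::Error` variant can surface.

// Hypothetical helper: parse SVG bytes and report the intrinsic size.
fn svg_size(data: &[u8]) -> Result<(f32, f32), usvg::Error> {
    let tree = usvg::Tree::from_data(data, &usvg::Options::default())?;
    let size = tree.size();
    Ok((size.width(), size.height()))
}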

View File

@ -106,7 +106,7 @@ pub struct RectElem {
pub radius: Corners<Option<Rel<Length>>>, pub radius: Corners<Option<Rel<Length>>>,
/// How much to pad the rectangle's content. /// How much to pad the rectangle's content.
/// See the [box's documentation]($box.outset) for more details. /// See the [box's documentation]($box.inset) for more details.
#[resolve] #[resolve]
#[fold] #[fold]
#[default(Sides::splat(Some(Abs::pt(5.0).into())))] #[default(Sides::splat(Some(Abs::pt(5.0).into())))]

View File

@ -4,5 +4,5 @@ equation = Rovnice
bibliography = Bibliografie bibliography = Bibliografie
heading = Kapitola heading = Kapitola
outline = Obsah outline = Obsah
raw = Seznam raw = Výpis
page = strana page = strana

View File

@ -0,0 +1,8 @@
figure = Gambar
table = Tabel
equation = Persamaan
bibliography = Daftar Pustaka
heading = Bagian
outline = Daftar Isi
raw = Kode
page = halaman

View File

@ -0,0 +1,8 @@
figure = Attēls
table = Tabula
equation = Vienādojums
bibliography = Literatūra
heading = Sadaļa
outline = Saturs
raw = Saraksts
page = lpp.

View File

@ -185,6 +185,7 @@ struct Cast {
} }
/// A pattern in a cast, e.g. `"ascender"` or `v: i64`. /// A pattern in a cast, e.g. `"ascender"` or `v: i64`.
#[allow(clippy::large_enum_variant)]
enum Pattern { enum Pattern {
Str(syn::LitStr), Str(syn::LitStr),
Ty(syn::Pat, syn::Type), Ty(syn::Pat, syn::Type),
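The new `#[allow(clippy::large_enum_variant)]` silences the lint that fires when one variant dwarfs the others, because every value of an enum is as large as its largest variant. A hedged illustration of the trade-off; the usual alternative is to box the big payload, and the allow is presumably preferred here because the enum only lives during macro expansion.

// What the lint complains about: `size_of::<Unboxed>()` exceeds 1 KiB
// even when the value is `Small`.
enum Unboxed {
    Small(u8),
    Large([u8; 1024]),
}

// The usual fix: box the large payload so the enum stays pointer-sized
// (plus discriminant), at the cost of a heap allocation.
enum Boxed {
    Small(u8),
    Large(Box<[u8; 1024]>),
}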

Some files were not shown because too many files have changed in this diff.