Switch PDF backend to krilla (#5420)

Co-authored-by: Laurenz <laurmaedje@gmail.com>
Laurenz Stampfl 2025-04-01 16:42:52 +02:00 committed by GitHub
parent 012e14d40c
commit 96dd67e011
30 changed files with 2426 additions and 4876 deletions

Cargo.lock (generated)

@@ -217,6 +217,20 @@ name = "bytemuck"
version = "1.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef657dfab802224e671f5818e9a4935f9b1957ed18e58292690cc39e7a4092a3"
dependencies = [
"bytemuck_derive",
]
[[package]]
name = "bytemuck_derive"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fa76293b4f7bb636ab88fd78228235b5248b4d05cc589aed610f954af5d7c7a"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "byteorder"
@@ -735,11 +749,12 @@ dependencies = [
[[package]]
name = "flate2"
version = "1.0.35"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c"
checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc"
dependencies = [
"crc32fast",
"libz-rs-sys",
"miniz_oxide",
]
@@ -749,6 +764,15 @@ version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4"
[[package]]
name = "float-cmp"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b09cf3155332e944990140d967ff5eceb70df778b34f77d8075db46e4704e6d8"
dependencies = [
"num-traits",
]
[[package]]
name = "fnv"
version = "1.0.7"
@@ -761,6 +785,15 @@ version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f"
[[package]]
name = "font-types"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d868ec188a98bb014c606072edd47e52e7ab7297db943b0b28503121e1d037bd"
dependencies = [
"bytemuck",
]
[[package]]
name = "fontconfig-parser"
version = "0.5.7"
@@ -829,6 +862,15 @@ version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
[[package]]
name = "fxhash"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
dependencies = [
"byteorder",
]
[[package]]
name = "getopts"
version = "0.2.21"
@@ -966,7 +1008,7 @@ checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526"
dependencies = [
"displaydoc",
"serde",
"yoke",
"yoke 0.7.5",
"zerofrom",
"zerovec",
]
@@ -1064,7 +1106,7 @@ dependencies = [
"stable_deref_trait",
"tinystr",
"writeable",
"yoke",
"yoke 0.7.5",
"zerofrom",
"zerovec",
]
@@ -1310,6 +1352,48 @@ dependencies = [
"libc",
]
[[package]]
name = "krilla"
version = "0.3.0"
source = "git+https://github.com/LaurenzV/krilla?rev=14756f7#14756f7067cb1a80b73b712cae9f98597153e623"
dependencies = [
"base64",
"bumpalo",
"comemo",
"flate2",
"float-cmp 0.10.0",
"fxhash",
"gif",
"image-webp",
"imagesize",
"once_cell",
"pdf-writer",
"png",
"rayon",
"rustybuzz",
"siphasher",
"skrifa",
"subsetter",
"tiny-skia-path",
"xmp-writer",
"yoke 0.8.0",
"zune-jpeg",
]
[[package]]
name = "krilla-svg"
version = "0.3.0"
source = "git+https://github.com/LaurenzV/krilla?rev=14756f7#14756f7067cb1a80b73b712cae9f98597153e623"
dependencies = [
"flate2",
"fontdb",
"krilla",
"png",
"resvg",
"tiny-skia",
"usvg",
]
[[package]]
name = "kurbo"
version = "0.11.1"
@@ -1371,6 +1455,15 @@ dependencies = [
"redox_syscall",
]
[[package]]
name = "libz-rs-sys"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "902bc563b5d65ad9bba616b490842ef0651066a1a1dc3ce1087113ffcb873c8d"
dependencies = [
"zlib-rs",
]
[[package]]
name = "linked-hash-map"
version = "0.5.6"
@@ -1458,9 +1551,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "miniz_oxide"
version = "0.8.3"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8402cab7aefae129c6977bb0ff1b8fd9a04eb5b51efc50a70bea51cda0c7924"
checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5"
dependencies = [
"adler2",
"simd-adler32",
@@ -1739,8 +1832,7 @@ checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3"
[[package]]
name = "pdf-writer"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5df03c7d216de06f93f398ef06f1385a60f2c597bb96f8195c8d98e08a26b1d5"
source = "git+https://github.com/typst/pdf-writer?rev=0d513b9#0d513b9050d2f1a0507cabb4898aca971af6da98"
dependencies = [
"bitflags 2.8.0",
"itoa",
@@ -1997,6 +2089,16 @@ dependencies = [
"crossbeam-utils",
]
[[package]]
name = "read-fonts"
version = "0.27.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f004ee5c610b8beb5f33273246893ac6258ec22185a6eb8ee132bccdb904cdaa"
dependencies = [
"bytemuck",
"font-types",
]
[[package]]
name = "redox_syscall"
version = "0.5.8"
@@ -2315,6 +2417,16 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d"
[[package]]
name = "skrifa"
version = "0.28.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "16e7936ca3627fdb516e97aca3c8ab5103f94ae32fe5ce80a0a02edcbacb7b53"
dependencies = [
"bytemuck",
"read-fonts",
]
[[package]]
name = "slotmap"
version = "1.0.7"
@@ -2361,7 +2473,7 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6637bab7722d379c8b41ba849228d680cc12d0a45ba1fa2b48f2a30577a06731"
dependencies = [
"float-cmp",
"float-cmp 0.9.0",
]
[[package]]
@@ -2405,27 +2517,9 @@ dependencies = [
[[package]]
name = "subsetter"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74f98178f34057d4d4de93d68104007c6dea4dfac930204a69ab4622daefa648"
[[package]]
name = "svg2pdf"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e50dc062439cc1a396181059c80932a6e6bd731b130e674c597c0c8874b6df22"
source = "git+https://github.com/typst/subsetter?rev=460fdb6#460fdb66d6e0138b721b1ca9882faf15ce003246"
dependencies = [
"fontdb",
"image",
"log",
"miniz_oxide",
"once_cell",
"pdf-writer",
"resvg",
"siphasher",
"subsetter",
"tiny-skia",
"ttf-parser",
"usvg",
"fxhash",
]
[[package]]
@@ -3018,26 +3112,19 @@ dependencies = [
name = "typst-pdf"
version = "0.13.1"
dependencies = [
"arrayvec",
"base64",
"bytemuck",
"comemo",
"ecow",
"image",
"indexmap 2.7.1",
"miniz_oxide",
"pdf-writer",
"krilla",
"krilla-svg",
"serde",
"subsetter",
"svg2pdf",
"ttf-parser",
"typst-assets",
"typst-library",
"typst-macros",
"typst-syntax",
"typst-timing",
"typst-utils",
"xmp-writer",
]
[[package]]
@@ -3662,8 +3749,7 @@ checksum = "ec7a2a501ed189703dba8b08142f057e887dfc4b2cc4db2d343ac6376ba3e0b9"
[[package]]
name = "xmp-writer"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7eb5954c9ca6dcc869e98d3e42760ed9dab08f3e70212b31d7ab8ae7f3b7a487"
source = "git+https://github.com/LaurenzV/xmp-writer?rev=a1cbb887#a1cbb887a84376fea4d7590d41c194a332840549"
[[package]]
name = "xz2"
@@ -3701,7 +3787,19 @@ checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40"
dependencies = [
"serde",
"stable_deref_trait",
"yoke-derive",
"yoke-derive 0.7.5",
"zerofrom",
]
[[package]]
name = "yoke"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc"
dependencies = [
"serde",
"stable_deref_trait",
"yoke-derive 0.8.0",
"zerofrom",
]
@@ -3717,6 +3815,18 @@ dependencies = [
"synstructure",
]
[[package]]
name = "yoke-derive"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6"
dependencies = [
"proc-macro2",
"quote",
"syn",
"synstructure",
]
[[package]]
name = "zerocopy"
version = "0.7.35"
@@ -3778,7 +3888,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079"
dependencies = [
"serde",
"yoke",
"yoke 0.7.5",
"zerofrom",
"zerovec-derive",
]
@@ -3809,6 +3919,12 @@ dependencies = [
"zopfli",
]
[[package]]
name = "zlib-rs"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b20717f0917c908dc63de2e44e97f1e6b126ca58d0e391cee86d504eb8fbd05"
[[package]]
name = "zopfli"
version = "0.8.1"


@@ -71,11 +71,12 @@ if_chain = "1"
image = { version = "0.25.5", default-features = false, features = ["png", "jpeg", "gif"] }
indexmap = { version = "2", features = ["serde"] }
kamadak-exif = "0.6"
krilla = { git = "https://github.com/LaurenzV/krilla", rev = "14756f7", default-features = false, features = ["raster-images", "comemo", "rayon"] }
krilla-svg = { git = "https://github.com/LaurenzV/krilla", rev = "14756f7" }
kurbo = "0.11"
libfuzzer-sys = "0.4"
lipsum = "0.9"
memchr = "2"
miniz_oxide = "0.8"
native-tls = "0.2"
notify = "8"
once_cell = "1"
@@ -113,7 +114,6 @@ siphasher = "1"
smallvec = { version = "1.11.1", features = ["union", "const_generics", "const_new"] }
stacker = "0.1.15"
subsetter = "0.2"
svg2pdf = "0.13"
syn = { version = "2", features = ["full", "extra-traits"] }
syntect = { version = "5", default-features = false, features = ["parsing", "regex-fancy", "plist-load", "yaml-load"] }
tar = "0.4"
@@ -140,7 +140,6 @@ wasmi = "0.40.0"
web-sys = "0.3"
xmlparser = "0.13.5"
xmlwriter = "0.1.0"
xmp-writer = "0.3.1"
xz2 = { version = "0.1", features = ["static"] }
yaml-front-matter = "0.1"
zip = { version = "2.5", default-features = false, features = ["deflate"] }


@@ -467,15 +467,45 @@ display_possible_values!(Feature);
#[derive(Debug, Copy, Clone, Eq, PartialEq, ValueEnum)]
#[allow(non_camel_case_types)]
pub enum PdfStandard {
/// PDF 1.4.
#[value(name = "1.4")]
V_1_4,
/// PDF 1.5.
#[value(name = "1.5")]
V_1_5,
/// PDF 1.6.
#[value(name = "1.6")]
V_1_6,
/// PDF 1.7.
#[value(name = "1.7")]
V_1_7,
/// PDF 2.0.
#[value(name = "2.0")]
V_2_0,
/// PDF/A-1b.
#[value(name = "a-1b")]
A_1b,
/// PDF/A-2b.
#[value(name = "a-2b")]
A_2b,
/// PDF/A-3b.
/// PDF/A-2u.
#[value(name = "a-2u")]
A_2u,
/// PDF/A-3b.
#[value(name = "a-3b")]
A_3b,
/// PDF/A-3u.
#[value(name = "a-3u")]
A_3u,
/// PDF/A-4.
#[value(name = "a-4")]
A_4,
/// PDF/A-4f.
#[value(name = "a-4f")]
A_4f,
/// PDF/A-4e.
#[value(name = "a-4e")]
A_4e,
}
display_possible_values!(PdfStandard);
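
For reference, clap resolves the `#[value(name = "...")]` strings verbatim when parsing `--pdf-standard`. A minimal standalone sketch of that behavior (assuming clap 4 with the `derive` feature and an enum trimmed to two variants):

use clap::ValueEnum;

#[derive(Debug, Copy, Clone, Eq, PartialEq, ValueEnum)]
#[allow(non_camel_case_types)]
enum Standard {
    #[value(name = "1.7")]
    V_1_7,
    #[value(name = "a-2u")]
    A_2u,
}

fn main() {
    // The second argument disables case-insensitive matching.
    assert_eq!(Standard::from_str("a-2u", false), Ok(Standard::A_2u));
    assert!(Standard::from_str("a-9z", false).is_err());
}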


@@ -63,8 +63,7 @@ pub struct CompileConfig {
/// Opens the output file with the default viewer or a specific program after
/// compilation.
pub open: Option<Option<String>>,
/// One (or multiple comma-separated) PDF standards that Typst will enforce
/// conformance with.
/// A list of standards the PDF should conform to.
pub pdf_standards: PdfStandards,
/// A path to write a Makefile rule describing the current compilation.
pub make_deps: Option<PathBuf>,
@@ -130,18 +129,9 @@ impl CompileConfig {
PageRanges::new(export_ranges.iter().map(|r| r.0.clone()).collect())
});
let pdf_standards = {
let list = args
.pdf_standard
.iter()
.map(|standard| match standard {
PdfStandard::V_1_7 => typst_pdf::PdfStandard::V_1_7,
PdfStandard::A_2b => typst_pdf::PdfStandard::A_2b,
PdfStandard::A_3b => typst_pdf::PdfStandard::A_3b,
})
.collect::<Vec<_>>();
PdfStandards::new(&list)?
};
let pdf_standards = PdfStandards::new(
&args.pdf_standard.iter().copied().map(Into::into).collect::<Vec<_>>(),
)?;
#[cfg(feature = "http-server")]
let server = match watch {
@@ -295,6 +285,7 @@ fn export_pdf(document: &PagedDocument, config: &CompileConfig) -> SourceResult<
})
}
};
let options = PdfOptions {
ident: Smart::Auto,
timestamp,
@@ -765,3 +756,23 @@ impl<'a> codespan_reporting::files::Files<'a> for SystemWorld {
})
}
}
impl From<PdfStandard> for typst_pdf::PdfStandard {
fn from(standard: PdfStandard) -> Self {
match standard {
PdfStandard::V_1_4 => typst_pdf::PdfStandard::V_1_4,
PdfStandard::V_1_5 => typst_pdf::PdfStandard::V_1_5,
PdfStandard::V_1_6 => typst_pdf::PdfStandard::V_1_6,
PdfStandard::V_1_7 => typst_pdf::PdfStandard::V_1_7,
PdfStandard::V_2_0 => typst_pdf::PdfStandard::V_2_0,
PdfStandard::A_1b => typst_pdf::PdfStandard::A_1b,
PdfStandard::A_2b => typst_pdf::PdfStandard::A_2b,
PdfStandard::A_2u => typst_pdf::PdfStandard::A_2u,
PdfStandard::A_3b => typst_pdf::PdfStandard::A_3b,
PdfStandard::A_3u => typst_pdf::PdfStandard::A_3u,
PdfStandard::A_4 => typst_pdf::PdfStandard::A_4,
PdfStandard::A_4f => typst_pdf::PdfStandard::A_4f,
PdfStandard::A_4e => typst_pdf::PdfStandard::A_4e,
}
}
}


@@ -824,12 +824,42 @@ fn shape_segment<'a>(
// Add the glyph to the shaped output.
if info.glyph_id != 0 && is_covered(cluster) {
// Determine the text range of the glyph.
// Assume we have the following sequence of (glyph_id, cluster):
// [(120, 0), (80, 0), (3, 3), (755, 4), (69, 4), (424, 13),
// (63, 13), (193, 25), (80, 25), (3, 31)]
//
// We then want the sequence of (glyph_id, text_range) to look as follows:
// [(120, 0..3), (80, 0..3), (3, 3..4), (755, 4..13), (69, 4..13),
// (424, 13..25), (63, 13..25), (193, 25..31), (80, 25..31), (3, 31..x)]
//
// Each glyph in the same cluster should be assigned the full text
// range. This is necessary because only this way krilla can
// properly assign `ActualText` attributes in complex shaping
// scenarios.
// The start of the glyph's text range.
let start = base + cluster;
let end = base
+ if ltr { i.checked_add(1) } else { i.checked_sub(1) }
.and_then(|last| infos.get(last))
.map_or(text.len(), |info| info.cluster as usize);
// Determine the end of the glyph's text range.
let mut k = i;
let step: isize = if ltr { 1 } else { -1 };
let end = loop {
// If we've reached the end of the glyphs, the `end` of the
// range should be the end of the full text.
let Some((next, next_info)) = k
.checked_add_signed(step)
.and_then(|n| infos.get(n).map(|info| (n, info)))
else {
break base + text.len();
};
// If the cluster doesn't match anymore, we've reached the end.
if next_info.cluster != info.cluster {
break base + next_info.cluster as usize;
}
k = next;
};
let c = text[cluster..].chars().next().unwrap();
let script = c.script();
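
To make the range assignment concrete, here is the same end-finding loop as a standalone sketch over plain `(glyph_id, cluster)` pairs, checked against the sequence from the comment above (illustrative only; the real code walks rustybuzz glyph infos and offsets everything by `base`):

fn text_ranges(infos: &[(u32, usize)], text_len: usize, ltr: bool) -> Vec<std::ops::Range<usize>> {
    let step: isize = if ltr { 1 } else { -1 };
    (0..infos.len())
        .map(|i| {
            let cluster = infos[i].1;
            let mut k = i;
            let end = loop {
                // Step over glyphs until the cluster changes or the glyphs end.
                let Some((next, &(_, next_cluster))) = k
                    .checked_add_signed(step)
                    .and_then(|n| infos.get(n).map(|info| (n, info)))
                else {
                    break text_len;
                };
                if next_cluster != cluster {
                    break next_cluster;
                }
                k = next;
            };
            cluster..end
        })
        .collect()
}

fn main() {
    let infos = [(120, 0), (80, 0), (3, 3), (755, 4), (69, 4), (424, 13),
                 (63, 13), (193, 25), (80, 25), (3, 31)];
    let ranges = text_ranges(&infos, 32, true);
    assert_eq!(ranges[0], 0..3);   // both glyphs of cluster 0 share 0..3
    assert_eq!(ranges[4], 4..13);  // cluster 4 runs up to cluster 13
    assert_eq!(ranges[9], 31..32); // the last cluster extends to the text's end
}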


@@ -307,6 +307,20 @@ impl Transform {
Self { sx, sy, ..Self::identity() }
}
/// A scale transform at a specific position.
pub fn scale_at(sx: Ratio, sy: Ratio, px: Abs, py: Abs) -> Self {
Self::translate(px, py)
.pre_concat(Self::scale(sx, sy))
.pre_concat(Self::translate(-px, -py))
}
/// A rotate transform at a specific position.
pub fn rotate_at(angle: Angle, px: Abs, py: Abs) -> Self {
Self::translate(px, py)
.pre_concat(Self::rotate(angle))
.pre_concat(Self::translate(-px, -py))
}
/// A rotate transform.
pub fn rotate(angle: Angle) -> Self {
let cos = Ratio::new(angle.cos());
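
A quick way to verify the `pre_concat` order in `scale_at` and `rotate_at`: the pivot must come out as a fixed point. A minimal sketch with plain `f64` matrices standing in for Typst's `Transform` (same field layout; `pre_concat(prior)` is assumed to apply `prior` first):

#[derive(Clone, Copy, Debug, PartialEq)]
struct Mat { sx: f64, ky: f64, kx: f64, sy: f64, tx: f64, ty: f64 }

impl Mat {
    fn translate(tx: f64, ty: f64) -> Self {
        Mat { sx: 1.0, ky: 0.0, kx: 0.0, sy: 1.0, tx, ty }
    }

    fn scale(sx: f64, sy: f64) -> Self {
        Mat { sx, ky: 0.0, kx: 0.0, sy, tx: 0.0, ty: 0.0 }
    }

    // Matrix product `self * prior`: `prior` transforms the point first.
    fn pre_concat(self, p: Mat) -> Self {
        Mat {
            sx: self.sx * p.sx + self.kx * p.ky,
            ky: self.ky * p.sx + self.sy * p.ky,
            kx: self.sx * p.kx + self.kx * p.sy,
            sy: self.ky * p.kx + self.sy * p.sy,
            tx: self.sx * p.tx + self.kx * p.ty + self.tx,
            ty: self.ky * p.tx + self.sy * p.ty + self.ty,
        }
    }

    fn apply(self, x: f64, y: f64) -> (f64, f64) {
        (self.sx * x + self.kx * y + self.tx, self.ky * x + self.sy * y + self.ty)
    }
}

fn main() {
    // scale_at(2, 2, 10, 10): move the pivot to the origin, scale, move back.
    let m = Mat::translate(10.0, 10.0)
        .pre_concat(Mat::scale(2.0, 2.0))
        .pre_concat(Mat::translate(-10.0, -10.0));
    assert_eq!(m.apply(10.0, 10.0), (10.0, 10.0)); // the pivot stays put
    assert_eq!(m.apply(11.0, 10.0), (12.0, 10.0)); // other points scale around it
}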


@@ -3,6 +3,8 @@ use std::hash::{Hash, Hasher};
use std::io;
use std::sync::Arc;
use crate::diag::{bail, StrResult};
use crate::foundations::{cast, dict, Bytes, Cast, Dict, Smart, Value};
use ecow::{eco_format, EcoString};
use image::codecs::gif::GifDecoder;
use image::codecs::jpeg::JpegDecoder;
@@ -11,9 +13,6 @@ use image::{
guess_format, DynamicImage, ImageBuffer, ImageDecoder, ImageResult, Limits, Pixel,
};
use crate::diag::{bail, StrResult};
use crate::foundations::{cast, dict, Bytes, Cast, Dict, Smart, Value};
/// A decoded raster image.
#[derive(Clone, Hash)]
pub struct RasterImage(Arc<Repr>);
@@ -22,7 +21,8 @@ pub struct RasterImage(Arc<Repr>);
struct Repr {
data: Bytes,
format: RasterFormat,
dynamic: image::DynamicImage,
dynamic: Arc<DynamicImage>,
exif_rotation: Option<u32>,
icc: Option<Bytes>,
dpi: Option<f64>,
}
@@ -50,6 +50,8 @@ impl RasterImage {
format: RasterFormat,
icc: Smart<Bytes>,
) -> StrResult<RasterImage> {
let mut exif_rot = None;
let (dynamic, icc, dpi) = match format {
RasterFormat::Exchange(format) => {
fn decode<T: ImageDecoder>(
@ -85,6 +87,7 @@ impl RasterImage {
// Apply rotation from EXIF metadata.
if let Some(rotation) = exif.as_ref().and_then(exif_rotation) {
apply_rotation(&mut dynamic, rotation);
exif_rot = Some(rotation);
}
// Extract pixel density.
@@ -136,7 +139,14 @@
}
};
Ok(Self(Arc::new(Repr { data, format, dynamic, icc, dpi })))
Ok(Self(Arc::new(Repr {
data,
format,
exif_rotation: exif_rot,
dynamic: Arc::new(dynamic),
icc,
dpi,
})))
}
/// The raw image data.
@@ -159,6 +169,11 @@
self.dynamic().height()
}
/// The rotation from the image's EXIF metadata that was applied, if any.
pub fn exif_rotation(&self) -> Option<u32> {
self.0.exif_rotation
}
/// The image's pixel density in pixels per inch, if known.
///
/// This is guaranteed to be positive.
@@ -167,7 +182,7 @@
}
/// Access the underlying dynamic image.
pub fn dynamic(&self) -> &image::DynamicImage {
pub fn dynamic(&self) -> &Arc<DynamicImage> {
&self.0.dynamic
}
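
The stored value is the raw EXIF orientation tag that was fed to `apply_rotation`. As a reference for consumers of the new accessor, a hypothetical helper (not part of this diff) summarizing the standard meanings of the pure-rotation values:

// Clockwise rotation implied by an EXIF orientation tag, if it is a pure
// rotation. Values 2, 4, 5, and 7 additionally involve mirroring.
fn orientation_to_degrees_cw(orientation: u32) -> Option<u32> {
    match orientation {
        1 => Some(0), // already upright
        3 => Some(180),
        6 => Some(90),
        8 => Some(270),
        _ => None,
    }
}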


@@ -19,20 +19,13 @@ typst-macros = { workspace = true }
typst-syntax = { workspace = true }
typst-timing = { workspace = true }
typst-utils = { workspace = true }
arrayvec = { workspace = true }
base64 = { workspace = true }
bytemuck = { workspace = true }
comemo = { workspace = true }
ecow = { workspace = true }
image = { workspace = true }
indexmap = { workspace = true }
miniz_oxide = { workspace = true }
pdf-writer = { workspace = true }
krilla = { workspace = true }
krilla-svg = { workspace = true }
serde = { workspace = true }
subsetter = { workspace = true }
svg2pdf = { workspace = true }
ttf-parser = { workspace = true }
xmp-writer = { workspace = true }
[lints]
workspace = true


@@ -1,385 +0,0 @@
use std::num::NonZeroUsize;
use ecow::eco_format;
use pdf_writer::types::Direction;
use pdf_writer::writers::PageLabel;
use pdf_writer::{Finish, Name, Pdf, Ref, Str, TextStr};
use typst_library::diag::{bail, SourceResult};
use typst_library::foundations::{Datetime, Smart};
use typst_library::layout::Dir;
use typst_library::text::Lang;
use typst_syntax::Span;
use xmp_writer::{DateTime, LangId, RenditionClass, XmpWriter};
use crate::page::PdfPageLabel;
use crate::{hash_base64, outline, TextStrExt, Timestamp, Timezone, WithEverything};
/// Write the document catalog.
pub fn write_catalog(
ctx: WithEverything,
pdf: &mut Pdf,
alloc: &mut Ref,
) -> SourceResult<()> {
let lang = ctx
.resources
.languages
.iter()
.max_by_key(|(_, &count)| count)
.map(|(&l, _)| l);
let dir = if lang.map(Lang::dir) == Some(Dir::RTL) {
Direction::R2L
} else {
Direction::L2R
};
// Write the outline tree.
let outline_root_id = outline::write_outline(pdf, alloc, &ctx);
// Write the page labels.
let page_labels = write_page_labels(pdf, alloc, &ctx);
// Write the document information.
let info_ref = alloc.bump();
let mut info = pdf.document_info(info_ref);
let mut xmp = XmpWriter::new();
if let Some(title) = &ctx.document.info.title {
info.title(TextStr::trimmed(title));
xmp.title([(None, title.as_str())]);
}
if let Some(description) = &ctx.document.info.description {
info.subject(TextStr::trimmed(description));
xmp.description([(None, description.as_str())]);
}
let authors = &ctx.document.info.author;
if !authors.is_empty() {
// Turns out that if the authors are given in both the document
// information dictionary and the XMP metadata, Acrobat takes a little
// bit of both: The first author from the document information
// dictionary and the remaining authors from the XMP metadata.
//
// To fix this for Acrobat, we could omit the remaining authors or all
// metadata from the document information catalog (it is optional) and
// only write XMP. However, not all other tools (including Apple
// Preview) read the XMP data. This means we do want to include all
// authors in the document information dictionary.
//
// Thus, the only alternative is to fold all authors into a single
// `<rdf:li>` in the XMP metadata. This is, in fact, exactly what the
// PDF/A spec Part 1 section 6.7.3 has to say about the matter. It's a
// bit weird to not use the array (and it makes Acrobat show the author
// list in quotes), but there's not much we can do about that.
let joined = authors.join(", ");
info.author(TextStr::trimmed(&joined));
xmp.creator([joined.as_str()]);
}
let creator = eco_format!("Typst {}", env!("CARGO_PKG_VERSION"));
info.creator(TextStr(&creator));
xmp.creator_tool(&creator);
let keywords = &ctx.document.info.keywords;
if !keywords.is_empty() {
let joined = keywords.join(", ");
info.keywords(TextStr::trimmed(&joined));
xmp.pdf_keywords(&joined);
}
let (date, tz) = document_date(ctx.document.info.date, ctx.options.timestamp);
if let Some(pdf_date) = date.and_then(|date| pdf_date(date, tz)) {
info.creation_date(pdf_date);
info.modified_date(pdf_date);
}
info.finish();
// A unique ID for this instance of the document. Changes if anything
// changes in the frames.
let instance_id = hash_base64(&pdf.as_bytes());
// Determine the document's ID. It should be as stable as possible.
const PDF_VERSION: &str = "PDF-1.7";
let doc_id = if let Smart::Custom(ident) = ctx.options.ident {
// We were provided with a stable ID. Yay!
hash_base64(&(PDF_VERSION, ident))
} else if ctx.document.info.title.is_some() && !ctx.document.info.author.is_empty() {
// If not provided from the outside, but title and author were given, we
// compute a hash of them, which should be reasonably stable and unique.
hash_base64(&(PDF_VERSION, &ctx.document.info.title, &ctx.document.info.author))
} else {
// The user provided no usable metadata which we can use as an `/ID`.
instance_id.clone()
};
xmp.document_id(&doc_id);
xmp.instance_id(&instance_id);
xmp.format("application/pdf");
xmp.pdf_version("1.7");
xmp.language(ctx.resources.languages.keys().map(|lang| LangId(lang.as_str())));
xmp.num_pages(ctx.document.pages.len() as u32);
xmp.rendition_class(RenditionClass::Proof);
if let Some(xmp_date) = date.and_then(|date| xmp_date(date, tz)) {
xmp.create_date(xmp_date);
xmp.modify_date(xmp_date);
if ctx.options.standards.pdfa {
let mut history = xmp.history();
history
.add_event()
.action(xmp_writer::ResourceEventAction::Saved)
.when(xmp_date)
.instance_id(&eco_format!("{instance_id}_source"));
history
.add_event()
.action(xmp_writer::ResourceEventAction::Converted)
.when(xmp_date)
.instance_id(&instance_id)
.software_agent(&creator);
}
}
// Assert dominance.
if let Some((part, conformance)) = ctx.options.standards.pdfa_part {
let mut extension_schemas = xmp.extension_schemas();
extension_schemas
.xmp_media_management()
.properties()
.describe_instance_id();
extension_schemas.pdf().properties().describe_all();
extension_schemas.finish();
xmp.pdfa_part(part);
xmp.pdfa_conformance(conformance);
}
let xmp_buf = xmp.finish(None);
let meta_ref = alloc.bump();
pdf.stream(meta_ref, xmp_buf.as_bytes())
.pair(Name(b"Type"), Name(b"Metadata"))
.pair(Name(b"Subtype"), Name(b"XML"));
// Set IDs only now, so that we don't need to clone them.
pdf.set_file_id((doc_id.into_bytes(), instance_id.into_bytes()));
// Write the document catalog.
let catalog_ref = alloc.bump();
let mut catalog = pdf.catalog(catalog_ref);
catalog.pages(ctx.page_tree_ref);
catalog.viewer_preferences().direction(dir);
catalog.metadata(meta_ref);
let has_dests = !ctx.references.named_destinations.dests.is_empty();
let has_embeddings = !ctx.references.embedded_files.is_empty();
// Write the `/Names` dictionary.
if has_dests || has_embeddings {
// Write the named destination tree if there are any entries.
let mut name_dict = catalog.names();
if has_dests {
let mut dests_name_tree = name_dict.destinations();
let mut names = dests_name_tree.names();
for &(name, dest_ref, ..) in &ctx.references.named_destinations.dests {
names.insert(Str(name.resolve().as_bytes()), dest_ref);
}
}
if has_embeddings {
let mut embedded_files = name_dict.embedded_files();
let mut names = embedded_files.names();
for (name, file_ref) in &ctx.references.embedded_files {
names.insert(Str(name.as_bytes()), *file_ref);
}
}
}
if has_embeddings && ctx.options.standards.pdfa {
// PDF 2.0, but ISO 19005-3 (PDF/A-3) Annex E allows it for PDF/A-3.
let mut associated_files = catalog.insert(Name(b"AF")).array().typed();
for (_, file_ref) in ctx.references.embedded_files {
associated_files.item(file_ref).finish();
}
}
// Insert the page labels.
if !page_labels.is_empty() {
let mut num_tree = catalog.page_labels();
let mut entries = num_tree.nums();
for (n, r) in &page_labels {
entries.insert(n.get() as i32 - 1, *r);
}
}
if let Some(outline_root_id) = outline_root_id {
catalog.outlines(outline_root_id);
}
if let Some(lang) = lang {
catalog.lang(TextStr(lang.as_str()));
}
if ctx.options.standards.pdfa {
catalog
.output_intents()
.push()
.subtype(pdf_writer::types::OutputIntentSubtype::PDFA)
.output_condition(TextStr("sRGB"))
.output_condition_identifier(TextStr("Custom"))
.info(TextStr("sRGB IEC61966-2.1"))
.dest_output_profile(ctx.globals.color_functions.srgb.unwrap());
}
catalog.finish();
if ctx.options.standards.pdfa && pdf.refs().count() > 8388607 {
bail!(Span::detached(), "too many PDF objects");
}
Ok(())
}
/// Write the page labels.
pub(crate) fn write_page_labels(
chunk: &mut Pdf,
alloc: &mut Ref,
ctx: &WithEverything,
) -> Vec<(NonZeroUsize, Ref)> {
// If no exported page is labeled, we can skip writing the page labels.
if !ctx.pages.iter().filter_map(Option::as_ref).any(|p| {
p.label
.as_ref()
.is_some_and(|l| l.prefix.is_some() || l.style.is_some())
}) {
return Vec::new();
}
let empty_label = PdfPageLabel::default();
let mut result = vec![];
let mut prev: Option<&PdfPageLabel> = None;
// Skip non-exported pages for numbering.
for (i, page) in ctx.pages.iter().filter_map(Option::as_ref).enumerate() {
let nr = NonZeroUsize::new(1 + i).unwrap();
// If there are pages with empty labels between labeled pages, we must
// write empty PageLabel entries.
let label = page.label.as_ref().unwrap_or(&empty_label);
if let Some(pre) = prev {
if label.prefix == pre.prefix
&& label.style == pre.style
&& label.offset == pre.offset.map(|n| n.saturating_add(1))
{
prev = Some(label);
continue;
}
}
let id = alloc.bump();
let mut entry = chunk.indirect(id).start::<PageLabel>();
// Only add what is actually provided. For example, don't add an empty
// prefix string if none was given.
if let Some(prefix) = &label.prefix {
entry.prefix(TextStr::trimmed(prefix));
}
if let Some(style) = label.style {
entry.style(style.to_pdf_numbering_style());
}
if let Some(offset) = label.offset {
entry.offset(offset.get() as i32);
}
result.push((nr, id));
prev = Some(label);
}
result
}
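// The run detection above is effectively a run-length encoding of page
// labels: an entry is only emitted where a label does not continue the
// previous run. A simplified sketch with a hypothetical label type:
#[derive(Default, PartialEq)]
struct MiniLabel {
    prefix: Option<String>,
    style: Option<char>,
    offset: Option<u32>,
}

fn continues_run(prev: &MiniLabel, cur: &MiniLabel) -> bool {
    cur.prefix == prev.prefix
        && cur.style == prev.style
        && cur.offset == prev.offset.map(|n| n.saturating_add(1))
}

#[test]
fn page_label_runs() {
    let page = |offset| MiniLabel {
        style: Some('D'),
        offset: Some(offset),
        ..Default::default()
    };
    assert!(continues_run(&page(1), &page(2))); // 1 -> 2 continues the run
    assert!(!continues_run(&page(2), &page(5))); // 2 -> 5 jumps: a new entry starts
}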
/// Resolve the document date.
///
/// (1) If the `document.date` is set to specific `datetime` or `none`, use it.
/// (2) If the `document.date` is set to `auto` or not set, try to use the
/// date from the options.
/// (3) Otherwise, we don't write date metadata.
pub fn document_date(
document_date: Smart<Option<Datetime>>,
timestamp: Option<Timestamp>,
) -> (Option<Datetime>, Option<Timezone>) {
match (document_date, timestamp) {
(Smart::Custom(date), _) => (date, None),
(Smart::Auto, Some(timestamp)) => {
(Some(timestamp.datetime), Some(timestamp.timezone))
}
_ => (None, None),
}
}
/// Converts a datetime to a pdf-writer date.
pub fn pdf_date(datetime: Datetime, tz: Option<Timezone>) -> Option<pdf_writer::Date> {
let year = datetime.year().filter(|&y| y >= 0)? as u16;
let mut pdf_date = pdf_writer::Date::new(year);
if let Some(month) = datetime.month() {
pdf_date = pdf_date.month(month);
}
if let Some(day) = datetime.day() {
pdf_date = pdf_date.day(day);
}
if let Some(h) = datetime.hour() {
pdf_date = pdf_date.hour(h);
}
if let Some(m) = datetime.minute() {
pdf_date = pdf_date.minute(m);
}
if let Some(s) = datetime.second() {
pdf_date = pdf_date.second(s);
}
match tz {
Some(Timezone::UTC) => {
pdf_date = pdf_date.utc_offset_hour(0).utc_offset_minute(0)
}
Some(Timezone::Local { hour_offset, minute_offset }) => {
pdf_date =
pdf_date.utc_offset_hour(hour_offset).utc_offset_minute(minute_offset)
}
None => {}
}
Some(pdf_date)
}
/// Converts a datetime to an xmp-writer datetime.
fn xmp_date(
datetime: Datetime,
timezone: Option<Timezone>,
) -> Option<xmp_writer::DateTime> {
let year = datetime.year().filter(|&y| y >= 0)? as u16;
let timezone = timezone.map(|tz| match tz {
Timezone::UTC => xmp_writer::Timezone::Utc,
Timezone::Local { hour_offset, minute_offset } => {
// The xmp-writer use signed integers for the minute offset, which
// can be buggy if the minute offset is negative. And because our
// minute_offset is ensured to be `0 <= minute_offset < 60`, we can
// safely cast it to a signed integer.
xmp_writer::Timezone::Local { hour: hour_offset, minute: minute_offset as i8 }
}
});
Some(DateTime {
year,
month: datetime.month(),
day: datetime.day(),
hour: datetime.hour(),
minute: datetime.minute(),
second: datetime.second(),
timezone,
})
}


@@ -1,394 +0,0 @@
use std::sync::LazyLock;
use arrayvec::ArrayVec;
use pdf_writer::{writers, Chunk, Dict, Filter, Name, Ref};
use typst_library::diag::{bail, SourceResult};
use typst_library::visualize::{Color, ColorSpace, Paint};
use typst_syntax::Span;
use crate::{content, deflate, PdfChunk, PdfOptions, Renumber, WithResources};
// The names of the color spaces.
pub const SRGB: Name<'static> = Name(b"srgb");
pub const D65_GRAY: Name<'static> = Name(b"d65gray");
pub const LINEAR_SRGB: Name<'static> = Name(b"linearrgb");
// The ICC profiles.
static SRGB_ICC_DEFLATED: LazyLock<Vec<u8>> =
LazyLock::new(|| deflate(typst_assets::icc::S_RGB_V4));
static GRAY_ICC_DEFLATED: LazyLock<Vec<u8>> =
LazyLock::new(|| deflate(typst_assets::icc::S_GREY_V4));
/// The color spaces present in the PDF document
#[derive(Default)]
pub struct ColorSpaces {
use_srgb: bool,
use_d65_gray: bool,
use_linear_rgb: bool,
}
impl ColorSpaces {
/// Mark a color space as used.
pub fn mark_as_used(&mut self, color_space: ColorSpace) {
match color_space {
ColorSpace::Oklch
| ColorSpace::Oklab
| ColorSpace::Hsl
| ColorSpace::Hsv
| ColorSpace::Srgb => {
self.use_srgb = true;
}
ColorSpace::D65Gray => {
self.use_d65_gray = true;
}
ColorSpace::LinearRgb => {
self.use_linear_rgb = true;
}
ColorSpace::Cmyk => {}
}
}
/// Write the color spaces to the PDF file.
pub fn write_color_spaces(&self, mut spaces: Dict, refs: &ColorFunctionRefs) {
if self.use_srgb {
write(ColorSpace::Srgb, spaces.insert(SRGB).start(), refs);
}
if self.use_d65_gray {
write(ColorSpace::D65Gray, spaces.insert(D65_GRAY).start(), refs);
}
if self.use_linear_rgb {
write(ColorSpace::LinearRgb, spaces.insert(LINEAR_SRGB).start(), refs);
}
}
/// Write the necessary color space functions and ICC profiles to the
/// PDF file.
pub fn write_functions(&self, chunk: &mut Chunk, refs: &ColorFunctionRefs) {
// Write the sRGB color space.
if let Some(id) = refs.srgb {
chunk
.icc_profile(id, &SRGB_ICC_DEFLATED)
.n(3)
.range([0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
.filter(Filter::FlateDecode);
}
// Write the gray color space.
if let Some(id) = refs.d65_gray {
chunk
.icc_profile(id, &GRAY_ICC_DEFLATED)
.n(1)
.range([0.0, 1.0])
.filter(Filter::FlateDecode);
}
}
/// Merge two sets of color space usage information: a given color space is
/// considered to be used if it is used on either side.
pub fn merge(&mut self, other: &Self) {
self.use_d65_gray |= other.use_d65_gray;
self.use_linear_rgb |= other.use_linear_rgb;
self.use_srgb |= other.use_srgb;
}
}
/// Write the color space.
pub fn write(
color_space: ColorSpace,
writer: writers::ColorSpace,
refs: &ColorFunctionRefs,
) {
match color_space {
ColorSpace::Srgb
| ColorSpace::Oklab
| ColorSpace::Hsl
| ColorSpace::Hsv
| ColorSpace::Oklch => writer.icc_based(refs.srgb.unwrap()),
ColorSpace::D65Gray => writer.icc_based(refs.d65_gray.unwrap()),
ColorSpace::LinearRgb => {
writer.cal_rgb(
[0.9505, 1.0, 1.0888],
None,
Some([1.0, 1.0, 1.0]),
Some([
0.4124, 0.2126, 0.0193, 0.3576, 0.715, 0.1192, 0.1805, 0.0722, 0.9505,
]),
);
}
ColorSpace::Cmyk => writer.device_cmyk(),
}
}
/// Global references for color conversion functions.
///
/// These functions are written at most once in the final document (they are
/// omitted when not needed) and are shared by all color space dictionaries.
pub struct ColorFunctionRefs {
pub srgb: Option<Ref>,
d65_gray: Option<Ref>,
}
impl Renumber for ColorFunctionRefs {
fn renumber(&mut self, offset: i32) {
if let Some(r) = &mut self.srgb {
r.renumber(offset);
}
if let Some(r) = &mut self.d65_gray {
r.renumber(offset);
}
}
}
/// Allocate all necessary [`ColorFunctionRefs`].
pub fn alloc_color_functions_refs(
context: &WithResources,
) -> SourceResult<(PdfChunk, ColorFunctionRefs)> {
let mut chunk = PdfChunk::new();
let mut used_color_spaces = ColorSpaces::default();
if context.options.standards.pdfa {
used_color_spaces.mark_as_used(ColorSpace::Srgb);
}
context.resources.traverse(&mut |r| {
used_color_spaces.merge(&r.colors);
Ok(())
})?;
let refs = ColorFunctionRefs {
srgb: if used_color_spaces.use_srgb { Some(chunk.alloc()) } else { None },
d65_gray: if used_color_spaces.use_d65_gray { Some(chunk.alloc()) } else { None },
};
Ok((chunk, refs))
}
/// Encodes the color into four f32s, which can be used in a PDF file.
/// Ensures that the values are in the range [0.0, 1.0].
///
/// # Why?
/// - Oklab: The a and b components are in the range [-0.5, 0.5] and the PDF
/// specifies (and some readers enforce) that all color values be in the range
/// [0.0, 1.0]. This means that the PostScript function and the encoded color
/// must be offset by 0.5.
/// - HSV/HSL: The hue component is in the range [0.0, 360.0] and the PDF format
/// specifies that it must be in the range [0.0, 1.0]. This means that the
/// PostScript function and the encoded color must be divided by 360.0.
pub trait ColorEncode {
/// Performs the color to PDF f32 array conversion.
fn encode(&self, color: Color) -> [f32; 4];
}
impl ColorEncode for ColorSpace {
fn encode(&self, color: Color) -> [f32; 4] {
match self {
ColorSpace::Oklab | ColorSpace::Oklch | ColorSpace::Hsl | ColorSpace::Hsv => {
color.to_space(ColorSpace::Srgb).to_vec4()
}
_ => color.to_space(*self).to_vec4(),
}
}
}
/// Encodes a paint into either a fill or stroke color.
pub(super) trait PaintEncode {
/// Set the paint as the fill color.
fn set_as_fill(
&self,
ctx: &mut content::Builder,
on_text: bool,
transforms: content::Transforms,
) -> SourceResult<()>;
/// Set the paint as the stroke color.
fn set_as_stroke(
&self,
ctx: &mut content::Builder,
on_text: bool,
transforms: content::Transforms,
) -> SourceResult<()>;
}
impl PaintEncode for Paint {
fn set_as_fill(
&self,
ctx: &mut content::Builder,
on_text: bool,
transforms: content::Transforms,
) -> SourceResult<()> {
match self {
Self::Solid(c) => c.set_as_fill(ctx, on_text, transforms),
Self::Gradient(gradient) => gradient.set_as_fill(ctx, on_text, transforms),
Self::Tiling(tiling) => tiling.set_as_fill(ctx, on_text, transforms),
}
}
fn set_as_stroke(
&self,
ctx: &mut content::Builder,
on_text: bool,
transforms: content::Transforms,
) -> SourceResult<()> {
match self {
Self::Solid(c) => c.set_as_stroke(ctx, on_text, transforms),
Self::Gradient(gradient) => gradient.set_as_stroke(ctx, on_text, transforms),
Self::Tiling(tiling) => tiling.set_as_stroke(ctx, on_text, transforms),
}
}
}
impl PaintEncode for Color {
fn set_as_fill(
&self,
ctx: &mut content::Builder,
_: bool,
_: content::Transforms,
) -> SourceResult<()> {
match self {
Color::Luma(_) => {
ctx.resources.colors.mark_as_used(ColorSpace::D65Gray);
ctx.set_fill_color_space(D65_GRAY);
let [l, _, _, _] = ColorSpace::D65Gray.encode(*self);
ctx.content.set_fill_color([l]);
}
Color::LinearRgb(_) => {
ctx.resources.colors.mark_as_used(ColorSpace::LinearRgb);
ctx.set_fill_color_space(LINEAR_SRGB);
let [r, g, b, _] = ColorSpace::LinearRgb.encode(*self);
ctx.content.set_fill_color([r, g, b]);
}
// Oklab & friends are encoded as RGB.
Color::Rgb(_)
| Color::Oklab(_)
| Color::Oklch(_)
| Color::Hsl(_)
| Color::Hsv(_) => {
ctx.resources.colors.mark_as_used(ColorSpace::Srgb);
ctx.set_fill_color_space(SRGB);
let [r, g, b, _] = ColorSpace::Srgb.encode(*self);
ctx.content.set_fill_color([r, g, b]);
}
Color::Cmyk(_) => {
check_cmyk_allowed(ctx.options)?;
ctx.reset_fill_color_space();
let [c, m, y, k] = ColorSpace::Cmyk.encode(*self);
ctx.content.set_fill_cmyk(c, m, y, k);
}
}
Ok(())
}
fn set_as_stroke(
&self,
ctx: &mut content::Builder,
_: bool,
_: content::Transforms,
) -> SourceResult<()> {
match self {
Color::Luma(_) => {
ctx.resources.colors.mark_as_used(ColorSpace::D65Gray);
ctx.set_stroke_color_space(D65_GRAY);
let [l, _, _, _] = ColorSpace::D65Gray.encode(*self);
ctx.content.set_stroke_color([l]);
}
Color::LinearRgb(_) => {
ctx.resources.colors.mark_as_used(ColorSpace::LinearRgb);
ctx.set_stroke_color_space(LINEAR_SRGB);
let [r, g, b, _] = ColorSpace::LinearRgb.encode(*self);
ctx.content.set_stroke_color([r, g, b]);
}
// Oklab & friends are encoded as RGB.
Color::Rgb(_)
| Color::Oklab(_)
| Color::Oklch(_)
| Color::Hsl(_)
| Color::Hsv(_) => {
ctx.resources.colors.mark_as_used(ColorSpace::Srgb);
ctx.set_stroke_color_space(SRGB);
let [r, g, b, _] = ColorSpace::Srgb.encode(*self);
ctx.content.set_stroke_color([r, g, b]);
}
Color::Cmyk(_) => {
check_cmyk_allowed(ctx.options)?;
ctx.reset_stroke_color_space();
let [c, m, y, k] = ColorSpace::Cmyk.encode(*self);
ctx.content.set_stroke_cmyk(c, m, y, k);
}
}
Ok(())
}
}
/// Extra color space functions.
pub(super) trait ColorSpaceExt {
/// Returns the range of the color space.
fn range(self) -> &'static [f32];
/// Converts a color to the color space.
fn convert<U: QuantizedColor>(self, color: Color) -> ArrayVec<U, 4>;
}
impl ColorSpaceExt for ColorSpace {
fn range(self) -> &'static [f32] {
match self {
ColorSpace::D65Gray => &[0.0, 1.0],
ColorSpace::Oklab => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
ColorSpace::Oklch => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
ColorSpace::LinearRgb => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
ColorSpace::Srgb => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
ColorSpace::Cmyk => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
ColorSpace::Hsl => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
ColorSpace::Hsv => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
}
}
fn convert<U: QuantizedColor>(self, color: Color) -> ArrayVec<U, 4> {
let components = self.encode(color);
self.range()
.chunks(2)
.zip(components)
.map(|(range, component)| U::quantize(component, [range[0], range[1]]))
.collect()
}
}
/// Quantizes a color component to a specific type.
pub(super) trait QuantizedColor {
fn quantize(color: f32, range: [f32; 2]) -> Self;
}
impl QuantizedColor for u16 {
fn quantize(color: f32, [min, max]: [f32; 2]) -> Self {
let value = (color - min) / (max - min);
(value * Self::MAX as f32).round().clamp(0.0, Self::MAX as f32) as Self
}
}
impl QuantizedColor for f32 {
fn quantize(color: f32, [min, max]: [f32; 2]) -> Self {
color.clamp(min, max)
}
}
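// A worked check of the quantization above: over the full [0.0, 1.0] range,
// 0.0 maps to 0, 1.0 to u16::MAX, and 0.5 rounds up to 32768 (e.g. an Oklab
// `a` component of 0.0 after the +0.5 offset described earlier).
#[test]
fn quantize_u16_examples() {
    assert_eq!(u16::quantize(0.0, [0.0, 1.0]), 0);
    assert_eq!(u16::quantize(1.0, [0.0, 1.0]), u16::MAX);
    assert_eq!(u16::quantize(0.5, [0.0, 1.0]), 32768); // 0.5 * 65535 = 32767.5
}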
/// Fails with an error if PDF/A processing is enabled.
pub(super) fn check_cmyk_allowed(options: &PdfOptions) -> SourceResult<()> {
if options.standards.pdfa {
bail!(
Span::detached(),
"cmyk colors are not currently supported by PDF/A export"
);
}
Ok(())
}


@@ -1,344 +0,0 @@
//! OpenType fonts generally define monochrome glyphs, but they can also define
//! glyphs with colors. This is how emojis are generally implemented for
//! example.
//!
//! There are various standards to represent color glyphs, but PDF readers don't
//! support any of them natively, so Typst has to handle them manually.
use std::collections::HashMap;
use ecow::eco_format;
use indexmap::IndexMap;
use pdf_writer::types::UnicodeCmap;
use pdf_writer::writers::WMode;
use pdf_writer::{Filter, Finish, Name, Rect, Ref};
use typst_library::diag::{bail, error, SourceDiagnostic, SourceResult};
use typst_library::foundations::Repr;
use typst_library::layout::Em;
use typst_library::text::color::glyph_frame;
use typst_library::text::{Font, Glyph, TextItemView};
use crate::font::{base_font_name, write_font_descriptor, CMAP_NAME, SYSTEM_INFO};
use crate::resources::{Resources, ResourcesRefs};
use crate::{content, EmExt, PdfChunk, PdfOptions, WithGlobalRefs};
/// Write color fonts in the PDF document.
///
/// They are written as Type3 fonts, which map glyph IDs to arbitrary PDF
/// instructions.
pub fn write_color_fonts(
context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<ColorFontSlice, Ref>)> {
let mut out = HashMap::new();
let mut chunk = PdfChunk::new();
context.resources.traverse(&mut |resources: &Resources| {
let Some(color_fonts) = &resources.color_fonts else {
return Ok(());
};
for (color_font, font_slice) in color_fonts.iter() {
if out.contains_key(&font_slice) {
continue;
}
// Allocate some IDs.
let subfont_id = chunk.alloc();
let cmap_ref = chunk.alloc();
let descriptor_ref = chunk.alloc();
let widths_ref = chunk.alloc();
// And a map between glyph IDs and the instructions to draw this
// glyph.
let mut glyphs_to_instructions = Vec::new();
let start = font_slice.subfont * 256;
let end = (start + 256).min(color_font.glyphs.len());
let glyph_count = end - start;
let subset = &color_font.glyphs[start..end];
let mut widths = Vec::new();
let mut gids = Vec::new();
let scale_factor = font_slice.font.ttf().units_per_em() as f32;
// Write the instructions for each glyph.
for color_glyph in subset {
let instructions_stream_ref = chunk.alloc();
let width = font_slice
.font
.advance(color_glyph.gid)
.unwrap_or(Em::new(0.0))
.get() as f32
* scale_factor;
widths.push(width);
chunk
.stream(
instructions_stream_ref,
color_glyph.instructions.content.wait(),
)
.filter(Filter::FlateDecode);
// Use this stream as instructions to draw the glyph.
glyphs_to_instructions.push(instructions_stream_ref);
gids.push(color_glyph.gid);
}
// Determine the base font name.
gids.sort();
let base_font = base_font_name(&font_slice.font, &gids);
// Write the Type3 font object.
let mut pdf_font = chunk.type3_font(subfont_id);
pdf_font.name(Name(base_font.as_bytes()));
pdf_font.pair(Name(b"Resources"), color_fonts.resources.reference);
pdf_font.bbox(color_font.bbox);
pdf_font.matrix([1.0 / scale_factor, 0.0, 0.0, 1.0 / scale_factor, 0.0, 0.0]);
pdf_font.first_char(0);
pdf_font.last_char((glyph_count - 1) as u8);
pdf_font.pair(Name(b"Widths"), widths_ref);
pdf_font.to_unicode(cmap_ref);
pdf_font.font_descriptor(descriptor_ref);
// Write the /CharProcs dictionary, that maps glyph names to
// drawing instructions.
let mut char_procs = pdf_font.char_procs();
for (gid, instructions_ref) in glyphs_to_instructions.iter().enumerate() {
char_procs
.pair(Name(eco_format!("glyph{gid}").as_bytes()), *instructions_ref);
}
char_procs.finish();
// Write the /Encoding dictionary.
let names = (0..glyph_count)
.map(|gid| eco_format!("glyph{gid}"))
.collect::<Vec<_>>();
pdf_font
.encoding_custom()
.differences()
.consecutive(0, names.iter().map(|name| Name(name.as_bytes())));
pdf_font.finish();
// Encode a CMAP to make it possible to search or copy glyphs.
let glyph_set = resources.color_glyph_sets.get(&font_slice.font).unwrap();
let mut cmap = UnicodeCmap::new(CMAP_NAME, SYSTEM_INFO);
for (index, glyph) in subset.iter().enumerate() {
let Some(text) = glyph_set.get(&glyph.gid) else {
continue;
};
if !text.is_empty() {
cmap.pair_with_multiple(index as u8, text.chars());
}
}
chunk.cmap(cmap_ref, &cmap.finish()).writing_mode(WMode::Horizontal);
// Write the font descriptor.
write_font_descriptor(
&mut chunk,
descriptor_ref,
&font_slice.font,
&base_font,
);
// Write the widths array
chunk.indirect(widths_ref).array().items(widths);
out.insert(font_slice, subfont_id);
}
Ok(())
})?;
Ok((chunk, out))
}
/// A mapping between `Font`s and all the corresponding `ColorFont`s.
///
/// This mapping is one-to-many because there can only be 256 glyphs in a Type 3
/// font, and fonts generally have more color glyphs than that.
pub struct ColorFontMap<R> {
/// The mapping itself.
map: IndexMap<Font, ColorFont>,
/// The resources required to render the fonts in this map.
///
/// For example, this can be the images for glyphs based on bitmaps or SVG.
pub resources: Resources<R>,
/// The number of font slices (groups of 256 color glyphs), across all color
/// fonts.
total_slice_count: usize,
}
/// A collection of Type3 fonts belonging to the same TTF font.
pub struct ColorFont {
/// The IDs of each sub-slice of this font. They are the numbers after "Cf"
/// in the Resources dictionaries.
slice_ids: Vec<usize>,
/// The list of all color glyphs in this family.
///
/// The index in this vector modulo 256 corresponds to the index in one of
/// the Type3 fonts in `refs` (the `n`-th in the vector, where `n` is the
/// quotient of the index divided by 256).
pub glyphs: Vec<ColorGlyph>,
/// The global bounding box of the font.
pub bbox: Rect,
/// A mapping between glyph IDs and character indices in the `glyphs`
/// vector.
glyph_indices: HashMap<u16, usize>,
}
/// A single color glyph.
pub struct ColorGlyph {
/// The ID of the glyph.
pub gid: u16,
/// Instructions to draw the glyph.
pub instructions: content::Encoded,
}
impl ColorFontMap<()> {
/// Creates a new empty mapping.
pub fn new() -> Self {
Self {
map: IndexMap::new(),
total_slice_count: 0,
resources: Resources::default(),
}
}
/// For a given glyph in a TTF font, give the ID of the Type3 font and the
/// index of the glyph inside of this Type3 font.
///
/// If this is the first occurrence of this glyph in this font, it will
/// start its encoding and add it to the list of known glyphs.
pub fn get(
&mut self,
options: &PdfOptions,
text: &TextItemView,
glyph: &Glyph,
) -> SourceResult<(usize, u8)> {
let font = &text.item.font;
let color_font = self.map.entry(font.clone()).or_insert_with(|| {
let global_bbox = font.ttf().global_bounding_box();
let bbox = Rect::new(
font.to_em(global_bbox.x_min).to_font_units(),
font.to_em(global_bbox.y_min).to_font_units(),
font.to_em(global_bbox.x_max).to_font_units(),
font.to_em(global_bbox.y_max).to_font_units(),
);
ColorFont {
bbox,
slice_ids: Vec::new(),
glyphs: Vec::new(),
glyph_indices: HashMap::new(),
}
});
Ok(if let Some(index_of_glyph) = color_font.glyph_indices.get(&glyph.id) {
// If we already know this glyph, return it.
(color_font.slice_ids[index_of_glyph / 256], *index_of_glyph as u8)
} else {
// Otherwise, allocate a new ColorGlyph in the font, and a new Type3 font
// if needed
let index = color_font.glyphs.len();
if index % 256 == 0 {
color_font.slice_ids.push(self.total_slice_count);
self.total_slice_count += 1;
}
let (frame, tofu) = glyph_frame(font, glyph.id);
if options.standards.pdfa && tofu {
bail!(failed_to_convert(text, glyph));
}
let width = font.advance(glyph.id).unwrap_or(Em::new(0.0)).get()
* font.units_per_em();
let instructions = content::build(
options,
&mut self.resources,
&frame,
None,
Some(width as f32),
)?;
color_font.glyphs.push(ColorGlyph { gid: glyph.id, instructions });
color_font.glyph_indices.insert(glyph.id, index);
(color_font.slice_ids[index / 256], index as u8)
})
}
/// Assign references to the resource dictionary used by this set of color
/// fonts.
pub fn with_refs(self, refs: &ResourcesRefs) -> ColorFontMap<Ref> {
ColorFontMap {
map: self.map,
resources: self.resources.with_refs(refs),
total_slice_count: self.total_slice_count,
}
}
}
impl<R> ColorFontMap<R> {
/// Iterate over all Type3 fonts.
///
/// Each item of this iterator maps to a Type3 font: it contains
/// at most 256 glyphs. The same TTF font can yield multiple Type3 fonts.
pub fn iter(&self) -> ColorFontMapIter<'_, R> {
ColorFontMapIter { map: self, font_index: 0, slice_index: 0 }
}
}
/// Iterator over a [`ColorFontMap`].
///
/// See [`ColorFontMap::iter`].
pub struct ColorFontMapIter<'a, R> {
/// The map over which to iterate.
map: &'a ColorFontMap<R>,
/// The index of the TTF font over which we are currently iterating.
font_index: usize,
/// The sub-font (slice of at most 256 glyphs) we are currently at.
slice_index: usize,
}
impl<'a, R> Iterator for ColorFontMapIter<'a, R> {
type Item = (&'a ColorFont, ColorFontSlice);
fn next(&mut self) -> Option<Self::Item> {
let (font, color_font) = self.map.map.get_index(self.font_index)?;
let slice_count = (color_font.glyphs.len() / 256) + 1;
if self.slice_index >= slice_count {
self.font_index += 1;
self.slice_index = 0;
return self.next();
}
let slice = ColorFontSlice { font: font.clone(), subfont: self.slice_index };
self.slice_index += 1;
Some((color_font, slice))
}
}
/// A set of at most 256 glyphs (a limit imposed on Type3 fonts by the PDF
/// specification) that represents a part of a TTF font.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct ColorFontSlice {
/// The original TTF font.
pub font: Font,
/// The index of the Type3 font, among all those that are necessary to
/// represent the subset of the TTF font we are interested in.
pub subfont: usize,
}
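// The index arithmetic behind this slicing, in isolation: a flat glyph index
// maps to a (Type3 subfont number, code point within that subfont) pair. A
// small illustrative helper (not part of the original file):
fn slice_of(index: usize) -> (usize, u8) {
    (index / 256, (index % 256) as u8)
}

#[test]
fn type3_slicing() {
    assert_eq!(slice_of(0), (0, 0));
    assert_eq!(slice_of(255), (0, 255)); // last glyph of the first Type3 font
    assert_eq!(slice_of(256), (1, 0)); // the 257th glyph opens a second font
}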
/// The error when the glyph could not be converted.
#[cold]
fn failed_to_convert(text: &TextItemView, glyph: &Glyph) -> SourceDiagnostic {
let mut diag = error!(
glyph.span.0,
"the glyph for {} could not be exported",
text.glyph_text(glyph).repr()
);
if text.item.font.ttf().tables().cff2.is_some() {
diag.hint("CFF2 fonts are not currently supported");
}
diag
}


@@ -1,823 +0,0 @@
//! Generic writer for PDF content.
//!
//! It is used to write page contents, color glyph instructions, and tilings.
//!
//! See also [`pdf_writer::Content`].
use ecow::eco_format;
use pdf_writer::types::{
ColorSpaceOperand, LineCapStyle, LineJoinStyle, TextRenderingMode,
};
use pdf_writer::writers::PositionedItems;
use pdf_writer::{Content, Finish, Name, Rect, Str};
use typst_library::diag::{bail, error, SourceDiagnostic, SourceResult};
use typst_library::foundations::Repr;
use typst_library::layout::{
Abs, Em, Frame, FrameItem, GroupItem, Point, Ratio, Size, Transform,
};
use typst_library::model::Destination;
use typst_library::text::color::should_outline;
use typst_library::text::{Font, Glyph, TextItem, TextItemView};
use typst_library::visualize::{
Curve, CurveItem, FillRule, FixedStroke, Geometry, Image, LineCap, LineJoin, Paint,
Shape,
};
use typst_syntax::Span;
use typst_utils::{Deferred, Numeric, SliceExt};
use crate::color::PaintEncode;
use crate::color_font::ColorFontMap;
use crate::extg::ExtGState;
use crate::image::deferred_image;
use crate::resources::Resources;
use crate::{deflate_deferred, AbsExt, ContentExt, EmExt, PdfOptions, StrExt};
/// Encode a [`Frame`] into a content stream.
///
/// The resources that were used in the stream will be added to `resources`.
///
/// `color_glyph_width` should be `None` unless the `Frame` represents a [color
/// glyph].
///
/// [color glyph]: `crate::color_font`
pub fn build(
options: &PdfOptions,
resources: &mut Resources<()>,
frame: &Frame,
fill: Option<Paint>,
color_glyph_width: Option<f32>,
) -> SourceResult<Encoded> {
let size = frame.size();
let mut ctx = Builder::new(options, resources, size);
if let Some(width) = color_glyph_width {
ctx.content.start_color_glyph(width);
}
// Make the coordinate system start at the top-left.
ctx.transform(
// Make the Y axis go upwards
Transform::scale(Ratio::one(), -Ratio::one())
// Also move the origin to the top left corner
.post_concat(Transform::translate(Abs::zero(), size.y)),
);
if let Some(fill) = fill {
let shape = Geometry::Rect(frame.size()).filled(fill);
write_shape(&mut ctx, Point::zero(), &shape)?;
}
// Encode the frame into the content stream.
write_frame(&mut ctx, frame)?;
Ok(Encoded {
size,
content: deflate_deferred(ctx.content.finish()),
uses_opacities: ctx.uses_opacities,
links: ctx.links,
})
}
/// An encoded content stream.
pub struct Encoded {
/// The dimensions of the content.
pub size: Size,
/// The actual content stream.
pub content: Deferred<Vec<u8>>,
/// Whether the content uses opacities.
pub uses_opacities: bool,
/// Links in the PDF coordinate system.
pub links: Vec<(Destination, Rect)>,
}
/// An exporter for a single PDF content stream.
///
/// Content streams are a series of PDF commands. They can reference external
/// objects only through resources.
///
/// Content streams can be used for page contents, but also to describe color
/// glyphs and tilings.
pub struct Builder<'a, R = ()> {
/// Settings for PDF export.
pub(crate) options: &'a PdfOptions<'a>,
/// A list of all resources that are used in the content stream.
pub(crate) resources: &'a mut Resources<R>,
/// The PDF content stream that is being built.
pub content: Content,
/// Current graphic state.
state: State,
/// Stack of saved graphic states.
saves: Vec<State>,
/// Whether any stroke or fill was not totally opaque.
uses_opacities: bool,
/// All clickable links that are present in this content.
links: Vec<(Destination, Rect)>,
}
impl<'a, R> Builder<'a, R> {
/// Create a new content builder.
pub fn new(
options: &'a PdfOptions<'a>,
resources: &'a mut Resources<R>,
size: Size,
) -> Self {
Builder {
options,
resources,
uses_opacities: false,
content: Content::new(),
state: State::new(size),
saves: vec![],
links: vec![],
}
}
}
/// A simulated graphics state used to deduplicate graphics state changes and
/// keep track of the current transformation matrix for link annotations.
#[derive(Debug, Clone)]
struct State {
/// The transform of the current item.
transform: Transform,
/// The transform of the first hard frame in the hierarchy.
container_transform: Transform,
/// The size of the first hard frame in the hierarchy.
size: Size,
/// The current font.
font: Option<(Font, Abs)>,
/// The current fill paint.
fill: Option<Paint>,
/// The color space of the current fill paint.
fill_space: Option<Name<'static>>,
/// The current external graphic state.
external_graphics_state: ExtGState,
/// The current stroke paint.
stroke: Option<FixedStroke>,
/// The color space of the current stroke paint.
stroke_space: Option<Name<'static>>,
/// The current text rendering mode.
text_rendering_mode: TextRenderingMode,
}
impl State {
/// Creates a new, clean state for a given `size`.
pub fn new(size: Size) -> Self {
Self {
transform: Transform::identity(),
container_transform: Transform::identity(),
size,
font: None,
fill: None,
fill_space: None,
external_graphics_state: ExtGState::default(),
stroke: None,
stroke_space: None,
text_rendering_mode: TextRenderingMode::Fill,
}
}
/// Creates the [`Transforms`] structure for the current item.
pub fn transforms(&self, size: Size, pos: Point) -> Transforms {
Transforms {
transform: self.transform.pre_concat(Transform::translate(pos.x, pos.y)),
container_transform: self.container_transform,
container_size: self.size,
size,
}
}
}
/// Subset of the state used to calculate the transform of gradients and tilings.
#[derive(Debug, Clone, Copy)]
pub(super) struct Transforms {
/// The transform of the current item.
pub transform: Transform,
/// The transform of the first hard frame in the hierarchy.
pub container_transform: Transform,
/// The size of the first hard frame in the hierarchy.
pub container_size: Size,
/// The size of the item.
pub size: Size,
}
impl Builder<'_, ()> {
fn save_state(&mut self) -> SourceResult<()> {
self.saves.push(self.state.clone());
self.content.save_state_checked()
}
fn restore_state(&mut self) {
self.content.restore_state();
self.state = self.saves.pop().expect("missing state save");
}
fn set_external_graphics_state(&mut self, graphics_state: &ExtGState) {
let current_state = &self.state.external_graphics_state;
if current_state != graphics_state {
let index = self.resources.ext_gs.insert(*graphics_state);
let name = eco_format!("Gs{index}");
self.content.set_parameters(Name(name.as_bytes()));
self.state.external_graphics_state = *graphics_state;
if graphics_state.uses_opacities() {
self.uses_opacities = true;
}
}
}
fn set_opacities(&mut self, stroke: Option<&FixedStroke>, fill: Option<&Paint>) {
let get_opacity = |paint: &Paint| {
let color = match paint {
Paint::Solid(color) => *color,
Paint::Gradient(_) | Paint::Tiling(_) => return 255,
};
color.alpha().map_or(255, |v| (v * 255.0).round() as u8)
};
let stroke_opacity = stroke.map_or(255, |stroke| get_opacity(&stroke.paint));
let fill_opacity = fill.map_or(255, get_opacity);
self.set_external_graphics_state(&ExtGState { stroke_opacity, fill_opacity });
}
fn reset_opacities(&mut self) {
self.set_external_graphics_state(&ExtGState {
stroke_opacity: 255,
fill_opacity: 255,
});
}
pub fn transform(&mut self, transform: Transform) {
let Transform { sx, ky, kx, sy, tx, ty } = transform;
self.state.transform = self.state.transform.pre_concat(transform);
if self.state.container_transform.is_identity() {
self.state.container_transform = self.state.transform;
}
self.content.transform([
sx.get() as _,
ky.get() as _,
kx.get() as _,
sy.get() as _,
tx.to_f32(),
ty.to_f32(),
]);
}
fn group_transform(&mut self, transform: Transform) {
self.state.container_transform =
self.state.container_transform.pre_concat(transform);
}
fn set_font(&mut self, font: &Font, size: Abs) {
if self.state.font.as_ref().map(|(f, s)| (f, *s)) != Some((font, size)) {
let index = self.resources.fonts.insert(font.clone());
let name = eco_format!("F{index}");
self.content.set_font(Name(name.as_bytes()), size.to_f32());
self.state.font = Some((font.clone(), size));
}
}
fn size(&mut self, size: Size) {
self.state.size = size;
}
fn set_fill(
&mut self,
fill: &Paint,
on_text: bool,
transforms: Transforms,
) -> SourceResult<()> {
if self.state.fill.as_ref() != Some(fill)
|| matches!(self.state.fill, Some(Paint::Gradient(_)))
{
fill.set_as_fill(self, on_text, transforms)?;
self.state.fill = Some(fill.clone());
}
Ok(())
}
pub fn set_fill_color_space(&mut self, space: Name<'static>) {
if self.state.fill_space != Some(space) {
self.content.set_fill_color_space(ColorSpaceOperand::Named(space));
self.state.fill_space = Some(space);
}
}
pub fn reset_fill_color_space(&mut self) {
self.state.fill_space = None;
}
fn set_stroke(
&mut self,
stroke: &FixedStroke,
on_text: bool,
transforms: Transforms,
) -> SourceResult<()> {
if self.state.stroke.as_ref() != Some(stroke)
|| matches!(
self.state.stroke.as_ref().map(|s| &s.paint),
Some(Paint::Gradient(_))
)
{
let FixedStroke { paint, thickness, cap, join, dash, miter_limit } = stroke;
paint.set_as_stroke(self, on_text, transforms)?;
self.content.set_line_width(thickness.to_f32());
if self.state.stroke.as_ref().map(|s| &s.cap) != Some(cap) {
self.content.set_line_cap(to_pdf_line_cap(*cap));
}
if self.state.stroke.as_ref().map(|s| &s.join) != Some(join) {
self.content.set_line_join(to_pdf_line_join(*join));
}
if self.state.stroke.as_ref().map(|s| &s.dash) != Some(dash) {
if let Some(dash) = dash {
self.content.set_dash_pattern(
dash.array.iter().map(|l| l.to_f32()),
dash.phase.to_f32(),
);
} else {
self.content.set_dash_pattern([], 0.0);
}
}
if self.state.stroke.as_ref().map(|s| &s.miter_limit) != Some(miter_limit) {
self.content.set_miter_limit(miter_limit.get() as f32);
}
self.state.stroke = Some(stroke.clone());
}
Ok(())
}
pub fn set_stroke_color_space(&mut self, space: Name<'static>) {
if self.state.stroke_space != Some(space) {
self.content.set_stroke_color_space(ColorSpaceOperand::Named(space));
self.state.stroke_space = Some(space);
}
}
pub fn reset_stroke_color_space(&mut self) {
self.state.stroke_space = None;
}
fn set_text_rendering_mode(&mut self, mode: TextRenderingMode) {
if self.state.text_rendering_mode != mode {
self.content.set_text_rendering_mode(mode);
self.state.text_rendering_mode = mode;
}
}
}
/// Encode a frame into the content stream.
pub(crate) fn write_frame(ctx: &mut Builder, frame: &Frame) -> SourceResult<()> {
for &(pos, ref item) in frame.items() {
let x = pos.x.to_f32();
let y = pos.y.to_f32();
match item {
FrameItem::Group(group) => write_group(ctx, pos, group)?,
FrameItem::Text(text) => write_text(ctx, pos, text)?,
FrameItem::Shape(shape, _) => write_shape(ctx, pos, shape)?,
FrameItem::Image(image, size, span) => {
write_image(ctx, x, y, image, *size, *span)?
}
FrameItem::Link(dest, size) => write_link(ctx, pos, dest, *size),
FrameItem::Tag(_) => {}
}
}
Ok(())
}
/// Encode a group into the content stream.
fn write_group(ctx: &mut Builder, pos: Point, group: &GroupItem) -> SourceResult<()> {
let translation = Transform::translate(pos.x, pos.y);
ctx.save_state()?;
if group.frame.kind().is_hard() {
ctx.group_transform(
ctx.state
.transform
.post_concat(ctx.state.container_transform.invert().unwrap())
.pre_concat(translation)
.pre_concat(group.transform),
);
ctx.size(group.frame.size());
}
ctx.transform(translation.pre_concat(group.transform));
if let Some(clip_curve) = &group.clip {
write_curve(ctx, 0.0, 0.0, clip_curve);
ctx.content.clip_nonzero();
ctx.content.end_path();
}
write_frame(ctx, &group.frame)?;
ctx.restore_state();
Ok(())
}
/// Encode a text run into the content stream.
fn write_text(ctx: &mut Builder, pos: Point, text: &TextItem) -> SourceResult<()> {
if ctx.options.standards.pdfa && text.font.info().is_last_resort() {
bail!(
Span::find(text.glyphs.iter().map(|g| g.span.0)),
"the text {} could not be displayed with any font",
&text.text,
);
}
let outline_glyphs =
text.glyphs.iter().filter(|g| should_outline(&text.font, g)).count();
if outline_glyphs == text.glyphs.len() {
write_normal_text(ctx, pos, TextItemView::full(text))?;
} else if outline_glyphs == 0 {
write_complex_glyphs(ctx, pos, TextItemView::full(text))?;
} else {
// Otherwise we need to split it into smaller text runs.
let mut offset = 0;
let mut position_in_run = Abs::zero();
for (should_outline, sub_run) in
text.glyphs.group_by_key(|g| should_outline(&text.font, g))
{
let end = offset + sub_run.len();
// Build a sub text-run.
let text_item_view = TextItemView::from_glyph_range(text, offset..end);
// Adjust the position of the run on the line.
let pos = pos + Point::new(position_in_run, Abs::zero());
position_in_run += text_item_view.width();
offset = end;
// Actually write the sub text-run.
if should_outline {
write_normal_text(ctx, pos, text_item_view)?;
} else {
write_complex_glyphs(ctx, pos, text_item_view)?;
}
}
}
Ok(())
}
/// Encodes a text run (without any color glyph) into the content stream.
fn write_normal_text(
ctx: &mut Builder,
pos: Point,
text: TextItemView,
) -> SourceResult<()> {
let x = pos.x.to_f32();
let y = pos.y.to_f32();
*ctx.resources.languages.entry(text.item.lang).or_insert(0) += text.glyph_range.len();
let glyph_set = ctx.resources.glyph_sets.entry(text.item.font.clone()).or_default();
for g in text.glyphs() {
glyph_set.entry(g.id).or_insert_with(|| text.glyph_text(g));
}
let fill_transform = ctx.state.transforms(Size::zero(), pos);
ctx.set_fill(&text.item.fill, true, fill_transform)?;
let stroke = text.item.stroke.as_ref().and_then(|stroke| {
if stroke.thickness.to_f32() > 0.0 {
Some(stroke)
} else {
None
}
});
if let Some(stroke) = stroke {
ctx.set_stroke(stroke, true, fill_transform)?;
ctx.set_text_rendering_mode(TextRenderingMode::FillStroke);
} else {
ctx.set_text_rendering_mode(TextRenderingMode::Fill);
}
ctx.set_font(&text.item.font, text.item.size);
ctx.set_opacities(text.item.stroke.as_ref(), Some(&text.item.fill));
ctx.content.begin_text();
// Position the text.
ctx.content.set_text_matrix([1.0, 0.0, 0.0, -1.0, x, y]);
let mut positioned = ctx.content.show_positioned();
let mut items = positioned.items();
let mut adjustment = Em::zero();
let mut encoded = vec![];
let glyph_remapper = ctx
.resources
.glyph_remappers
.entry(text.item.font.clone())
.or_default();
// Write the glyphs with kerning adjustments.
for glyph in text.glyphs() {
if ctx.options.standards.pdfa && glyph.id == 0 {
bail!(tofu(&text, glyph));
}
adjustment += glyph.x_offset;
if !adjustment.is_zero() {
if !encoded.is_empty() {
show_text(&mut items, &encoded);
encoded.clear();
}
items.adjust(-adjustment.to_font_units());
adjustment = Em::zero();
}
// In PDF, we use CIDs to index the glyphs in a font, not GIDs. What a
// CID actually refers to depends on the type of font we are embedding:
//
// - For TrueType fonts, the CIDs are defined by an external mapping.
// - For SID-keyed CFF fonts, the CID is the same as the GID in the font.
// - For CID-keyed CFF fonts, the CID refers to the CID in the font.
//
// (See the PDF specification for more details on this.)
//
// However, in our case:
// - We use the identity-mapping for TrueType fonts.
// - SID-keyed fonts will get converted into CID-keyed fonts by the
// subsetter.
// - CID-keyed fonts will be rewritten in a way so that the mapping
// between CID and GID is always the identity mapping, regardless of
// the mapping before.
//
// Because of this, we can always use the remapped GID as the CID,
// regardless of which type of font we are actually embedding.
let cid = glyph_remapper.remap(glyph.id);
encoded.push((cid >> 8) as u8);
encoded.push((cid & 0xff) as u8);
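// For example, a remapped CID of 0x1234 is emitted as the big-endian byte
// pair [0x12, 0x34], since Identity-H encoded text uses two bytes per CID.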
if let Some(advance) = text.item.font.advance(glyph.id) {
adjustment += glyph.x_advance - advance;
}
adjustment -= glyph.x_offset;
}
if !encoded.is_empty() {
show_text(&mut items, &encoded);
}
items.finish();
positioned.finish();
ctx.content.end_text();
Ok(())
}
/// Shows text, ensuring that each individual string doesn't exceed the
/// implementation limits.
fn show_text(items: &mut PositionedItems, encoded: &[u8]) {
for chunk in encoded.chunks(Str::PDFA_LIMIT) {
items.show(Str(chunk));
}
}
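// For instance, assuming the PDF/A string limit of 32767 bytes, a run of
// 40000 encoded bytes would be shown as two consecutive strings of 32767
// and 7233 bytes instead of one over-long string.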
/// Encodes a text run made only of color glyphs into the content stream.
fn write_complex_glyphs(
ctx: &mut Builder,
pos: Point,
text: TextItemView,
) -> SourceResult<()> {
let x = pos.x.to_f32();
let y = pos.y.to_f32();
let mut last_font = None;
ctx.reset_opacities();
ctx.content.begin_text();
ctx.content.set_text_matrix([1.0, 0.0, 0.0, -1.0, x, y]);
// Invalidate the cached font so that the next call to ctx.set_font() will
// switch to a font that displays regular glyphs rather than color glyphs.
ctx.state.font = None;
let glyph_set = ctx
.resources
.color_glyph_sets
.entry(text.item.font.clone())
.or_default();
for glyph in text.glyphs() {
if ctx.options.standards.pdfa && glyph.id == 0 {
bail!(tofu(&text, glyph));
}
// Retrieve the Type3 font reference and the glyph index in the font.
let color_fonts = ctx
.resources
.color_fonts
.get_or_insert_with(|| Box::new(ColorFontMap::new()));
let (font, index) = color_fonts.get(ctx.options, &text, glyph)?;
if last_font != Some(font) {
ctx.content.set_font(
Name(eco_format!("Cf{}", font).as_bytes()),
text.item.size.to_f32(),
);
last_font = Some(font);
}
ctx.content.show(Str(&[index]));
glyph_set.entry(glyph.id).or_insert_with(|| text.glyph_text(glyph));
}
ctx.content.end_text();
Ok(())
}
/// Encode a geometrical shape into the content stream.
fn write_shape(ctx: &mut Builder, pos: Point, shape: &Shape) -> SourceResult<()> {
let x = pos.x.to_f32();
let y = pos.y.to_f32();
let stroke = shape.stroke.as_ref().and_then(|stroke| {
if stroke.thickness.to_f32() > 0.0 {
Some(stroke)
} else {
None
}
});
if shape.fill.is_none() && stroke.is_none() {
return Ok(());
}
if let Some(fill) = &shape.fill {
ctx.set_fill(fill, false, ctx.state.transforms(shape.geometry.bbox_size(), pos))?;
}
if let Some(stroke) = stroke {
ctx.set_stroke(
stroke,
false,
ctx.state.transforms(shape.geometry.bbox_size(), pos),
)?;
}
ctx.set_opacities(stroke, shape.fill.as_ref());
match &shape.geometry {
Geometry::Line(target) => {
let dx = target.x.to_f32();
let dy = target.y.to_f32();
ctx.content.move_to(x, y);
ctx.content.line_to(x + dx, y + dy);
}
Geometry::Rect(size) => {
let w = size.x.to_f32();
let h = size.y.to_f32();
if w.abs() > f32::EPSILON && h.abs() > f32::EPSILON {
ctx.content.rect(x, y, w, h);
}
}
Geometry::Curve(curve) => {
write_curve(ctx, x, y, curve);
}
}
match (&shape.fill, &shape.fill_rule, stroke) {
(None, _, None) => unreachable!(),
(Some(_), FillRule::NonZero, None) => ctx.content.fill_nonzero(),
(Some(_), FillRule::EvenOdd, None) => ctx.content.fill_even_odd(),
(None, _, Some(_)) => ctx.content.stroke(),
(Some(_), FillRule::NonZero, Some(_)) => ctx.content.fill_nonzero_and_stroke(),
(Some(_), FillRule::EvenOdd, Some(_)) => ctx.content.fill_even_odd_and_stroke(),
};
Ok(())
}
/// Encode a curve into the content stream.
fn write_curve(ctx: &mut Builder, x: f32, y: f32, curve: &Curve) {
for elem in &curve.0 {
match elem {
CurveItem::Move(p) => ctx.content.move_to(x + p.x.to_f32(), y + p.y.to_f32()),
CurveItem::Line(p) => ctx.content.line_to(x + p.x.to_f32(), y + p.y.to_f32()),
CurveItem::Cubic(p1, p2, p3) => ctx.content.cubic_to(
x + p1.x.to_f32(),
y + p1.y.to_f32(),
x + p2.x.to_f32(),
y + p2.y.to_f32(),
x + p3.x.to_f32(),
y + p3.y.to_f32(),
),
CurveItem::Close => ctx.content.close_path(),
};
}
}
/// Encode a vector or raster image into the content stream.
fn write_image(
ctx: &mut Builder,
x: f32,
y: f32,
image: &Image,
size: Size,
span: Span,
) -> SourceResult<()> {
let index = ctx.resources.images.insert(image.clone());
ctx.resources.deferred_images.entry(index).or_insert_with(|| {
let (image, color_space) =
deferred_image(image.clone(), ctx.options.standards.pdfa);
if let Some(color_space) = color_space {
ctx.resources.colors.mark_as_used(color_space);
}
(image, span)
});
ctx.reset_opacities();
let name = eco_format!("Im{index}");
let w = size.x.to_f32();
let h = size.y.to_f32();
ctx.content.save_state_checked()?;
ctx.content.transform([w, 0.0, 0.0, -h, x, y + h]);
if let Some(alt) = image.alt() {
if ctx.options.standards.pdfa && alt.len() > Str::PDFA_LIMIT {
bail!(span, "the image's alt text is too long");
}
let mut image_span =
ctx.content.begin_marked_content_with_properties(Name(b"Span"));
let mut image_alt = image_span.properties();
image_alt.pair(Name(b"Alt"), Str(alt.as_bytes()));
image_alt.finish();
image_span.finish();
ctx.content.x_object(Name(name.as_bytes()));
ctx.content.end_marked_content();
} else {
ctx.content.x_object(Name(name.as_bytes()));
}
ctx.content.restore_state();
Ok(())
}
/// Save a link for later writing in the annotations dictionary.
fn write_link(ctx: &mut Builder, pos: Point, dest: &Destination, size: Size) {
let mut min_x = Abs::inf();
let mut min_y = Abs::inf();
let mut max_x = -Abs::inf();
let mut max_y = -Abs::inf();
// Compute the bounding box of the transformed link.
for point in [
pos,
pos + Point::with_x(size.x),
pos + Point::with_y(size.y),
pos + size.to_point(),
] {
let t = point.transform(ctx.state.transform);
min_x.set_min(t.x);
min_y.set_min(t.y);
max_x.set_max(t.x);
max_y.set_max(t.y);
}
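// Using all four corners (rather than two opposite ones) keeps the
// rectangle correct even when the link is rotated or skewed.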
let x1 = min_x.to_f32();
let x2 = max_x.to_f32();
let y1 = max_y.to_f32();
let y2 = min_y.to_f32();
let rect = Rect::new(x1, y1, x2, y2);
ctx.links.push((dest.clone(), rect));
}
fn to_pdf_line_cap(cap: LineCap) -> LineCapStyle {
match cap {
LineCap::Butt => LineCapStyle::ButtCap,
LineCap::Round => LineCapStyle::RoundCap,
LineCap::Square => LineCapStyle::ProjectingSquareCap,
}
}
fn to_pdf_line_join(join: LineJoin) -> LineJoinStyle {
match join {
LineJoin::Miter => LineJoinStyle::MiterJoin,
LineJoin::Round => LineJoinStyle::RoundJoin,
LineJoin::Bevel => LineJoinStyle::BevelJoin,
}
}
/// The error when there is a tofu glyph.
#[cold]
fn tofu(text: &TextItemView, glyph: &Glyph) -> SourceDiagnostic {
error!(
glyph.span.0,
"the text {} could not be displayed with any font",
text.glyph_text(glyph).repr(),
)
}

View File

@@ -0,0 +1,661 @@
use std::collections::{BTreeMap, HashMap, HashSet};
use std::num::NonZeroU64;
use ecow::{eco_format, EcoVec};
use krilla::annotation::Annotation;
use krilla::configure::{Configuration, ValidationError, Validator};
use krilla::destination::{NamedDestination, XyzDestination};
use krilla::embed::EmbedError;
use krilla::error::KrillaError;
use krilla::geom::PathBuilder;
use krilla::page::{PageLabel, PageSettings};
use krilla::surface::Surface;
use krilla::{Document, SerializeSettings};
use krilla_svg::render_svg_glyph;
use typst_library::diag::{bail, error, SourceDiagnostic, SourceResult};
use typst_library::foundations::NativeElement;
use typst_library::introspection::Location;
use typst_library::layout::{
Abs, Frame, FrameItem, GroupItem, PagedDocument, Size, Transform,
};
use typst_library::model::HeadingElem;
use typst_library::text::{Font, Lang};
use typst_library::visualize::{Geometry, Paint};
use typst_syntax::Span;
use crate::embed::embed_files;
use crate::image::handle_image;
use crate::link::handle_link;
use crate::metadata::build_metadata;
use crate::outline::build_outline;
use crate::page::PageLabelExt;
use crate::shape::handle_shape;
use crate::text::handle_text;
use crate::util::{convert_path, display_font, AbsExt, TransformExt};
use crate::PdfOptions;
#[typst_macros::time(name = "convert document")]
pub fn convert(
typst_document: &PagedDocument,
options: &PdfOptions,
) -> SourceResult<Vec<u8>> {
let settings = SerializeSettings {
compress_content_streams: true,
no_device_cs: true,
ascii_compatible: false,
xmp_metadata: true,
cmyk_profile: None,
configuration: options.standards.config,
enable_tagging: false,
render_svg_glyph_fn: render_svg_glyph,
};
let mut document = Document::new_with(settings);
let page_index_converter = PageIndexConverter::new(typst_document, options);
let named_destinations =
collect_named_destinations(typst_document, &page_index_converter);
let mut gc = GlobalContext::new(
typst_document,
options,
named_destinations,
page_index_converter,
);
convert_pages(&mut gc, &mut document)?;
embed_files(typst_document, &mut document)?;
document.set_outline(build_outline(&gc));
document.set_metadata(build_metadata(&gc));
finish(document, gc, options.standards.config)
}
fn convert_pages(gc: &mut GlobalContext, document: &mut Document) -> SourceResult<()> {
for (i, typst_page) in gc.document.pages.iter().enumerate() {
if gc.page_index_converter.pdf_page_index(i).is_none() {
// Don't export this page.
continue;
} else {
let mut settings = PageSettings::new(
typst_page.frame.width().to_f32(),
typst_page.frame.height().to_f32(),
);
if let Some(label) = typst_page
.numbering
.as_ref()
.and_then(|num| PageLabel::generate(num, typst_page.number))
.or_else(|| {
// When some pages were excluded from export, we use a page label to
// show the real (not logical) page number.
// This is consistent with the normal output when pages have no
// numbering and all of them are exported: there, the final PDF page
// numbers always correspond to the real (not logical) page numbers.
// Here, the final PDF page numbers will differ, but we can at least
// use labels to indicate the corresponding real page numbers in the
// Typst document.
gc.page_index_converter
.has_skipped_pages()
.then(|| PageLabel::arabic((i + 1) as u64))
})
{
settings = settings.with_page_label(label);
}
let mut page = document.start_page_with(settings);
let mut surface = page.surface();
let mut fc = FrameContext::new(typst_page.frame.size());
handle_frame(
&mut fc,
&typst_page.frame,
typst_page.fill_or_transparent(),
&mut surface,
gc,
)?;
surface.finish();
for annotation in fc.annotations {
page.add_annotation(annotation);
}
}
}
Ok(())
}
/// A state allowing us to keep track of transforms and container sizes,
/// which is mainly needed to resolve gradients and patterns correctly.
#[derive(Debug, Clone)]
pub(crate) struct State {
/// The current transform.
transform: Transform,
/// The transform of the first hard frame in the hierarchy.
container_transform: Transform,
/// The size of the first hard frame in the hierarchy.
container_size: Size,
}
impl State {
/// Creates a new, clean state for a given `size`.
fn new(size: Size) -> Self {
Self {
transform: Transform::identity(),
container_transform: Transform::identity(),
container_size: size,
}
}
pub(crate) fn register_container(&mut self, size: Size) {
self.container_transform = self.transform;
self.container_size = size;
}
pub(crate) fn pre_concat(&mut self, transform: Transform) {
self.transform = self.transform.pre_concat(transform);
}
pub(crate) fn transform(&self) -> Transform {
self.transform
}
pub(crate) fn container_transform(&self) -> Transform {
self.container_transform
}
pub(crate) fn container_size(&self) -> Size {
self.container_size
}
}
/// Context needed for converting a single frame.
pub(crate) struct FrameContext {
states: Vec<State>,
annotations: Vec<Annotation>,
}
impl FrameContext {
pub(crate) fn new(size: Size) -> Self {
Self {
states: vec![State::new(size)],
annotations: vec![],
}
}
pub(crate) fn push(&mut self) {
self.states.push(self.states.last().unwrap().clone());
}
pub(crate) fn pop(&mut self) {
self.states.pop();
}
pub(crate) fn state(&self) -> &State {
self.states.last().unwrap()
}
pub(crate) fn state_mut(&mut self) -> &mut State {
self.states.last_mut().unwrap()
}
pub(crate) fn push_annotation(&mut self, annotation: Annotation) {
self.annotations.push(annotation);
}
}
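// A minimal usage sketch (hypothetical, for illustration): each group pushes
// a copy of the current state, mutates it, and pops it afterwards, so sibling
// frames never see each other's transforms.
//
// let mut fc = FrameContext::new(Size::zero());
// fc.push();
// fc.state_mut().pre_concat(Transform::translate(Abs::pt(10.0), Abs::zero()));
// // ... draw the group's children with the translated state ...
// fc.pop(); // the translation is gone again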
/// Globally needed context for converting a Typst document.
pub(crate) struct GlobalContext<'a> {
/// Cache the conversion between krilla and Typst fonts (forward and backward).
pub(crate) fonts_forward: HashMap<Font, krilla::text::Font>,
pub(crate) fonts_backward: HashMap<krilla::text::Font, Font>,
/// Mapping between images and their span.
// Note: In theory, the same image can have multiple spans
// if it appears in the document multiple times. We just store the
// first appearance, though.
pub(crate) image_to_spans: HashMap<krilla::image::Image, Span>,
/// The spans of all images that appear in the document. We use this so
/// we can give more accurate error messages.
pub(crate) image_spans: HashSet<Span>,
/// The document to convert.
pub(crate) document: &'a PagedDocument,
/// Options for PDF export.
pub(crate) options: &'a PdfOptions<'a>,
/// Mapping between locations in the document and named destinations.
pub(crate) loc_to_names: HashMap<Location, NamedDestination>,
/// The languages used throughout the document.
pub(crate) languages: BTreeMap<Lang, usize>,
pub(crate) page_index_converter: PageIndexConverter,
}
impl<'a> GlobalContext<'a> {
pub(crate) fn new(
document: &'a PagedDocument,
options: &'a PdfOptions,
loc_to_names: HashMap<Location, NamedDestination>,
page_index_converter: PageIndexConverter,
) -> GlobalContext<'a> {
Self {
fonts_forward: HashMap::new(),
fonts_backward: HashMap::new(),
document,
options,
loc_to_names,
image_to_spans: HashMap::new(),
image_spans: HashSet::new(),
languages: BTreeMap::new(),
page_index_converter,
}
}
}
#[typst_macros::time(name = "handle page")]
pub(crate) fn handle_frame(
fc: &mut FrameContext,
frame: &Frame,
fill: Option<Paint>,
surface: &mut Surface,
gc: &mut GlobalContext,
) -> SourceResult<()> {
fc.push();
if frame.kind().is_hard() {
fc.state_mut().register_container(frame.size());
}
if let Some(fill) = fill {
let shape = Geometry::Rect(frame.size()).filled(fill);
handle_shape(fc, &shape, surface, gc, Span::detached())?;
}
for (point, item) in frame.items() {
fc.push();
fc.state_mut().pre_concat(Transform::translate(point.x, point.y));
match item {
FrameItem::Group(g) => handle_group(fc, g, surface, gc)?,
FrameItem::Text(t) => handle_text(fc, t, surface, gc)?,
FrameItem::Shape(s, span) => handle_shape(fc, s, surface, gc, *span)?,
FrameItem::Image(image, size, span) => {
handle_image(gc, fc, image, *size, surface, *span)?
}
FrameItem::Link(d, s) => handle_link(fc, gc, d, *s),
FrameItem::Tag(_) => {}
}
fc.pop();
}
fc.pop();
Ok(())
}
pub(crate) fn handle_group(
fc: &mut FrameContext,
group: &GroupItem,
surface: &mut Surface,
context: &mut GlobalContext,
) -> SourceResult<()> {
fc.push();
fc.state_mut().pre_concat(group.transform);
let clip_path = group
.clip
.as_ref()
.and_then(|p| {
let mut builder = PathBuilder::new();
convert_path(p, &mut builder);
builder.finish()
})
.and_then(|p| p.transform(fc.state().transform.to_krilla()));
if let Some(clip_path) = &clip_path {
surface.push_clip_path(clip_path, &krilla::paint::FillRule::NonZero);
}
handle_frame(fc, &group.frame, None, surface, context)?;
if clip_path.is_some() {
surface.pop();
}
fc.pop();
Ok(())
}
#[typst_macros::time(name = "finish export")]
/// Finish a krilla document and handle export errors.
fn finish(
document: Document,
gc: GlobalContext,
configuration: Configuration,
) -> SourceResult<Vec<u8>> {
let validator = configuration.validator();
match document.finish() {
Ok(r) => Ok(r),
Err(e) => match e {
KrillaError::Font(f, s) => {
let font_str = display_font(gc.fonts_backward.get(&f).unwrap());
bail!(
Span::detached(),
"failed to process font {font_str}: {s}";
hint: "make sure the font is valid";
hint: "the used font might be unsupported by Typst"
);
}
KrillaError::Validation(ve) => {
let errors = ve
.iter()
.map(|e| convert_error(&gc, validator, e))
.collect::<EcoVec<_>>();
Err(errors)
}
KrillaError::Image(_, loc) => {
let span = to_span(loc);
bail!(span, "failed to process image");
}
KrillaError::SixteenBitImage(image, _) => {
let span = gc.image_to_spans.get(&image).unwrap();
bail!(
*span, "16 bit images are not supported in this export mode";
hint: "convert the image to 8 bit instead"
)
}
},
}
}
/// Converts a krilla error into a Typst error.
fn convert_error(
gc: &GlobalContext,
validator: Validator,
error: &ValidationError,
) -> SourceDiagnostic {
let prefix = eco_format!("{} error:", validator.as_str());
match error {
ValidationError::TooLongString => error!(
Span::detached(),
"{prefix} a PDF string is longer than 32767 characters";
hint: "ensure title and author names are short enough"
),
// Should in theory never occur, as krilla always trims font names.
ValidationError::TooLongName => error!(
Span::detached(),
"{prefix} a PDF name is longer than 127 characters";
hint: "perhaps a font name is too long"
),
ValidationError::TooLongArray => error!(
Span::detached(),
"{prefix} a PDF array is longer than 8191 elements";
hint: "this can happen if you have a very long text in a single line"
),
ValidationError::TooLongDictionary => error!(
Span::detached(),
"{prefix} a PDF dictionary has more than 4095 entries";
hint: "try reducing the complexity of your document"
),
ValidationError::TooLargeFloat => error!(
Span::detached(),
"{prefix} a PDF floating point number is larger than the allowed limit";
hint: "try exporting with a higher PDF version"
),
ValidationError::TooManyIndirectObjects => error!(
Span::detached(),
"{prefix} the PDF has too many indirect objects";
hint: "reduce the size of your document"
),
// Can only occur if we have 27+ nested clip paths.
ValidationError::TooHighQNestingLevel => error!(
Span::detached(),
"{prefix} the PDF has too high q nesting";
hint: "reduce the number of nested containers"
),
ValidationError::ContainsPostScript(loc) => error!(
to_span(*loc),
"{prefix} the PDF contains PostScript code";
hint: "conic gradients are not supported in this PDF standard"
),
ValidationError::MissingCMYKProfile => error!(
Span::detached(),
"{prefix} the PDF is missing a CMYK profile";
hint: "CMYK colors are not yet supported in this export mode"
),
ValidationError::ContainsNotDefGlyph(f, loc, text) => error!(
to_span(*loc),
"{prefix} the text '{text}' cannot be displayed using {}",
display_font(gc.fonts_backward.get(f).unwrap());
hint: "try using a different font"
),
ValidationError::InvalidCodepointMapping(_, _, cp, loc) => {
if let Some(c) = cp.map(|c| eco_format!("{:#06x}", c as u32)) {
let msg = if loc.is_some() {
"the PDF contains text with"
} else {
"the text contains"
};
error!(to_span(*loc), "{prefix} {msg} the disallowed codepoint {c}")
} else {
// This code path should be unreachable in theory,
// but we handle it just to be safe.
let msg = if loc.is_some() {
"the PDF contains text with missing codepoints"
} else {
"the text was not mapped to a code point"
};
error!(
to_span(*loc),
"{prefix} {msg}";
hint: "for complex scripts like Arabic, it might not be \
possible to produce a compliant document"
)
}
}
ValidationError::UnicodePrivateArea(_, _, c, loc) => {
let code_point = eco_format!("{:#06x}", *c as u32);
let msg = if loc.is_some() { "the PDF" } else { "the text" };
error!(
to_span(*loc),
"{prefix} {msg} contains the codepoint {code_point}";
hint: "codepoints from the Unicode private area are \
forbidden in this export mode"
)
}
ValidationError::Transparency(loc) => {
let span = to_span(*loc);
let hint1 = "try exporting with a different standard that \
supports transparency";
if loc.is_some() {
if gc.image_spans.contains(&span) {
error!(
span, "{prefix} the image contains transparency";
hint: "{hint1}";
hint: "or convert the image to a non-transparent one";
hint: "you might have to convert SVGs into \
non-transparent bitmap images"
)
} else {
error!(
span, "{prefix} the used fill or stroke has transparency";
hint: "{hint1}";
hint: "or don't use colors with transparency in \
this export mode"
)
}
} else {
error!(
span, "{prefix} the PDF contains transparency";
hint: "{hint1}"
)
}
}
ValidationError::ImageInterpolation(loc) => {
let span = to_span(*loc);
if loc.is_some() {
error!(
span, "{prefix} the image has smooth scaling";
hint: "set the `scaling` attribute to `pixelated`"
)
} else {
error!(
span, "{prefix} an image in the PDF has smooth scaling";
hint: "set the `scaling` attribute of all images to `pixelated`"
)
}
}
ValidationError::EmbeddedFile(e, s) => {
// We always set the span for embedded files, so it cannot be detached.
let span = to_span(*s);
match e {
EmbedError::Existence => {
error!(
span, "{prefix} document contains an embedded file";
hint: "embedded files are not supported in this export mode"
)
}
EmbedError::MissingDate => {
error!(
span, "{prefix} document date is missing";
hint: "the document must have a date when embedding files";
hint: "`set document(date: none)` must not be used in this case"
)
}
EmbedError::MissingDescription => {
error!(span, "{prefix} the file description is missing")
}
EmbedError::MissingMimeType => {
error!(span, "{prefix} the file mime type is missing")
}
}
}
// The errors below cannot occur yet; they only become relevant once Typst
// supports full PDF/A and PDF/UA. We still add messages to be on the safe side.
ValidationError::MissingAnnotationAltText => error!(
Span::detached(),
"{prefix} missing annotation alt text";
hint: "please report this as a bug"
),
ValidationError::MissingAltText => error!(
Span::detached(),
"{prefix} missing alt text";
hint: "make sure your images and equations have alt text"
),
ValidationError::NoDocumentLanguage => error!(
Span::detached(),
"{prefix} missing document language";
hint: "set the language of the document"
),
// Needs to be set by typst-pdf.
ValidationError::MissingHeadingTitle => error!(
Span::detached(),
"{prefix} missing heading title";
hint: "please report this as a bug"
),
ValidationError::MissingDocumentOutline => error!(
Span::detached(),
"{prefix} missing document outline";
hint: "please report this as a bug"
),
ValidationError::MissingTagging => error!(
Span::detached(),
"{prefix} missing document tags";
hint: "please report this as a bug"
),
ValidationError::NoDocumentTitle => error!(
Span::detached(),
"{prefix} missing document title";
hint: "set the title of the document"
),
ValidationError::MissingDocumentDate => error!(
Span::detached(),
"{prefix} missing document date";
hint: "set the date of the document"
),
}
}
/// Convert a krilla location to a span.
fn to_span(loc: Option<krilla::surface::Location>) -> Span {
loc.map(|l| Span::from_raw(NonZeroU64::new(l).unwrap()))
.unwrap_or(Span::detached())
}
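// This round-trip is lossless because the locations krilla reports are the
// raw span numbers we pass in ourselves (e.g. `span.into_raw().get()` when
// embedding files), and raw span numbers are always non-zero.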
fn collect_named_destinations(
document: &PagedDocument,
pic: &PageIndexConverter,
) -> HashMap<Location, NamedDestination> {
let mut locs_to_names = HashMap::new();
// Find all headings that have a label and are the first among other
// headings with the same label.
let matches: Vec<_> = {
let mut seen = HashSet::new();
document
.introspector
.query(&HeadingElem::elem().select())
.iter()
.filter_map(|elem| elem.location().zip(elem.label()))
.filter(|&(_, label)| seen.insert(label))
.collect()
};
for (loc, label) in matches {
let pos = document.introspector.position(loc);
let index = pos.page.get() - 1;
// We subtract 10pt because the position of a link (e.g. to a heading) is
// always at the baseline; if we linked directly to it, the text would end
// up just above the visible area and thus not be seen.
let y = (pos.point.y - Abs::pt(10.0)).max(Abs::zero());
// Only add a named destination if the page that the position belongs to is exported.
if let Some(index) = pic.pdf_page_index(index) {
let named = NamedDestination::new(
label.resolve().to_string(),
XyzDestination::new(
index,
krilla::geom::Point::from_xy(pos.point.x.to_f32(), y.to_f32()),
),
);
locs_to_names.insert(loc, named);
}
}
locs_to_names
}
pub(crate) struct PageIndexConverter {
page_indices: HashMap<usize, usize>,
skipped_pages: usize,
}
impl PageIndexConverter {
pub fn new(document: &PagedDocument, options: &PdfOptions) -> Self {
let mut page_indices = HashMap::new();
let mut skipped_pages = 0;
for i in 0..document.pages.len() {
if options
.page_ranges
.as_ref()
.is_some_and(|ranges| !ranges.includes_page_index(i))
{
skipped_pages += 1;
} else {
page_indices.insert(i, i - skipped_pages);
}
}
Self { page_indices, skipped_pages }
}
pub(crate) fn has_skipped_pages(&self) -> bool {
self.skipped_pages > 0
}
/// Get the PDF page index of a page index, if it's not excluded.
pub(crate) fn pdf_page_index(&self, page_index: usize) -> Option<usize> {
self.page_indices.get(&page_index).copied()
}
}
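// For example (hypothetical page ranges): exporting pages 0..5 while
// excluding pages 1 and 3 yields page_indices == {0: 0, 2: 1, 4: 2} and
// skipped_pages == 2, so pdf_page_index(3) == None and
// pdf_page_index(4) == Some(2).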

View File

@@ -1,122 +1,54 @@
use std::collections::BTreeMap;
use std::sync::Arc;
use ecow::EcoString;
use pdf_writer::types::AssociationKind;
use pdf_writer::{Filter, Finish, Name, Ref, Str, TextStr};
use krilla::embed::{AssociationKind, EmbeddedFile};
use krilla::Document;
use typst_library::diag::{bail, SourceResult};
use typst_library::foundations::{NativeElement, Packed, StyleChain};
use typst_library::foundations::{NativeElement, StyleChain};
use typst_library::layout::PagedDocument;
use typst_library::pdf::{EmbedElem, EmbeddedFileRelationship};
use crate::catalog::{document_date, pdf_date};
use crate::{deflate, NameExt, PdfChunk, StrExt, WithGlobalRefs};
pub(crate) fn embed_files(
typst_doc: &PagedDocument,
document: &mut Document,
) -> SourceResult<()> {
let elements = typst_doc.introspector.query(&EmbedElem::elem().select());
/// Query for all [`EmbedElem`] and write them and their file specifications.
///
/// This returns a map of embedding names and references so that we can later
/// add them to the catalog's `/Names` dictionary.
pub fn write_embedded_files(
ctx: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, BTreeMap<EcoString, Ref>)> {
let mut chunk = PdfChunk::new();
let mut embedded_files = BTreeMap::default();
let elements = ctx.document.introspector.query(&EmbedElem::elem().select());
for elem in &elements {
if !ctx.options.standards.embedded_files {
// PDF/A-2 requires embedded files to be PDF/A-1 or PDF/A-2,
// which we don't currently check.
bail!(
elem.span(),
"file embeddings are not currently supported for PDF/A-2";
hint: "PDF/A-3 supports arbitrary embedded files"
);
}
let embed = elem.to_packed::<EmbedElem>().unwrap();
if embed.path.derived.len() > Str::PDFA_LIMIT {
bail!(embed.span(), "embedded file path is too long");
}
let id = embed_file(ctx, &mut chunk, embed)?;
if embedded_files.insert(embed.path.derived.clone(), id).is_some() {
bail!(
elem.span(),
"duplicate embedded file for path `{}`", embed.path.derived;
hint: "embedded file paths must be unique",
);
}
}
Ok((chunk, embedded_files))
}
/// Write the embedded file stream and its file specification.
fn embed_file(
ctx: &WithGlobalRefs,
chunk: &mut PdfChunk,
embed: &Packed<EmbedElem>,
) -> SourceResult<Ref> {
let embedded_file_stream_ref = chunk.alloc.bump();
let file_spec_dict_ref = chunk.alloc.bump();
let data = embed.data.as_slice();
let compressed = deflate(data);
let mut embedded_file = chunk.embedded_file(embedded_file_stream_ref, &compressed);
embedded_file.filter(Filter::FlateDecode);
if let Some(mime_type) = embed.mime_type(StyleChain::default()) {
if mime_type.len() > Name::PDFA_LIMIT {
bail!(embed.span(), "embedded file MIME type is too long");
}
embedded_file.subtype(Name(mime_type.as_bytes()));
} else if ctx.options.standards.pdfa {
bail!(embed.span(), "embedded files must have a MIME type in PDF/A-3");
}
let mut params = embedded_file.params();
params.size(data.len() as i32);
let (date, tz) = document_date(ctx.document.info.date, ctx.options.timestamp);
if let Some(pdf_date) = date.and_then(|date| pdf_date(date, tz)) {
params.modification_date(pdf_date);
} else if ctx.options.standards.pdfa {
bail!(
embed.span(),
"the document must have a date when embedding files in PDF/A-3";
hint: "`set document(date: none)` must not be used in this case"
);
}
params.finish();
embedded_file.finish();
let mut file_spec = chunk.file_spec(file_spec_dict_ref);
file_spec.path(Str(embed.path.derived.as_bytes()));
file_spec.unic_file(TextStr(&embed.path.derived));
file_spec
.insert(Name(b"EF"))
.dict()
.pair(Name(b"F"), embedded_file_stream_ref)
.pair(Name(b"UF"), embedded_file_stream_ref);
if ctx.options.standards.pdfa {
// PDF 2.0, but ISO 19005-3 (PDF/A-3) Annex E allows it for PDF/A-3.
file_spec.association_kind(match embed.relationship(StyleChain::default()) {
Some(EmbeddedFileRelationship::Source) => AssociationKind::Source,
Some(EmbeddedFileRelationship::Data) => AssociationKind::Data,
Some(EmbeddedFileRelationship::Alternative) => AssociationKind::Alternative,
Some(EmbeddedFileRelationship::Supplement) => AssociationKind::Supplement,
let span = embed.span();
let derived_path = &embed.path.derived;
let path = derived_path.to_string();
let mime_type =
embed.mime_type(StyleChain::default()).clone().map(|s| s.to_string());
let description = embed
.description(StyleChain::default())
.clone()
.map(|s| s.to_string());
let association_kind = match embed.relationship(StyleChain::default()) {
None => AssociationKind::Unspecified,
});
Some(e) => match e {
EmbeddedFileRelationship::Source => AssociationKind::Source,
EmbeddedFileRelationship::Data => AssociationKind::Data,
EmbeddedFileRelationship::Alternative => AssociationKind::Alternative,
EmbeddedFileRelationship::Supplement => AssociationKind::Supplement,
},
};
let data: Arc<dyn AsRef<[u8]> + Send + Sync> = Arc::new(embed.data.clone());
let file = EmbeddedFile {
path,
mime_type,
description,
association_kind,
data: data.into(),
compress: true,
location: Some(span.into_raw().get()),
};
if document.embed_file(file).is_none() {
bail!(span, "attempted to embed file {derived_path} twice");
}
}
if let Some(description) = embed.description(StyleChain::default()) {
if description.len() > Str::PDFA_LIMIT {
bail!(embed.span(), "embedded file description is too long");
}
file_spec.description(TextStr(description));
}
Ok(file_spec_dict_ref)
Ok(())
}

View File

@@ -1,53 +0,0 @@
use std::collections::HashMap;
use pdf_writer::Ref;
use typst_library::diag::SourceResult;
use crate::{PdfChunk, WithGlobalRefs};
/// A PDF external graphics state.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub struct ExtGState {
// In the range 0-255; needs to be divided by 255 before being written into the graphics state!
pub stroke_opacity: u8,
// In the range 0-255; needs to be divided by 255 before being written into the graphics state!
pub fill_opacity: u8,
}
impl Default for ExtGState {
fn default() -> Self {
Self { stroke_opacity: 255, fill_opacity: 255 }
}
}
impl ExtGState {
pub fn uses_opacities(&self) -> bool {
self.stroke_opacity != 255 || self.fill_opacity != 255
}
}
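// For example, a 50% fill opacity is stored as 128 and written to the
// graphics state as 128.0 / 255.0 ≈ 0.5 via `non_stroking_alpha` below.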
/// Embed all used external graphics states into the PDF.
pub fn write_graphic_states(
context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<ExtGState, Ref>)> {
let mut chunk = PdfChunk::new();
let mut out = HashMap::new();
context.resources.traverse(&mut |resources| {
for external_gs in resources.ext_gs.items() {
if out.contains_key(external_gs) {
continue;
}
let id = chunk.alloc();
out.insert(*external_gs, id);
chunk
.ext_graphics(id)
.non_stroking_alpha(external_gs.fill_opacity as f32 / 255.0)
.stroking_alpha(external_gs.stroke_opacity as f32 / 255.0);
}
Ok(())
})?;
Ok((chunk, out))
}

View File

@@ -1,278 +0,0 @@
use std::collections::{BTreeMap, HashMap};
use std::hash::Hash;
use std::sync::Arc;
use ecow::{eco_format, EcoString};
use pdf_writer::types::{CidFontType, FontFlags, SystemInfo, UnicodeCmap};
use pdf_writer::writers::{FontDescriptor, WMode};
use pdf_writer::{Chunk, Filter, Finish, Name, Rect, Ref, Str};
use subsetter::GlyphRemapper;
use ttf_parser::{name_id, GlyphId, Tag};
use typst_library::diag::{At, SourceResult};
use typst_library::text::Font;
use typst_syntax::Span;
use typst_utils::SliceExt;
use crate::{deflate, EmExt, NameExt, PdfChunk, WithGlobalRefs};
const CFF: Tag = Tag::from_bytes(b"CFF ");
const CFF2: Tag = Tag::from_bytes(b"CFF2");
const SUBSET_TAG_LEN: usize = 6;
const IDENTITY_H: &str = "Identity-H";
pub(crate) const CMAP_NAME: Name = Name(b"Custom");
pub(crate) const SYSTEM_INFO: SystemInfo = SystemInfo {
registry: Str(b"Adobe"),
ordering: Str(b"Identity"),
supplement: 0,
};
/// Embed all used fonts into the PDF.
#[typst_macros::time(name = "write fonts")]
pub fn write_fonts(
context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<Font, Ref>)> {
let mut chunk = PdfChunk::new();
let mut out = HashMap::new();
context.resources.traverse(&mut |resources| {
for font in resources.fonts.items() {
if out.contains_key(font) {
continue;
}
let type0_ref = chunk.alloc();
let cid_ref = chunk.alloc();
let descriptor_ref = chunk.alloc();
let cmap_ref = chunk.alloc();
let data_ref = chunk.alloc();
out.insert(font.clone(), type0_ref);
let glyph_set = resources.glyph_sets.get(font).unwrap();
let glyph_remapper = resources.glyph_remappers.get(font).unwrap();
let ttf = font.ttf();
// Do we have a TrueType or CFF font?
//
// FIXME: CFF2 must be handled differently and requires PDF 2.0
// (or we have to convert it to CFF).
let is_cff = ttf
.raw_face()
.table(CFF)
.or_else(|| ttf.raw_face().table(CFF2))
.is_some();
let base_font = base_font_name(font, glyph_set);
let base_font_type0 = if is_cff {
eco_format!("{base_font}-{IDENTITY_H}")
} else {
base_font.clone()
};
// Write the base font object referencing the CID font.
chunk
.type0_font(type0_ref)
.base_font(Name(base_font_type0.as_bytes()))
.encoding_predefined(Name(IDENTITY_H.as_bytes()))
.descendant_font(cid_ref)
.to_unicode(cmap_ref);
// Write the CID font referencing the font descriptor.
let mut cid = chunk.cid_font(cid_ref);
cid.subtype(if is_cff { CidFontType::Type0 } else { CidFontType::Type2 });
cid.base_font(Name(base_font.as_bytes()));
cid.system_info(SYSTEM_INFO);
cid.font_descriptor(descriptor_ref);
cid.default_width(0.0);
if !is_cff {
cid.cid_to_gid_map_predefined(Name(b"Identity"));
}
// Extract the widths of all glyphs.
// `remapped_gids` returns an iterator over the old GIDs in their new sorted
// order, so we can append the widths as is.
let widths = glyph_remapper
.remapped_gids()
.map(|gid| {
let width = ttf.glyph_hor_advance(GlyphId(gid)).unwrap_or(0);
font.to_em(width).to_font_units()
})
.collect::<Vec<_>>();
// Write all non-zero glyph widths.
let mut first = 0;
let mut width_writer = cid.widths();
for (w, group) in widths.group_by_key(|&w| w) {
let end = first + group.len();
if w != 0.0 {
let last = end - 1;
width_writer.same(first as u16, last as u16, w);
}
first = end;
}
width_writer.finish();
cid.finish();
// Write the /ToUnicode character map, which maps glyph ids back to
// unicode codepoints to enable copying out of the PDF.
let cmap = create_cmap(glyph_set, glyph_remapper);
chunk
.cmap(cmap_ref, &cmap)
.writing_mode(WMode::Horizontal)
.filter(Filter::FlateDecode);
let subset = subset_font(font, glyph_remapper)
.map_err(|err| {
let postscript_name = font.find_name(name_id::POST_SCRIPT_NAME);
let name = postscript_name.as_deref().unwrap_or(&font.info().family);
eco_format!("failed to process font {name}: {err}")
})
.at(Span::detached())?;
let mut stream = chunk.stream(data_ref, &subset);
stream.filter(Filter::FlateDecode);
if is_cff {
stream.pair(Name(b"Subtype"), Name(b"CIDFontType0C"));
}
stream.finish();
let mut font_descriptor =
write_font_descriptor(&mut chunk, descriptor_ref, font, &base_font);
if is_cff {
font_descriptor.font_file3(data_ref);
} else {
font_descriptor.font_file2(data_ref);
}
}
Ok(())
})?;
Ok((chunk, out))
}
/// Writes a FontDescriptor dictionary.
pub fn write_font_descriptor<'a>(
pdf: &'a mut Chunk,
descriptor_ref: Ref,
font: &'a Font,
base_font: &str,
) -> FontDescriptor<'a> {
let ttf = font.ttf();
let metrics = font.metrics();
let serif = font
.find_name(name_id::POST_SCRIPT_NAME)
.is_some_and(|name| name.contains("Serif"));
let mut flags = FontFlags::empty();
flags.set(FontFlags::SERIF, serif);
flags.set(FontFlags::FIXED_PITCH, ttf.is_monospaced());
flags.set(FontFlags::ITALIC, ttf.is_italic());
flags.insert(FontFlags::SYMBOLIC);
flags.insert(FontFlags::SMALL_CAP);
let global_bbox = ttf.global_bounding_box();
let bbox = Rect::new(
font.to_em(global_bbox.x_min).to_font_units(),
font.to_em(global_bbox.y_min).to_font_units(),
font.to_em(global_bbox.x_max).to_font_units(),
font.to_em(global_bbox.y_max).to_font_units(),
);
let italic_angle = ttf.italic_angle();
let ascender = metrics.ascender.to_font_units();
let descender = metrics.descender.to_font_units();
let cap_height = metrics.cap_height.to_font_units();
let stem_v = 10.0 + 0.244 * (f32::from(ttf.weight().to_number()) - 50.0);
// Write the font descriptor (contains metrics about the font).
let mut font_descriptor = pdf.font_descriptor(descriptor_ref);
font_descriptor
.name(Name(base_font.as_bytes()))
.flags(flags)
.bbox(bbox)
.italic_angle(italic_angle)
.ascent(ascender)
.descent(descender)
.cap_height(cap_height)
.stem_v(stem_v);
font_descriptor
}
/// Subset a font to the given glyphs.
///
/// - For a font with TrueType outlines, this produces the whole OpenType font.
/// - For a font with CFF outlines, this produces just the CFF font program.
///
/// In both cases, this returns the already compressed data.
#[comemo::memoize]
#[typst_macros::time(name = "subset font")]
fn subset_font(
font: &Font,
glyph_remapper: &GlyphRemapper,
) -> Result<Arc<Vec<u8>>, subsetter::Error> {
let data = font.data();
let subset = subsetter::subset(data, font.index(), glyph_remapper)?;
let mut data = subset.as_ref();
// Extract the standalone CFF font program if applicable.
let raw = ttf_parser::RawFace::parse(data, 0).unwrap();
if let Some(cff) = raw.table(CFF) {
data = cff;
}
Ok(Arc::new(deflate(data)))
}
/// Creates the base font name for a font with a specific glyph subset.
/// Consists of a subset tag and the PostScript name of the font.
///
/// Returns a string that is at most 116 characters long, so that even with
/// `-Identity-H` added it does not exceed the maximum PDF/A name length of 127.
pub(crate) fn base_font_name<T: Hash>(font: &Font, glyphs: &T) -> EcoString {
const MAX_LEN: usize = Name::PDFA_LIMIT - REST_LEN;
const REST_LEN: usize = SUBSET_TAG_LEN + 1 + 1 + IDENTITY_H.len();
let postscript_name = font.find_name(name_id::POST_SCRIPT_NAME);
let name = postscript_name.as_deref().unwrap_or("unknown");
let trimmed = &name[..name.len().min(MAX_LEN)];
// Hash the full name (we might have trimmed) and the glyphs to produce
// a fairly unique subset tag.
let subset_tag = subset_tag(&(name, glyphs));
eco_format!("{subset_tag}+{trimmed}")
}
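// For example (hypothetical names): a font with PostScript name
// "NotoSans-Regular" and subset tag "BCDEFG" yields
// "BCDEFG+NotoSans-Regular", and for a CFF font the Type0 base font then
// becomes "BCDEFG+NotoSans-Regular-Identity-H".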
/// Produce a unique 6 letter tag for a glyph set.
pub(crate) fn subset_tag<T: Hash>(glyphs: &T) -> EcoString {
const BASE: u128 = 26;
let mut hash = typst_utils::hash128(&glyphs);
let mut letter = [b'A'; SUBSET_TAG_LEN];
for l in letter.iter_mut() {
*l = b'A' + (hash % BASE) as u8;
hash /= BASE;
}
std::str::from_utf8(&letter).unwrap().into()
}
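// A worked example: for hash128 == 27, the first letter is b'A' + 27 % 26,
// i.e. 'B'; the remaining hash 27 / 26 == 1 yields another 'B'; the rest
// stay 'A', so the tag is "BBAAAA" (least significant digit first).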
/// Create a compressed `/ToUnicode` CMap.
#[comemo::memoize]
#[typst_macros::time(name = "create cmap")]
fn create_cmap(
glyph_set: &BTreeMap<u16, EcoString>,
glyph_remapper: &GlyphRemapper,
) -> Arc<Vec<u8>> {
// Produce a reverse mapping from glyphs' CIDs to unicode strings.
let mut cmap = UnicodeCmap::new(CMAP_NAME, SYSTEM_INFO);
for (&g, text) in glyph_set.iter() {
// See the comment in `write_normal_text` for why we can choose the CID this way.
let cid = glyph_remapper.get(g).unwrap();
if !text.is_empty() {
cmap.pair_with_multiple(cid, text.chars());
}
}
Arc::new(deflate(&cmap.finish()))
}
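// Note that `pair_with_multiple` lets one CID map to several code points,
// so e.g. an "ffi" ligature glyph copies out of the PDF as the three
// characters "f", "f", "i" instead of nothing.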

View File

@@ -1,512 +0,0 @@
use std::collections::HashMap;
use std::f32::consts::{PI, TAU};
use std::sync::Arc;
use ecow::eco_format;
use pdf_writer::types::{ColorSpaceOperand, FunctionShadingType};
use pdf_writer::writers::StreamShadingType;
use pdf_writer::{Filter, Finish, Name, Ref};
use typst_library::diag::SourceResult;
use typst_library::layout::{Abs, Angle, Point, Quadrant, Ratio, Transform};
use typst_library::visualize::{
Color, ColorSpace, Gradient, RatioOrAngle, RelativeTo, WeightedColor,
};
use typst_utils::Numeric;
use crate::color::{
self, check_cmyk_allowed, ColorSpaceExt, PaintEncode, QuantizedColor,
};
use crate::{content, deflate, transform_to_array, AbsExt, PdfChunk, WithGlobalRefs};
/// A unique combination of gradient, transform, and aspect ratio that will be
/// encoded into the PDF.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct PdfGradient {
/// The transform to apply to the gradient.
pub transform: Transform,
/// The aspect ratio of the gradient.
/// Required for aspect ratio correction.
pub aspect_ratio: Ratio,
/// The gradient.
pub gradient: Gradient,
/// The corrected angle of the gradient.
pub angle: Angle,
}
/// Writes the actual gradients (shading patterns) to the PDF.
/// This is performed once after writing all pages.
pub fn write_gradients(
context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<PdfGradient, Ref>)> {
let mut chunk = PdfChunk::new();
let mut out = HashMap::new();
context.resources.traverse(&mut |resources| {
for pdf_gradient in resources.gradients.items() {
if out.contains_key(pdf_gradient) {
continue;
}
let shading = chunk.alloc();
out.insert(pdf_gradient.clone(), shading);
let PdfGradient { transform, aspect_ratio, gradient, angle } = pdf_gradient;
let color_space = if gradient.space().hue_index().is_some() {
ColorSpace::Oklab
} else {
gradient.space()
};
if color_space == ColorSpace::Cmyk {
check_cmyk_allowed(context.options)?;
}
let mut shading_pattern = match &gradient {
Gradient::Linear(_) => {
let shading_function =
shading_function(gradient, &mut chunk, color_space);
let mut shading_pattern = chunk.chunk.shading_pattern(shading);
let mut shading = shading_pattern.function_shading();
shading.shading_type(FunctionShadingType::Axial);
color::write(
color_space,
shading.color_space(),
&context.globals.color_functions,
);
let (mut sin, mut cos) = (angle.sin(), angle.cos());
// Scale to edges of unit square.
let factor = cos.abs() + sin.abs();
sin *= factor;
cos *= factor;
let (x1, y1, x2, y2): (f64, f64, f64, f64) = match angle.quadrant() {
Quadrant::First => (0.0, 0.0, cos, sin),
Quadrant::Second => (1.0, 0.0, cos + 1.0, sin),
Quadrant::Third => (1.0, 1.0, cos + 1.0, sin + 1.0),
Quadrant::Fourth => (0.0, 1.0, cos, sin + 1.0),
};
shading
.anti_alias(gradient.anti_alias())
.function(shading_function)
.coords([x1 as f32, y1 as f32, x2 as f32, y2 as f32])
.extend([true; 2]);
shading.finish();
shading_pattern
}
Gradient::Radial(radial) => {
let shading_function =
shading_function(gradient, &mut chunk, color_space_of(gradient));
let mut shading_pattern = chunk.chunk.shading_pattern(shading);
let mut shading = shading_pattern.function_shading();
shading.shading_type(FunctionShadingType::Radial);
color::write(
color_space,
shading.color_space(),
&context.globals.color_functions,
);
shading
.anti_alias(gradient.anti_alias())
.function(shading_function)
.coords([
radial.focal_center.x.get() as f32,
radial.focal_center.y.get() as f32,
radial.focal_radius.get() as f32,
radial.center.x.get() as f32,
radial.center.y.get() as f32,
radial.radius.get() as f32,
])
.extend([true; 2]);
shading.finish();
shading_pattern
}
Gradient::Conic(_) => {
let vertices = compute_vertex_stream(gradient, *aspect_ratio);
let stream_shading_id = chunk.alloc();
let mut stream_shading =
chunk.chunk.stream_shading(stream_shading_id, &vertices);
color::write(
color_space,
stream_shading.color_space(),
&context.globals.color_functions,
);
let range = color_space.range();
stream_shading
.bits_per_coordinate(16)
.bits_per_component(16)
.bits_per_flag(8)
.shading_type(StreamShadingType::CoonsPatch)
.decode(
[0.0, 1.0, 0.0, 1.0].into_iter().chain(range.iter().copied()),
)
.anti_alias(gradient.anti_alias())
.filter(Filter::FlateDecode);
stream_shading.finish();
let mut shading_pattern = chunk.shading_pattern(shading);
shading_pattern.shading_ref(stream_shading_id);
shading_pattern
}
};
shading_pattern.matrix(transform_to_array(*transform));
}
Ok(())
})?;
Ok((chunk, out))
}
/// Writes an exponential or stitched function that expresses the gradient.
fn shading_function(
gradient: &Gradient,
chunk: &mut PdfChunk,
color_space: ColorSpace,
) -> Ref {
let function = chunk.alloc();
let mut functions = vec![];
let mut bounds = vec![];
let mut encode = vec![];
// Create the individual gradient functions for each pair of stops.
for window in gradient.stops_ref().windows(2) {
let (first, second) = (window[0], window[1]);
// If we have a hue index or are using Oklab, we will create several
// stops in-between to make the gradient smoother without interpolation
// issues with native color spaces.
let mut last_c = first.0;
if gradient.space().hue_index().is_some() {
for i in 0..=32 {
let t = i as f64 / 32.0;
let real_t = first.1.get() * (1.0 - t) + second.1.get() * t;
let c = gradient.sample(RatioOrAngle::Ratio(Ratio::new(real_t)));
functions.push(single_gradient(chunk, last_c, c, color_space));
bounds.push(real_t as f32);
encode.extend([0.0, 1.0]);
last_c = c;
}
}
bounds.push(second.1.get() as f32);
functions.push(single_gradient(chunk, first.0, second.0, color_space));
encode.extend([0.0, 1.0]);
}
// Special case for gradients with only two stops.
if functions.len() == 1 {
return functions[0];
}
// Remove the last bound, since it's not needed for the stitching function.
bounds.pop();
// Create the stitching function.
chunk
.stitching_function(function)
.domain([0.0, 1.0])
.range(color_space.range().iter().copied())
.functions(functions)
.bounds(bounds)
.encode(encode);
function
}
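// A worked example: a three-stop gradient at offsets 0%, 50%, and 100% in a
// non-hue color space yields two exponential segment functions, with
// bounds == [0.5] (the trailing 1.0 was popped) and
// encode == [0.0, 1.0, 0.0, 1.0].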
/// Writes an exponential function that expresses a single segment (between two
/// stops) of a gradient.
fn single_gradient(
chunk: &mut PdfChunk,
first_color: Color,
second_color: Color,
color_space: ColorSpace,
) -> Ref {
let reference = chunk.alloc();
chunk
.exponential_function(reference)
.range(color_space.range().iter().copied())
.c0(color_space.convert(first_color))
.c1(color_space.convert(second_color))
.domain([0.0, 1.0])
.n(1.0);
reference
}
impl PaintEncode for Gradient {
fn set_as_fill(
&self,
ctx: &mut content::Builder,
on_text: bool,
transforms: content::Transforms,
) -> SourceResult<()> {
ctx.reset_fill_color_space();
let index = register_gradient(ctx, self, on_text, transforms);
let id = eco_format!("Gr{index}");
let name = Name(id.as_bytes());
ctx.content.set_fill_color_space(ColorSpaceOperand::Pattern);
ctx.content.set_fill_pattern(None, name);
Ok(())
}
fn set_as_stroke(
&self,
ctx: &mut content::Builder,
on_text: bool,
transforms: content::Transforms,
) -> SourceResult<()> {
ctx.reset_stroke_color_space();
let index = register_gradient(ctx, self, on_text, transforms);
let id = eco_format!("Gr{index}");
let name = Name(id.as_bytes());
ctx.content.set_stroke_color_space(ColorSpaceOperand::Pattern);
ctx.content.set_stroke_pattern(None, name);
Ok(())
}
}
/// Deduplicates a gradient to a named PDF resource.
fn register_gradient(
ctx: &mut content::Builder,
gradient: &Gradient,
on_text: bool,
mut transforms: content::Transforms,
) -> usize {
// Edge cases for strokes.
if transforms.size.x.is_zero() {
transforms.size.x = Abs::pt(1.0);
}
if transforms.size.y.is_zero() {
transforms.size.y = Abs::pt(1.0);
}
let size = match gradient.unwrap_relative(on_text) {
RelativeTo::Self_ => transforms.size,
RelativeTo::Parent => transforms.container_size,
};
let (offset_x, offset_y) = match gradient {
Gradient::Conic(conic) => (
-size.x * (1.0 - conic.center.x.get() / 2.0) / 2.0,
-size.y * (1.0 - conic.center.y.get() / 2.0) / 2.0,
),
_ => (Abs::zero(), Abs::zero()),
};
let rotation = gradient.angle().unwrap_or_else(Angle::zero);
let transform = match gradient.unwrap_relative(on_text) {
RelativeTo::Self_ => transforms.transform,
RelativeTo::Parent => transforms.container_transform,
};
let scale_offset = match gradient {
Gradient::Conic(_) => 4.0_f64,
_ => 1.0,
};
let pdf_gradient = PdfGradient {
aspect_ratio: size.aspect_ratio(),
transform: transform
.pre_concat(Transform::translate(
offset_x * scale_offset,
offset_y * scale_offset,
))
.pre_concat(Transform::scale(
Ratio::new(size.x.to_pt() * scale_offset),
Ratio::new(size.y.to_pt() * scale_offset),
)),
gradient: gradient.clone(),
angle: Gradient::correct_aspect_ratio(rotation, size.aspect_ratio()),
};
ctx.resources.colors.mark_as_used(color_space_of(gradient));
ctx.resources.gradients.insert(pdf_gradient)
}
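// The resulting matrix maps the unit square, in which the shading
// coordinates are specified, onto the gradient's target rectangle; for
// conic gradients both the translation and the scale use the extra factor
// of 4 from `scale_offset`.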
/// Writes a single Coons Patch as defined in the PDF specification
/// to a binary vec.
///
/// Structure:
/// - flag: `u8`
/// - points: `[u16; 24]`
/// - colors: `[u16; 4*N]` (N = number of components)
fn write_patch(
target: &mut Vec<u8>,
t: f32,
t1: f32,
c0: &[u16],
c1: &[u16],
angle: Angle,
) {
let theta = -TAU * t + angle.to_rad() as f32 + PI;
let theta1 = -TAU * t1 + angle.to_rad() as f32 + PI;
let (cp1, cp2) =
control_point(Point::new(Abs::pt(0.5), Abs::pt(0.5)), 0.5, theta, theta1);
// Push the flag.
target.push(0);
let p1 =
[u16::quantize(0.5, [0.0, 1.0]).to_be(), u16::quantize(0.5, [0.0, 1.0]).to_be()];
let p2 = [
u16::quantize(theta.cos(), [-1.0, 1.0]).to_be(),
u16::quantize(theta.sin(), [-1.0, 1.0]).to_be(),
];
let p3 = [
u16::quantize(theta1.cos(), [-1.0, 1.0]).to_be(),
u16::quantize(theta1.sin(), [-1.0, 1.0]).to_be(),
];
let cp1 = [
u16::quantize(cp1.x.to_f32(), [0.0, 1.0]).to_be(),
u16::quantize(cp1.y.to_f32(), [0.0, 1.0]).to_be(),
];
let cp2 = [
u16::quantize(cp2.x.to_f32(), [0.0, 1.0]).to_be(),
u16::quantize(cp2.y.to_f32(), [0.0, 1.0]).to_be(),
];
// Push the points.
target.extend_from_slice(bytemuck::cast_slice(&[
p1, p1, p2, p2, cp1, cp2, p3, p3, p1, p1, p1, p1,
]));
// Push the colors.
let colors = [c0, c0, c1, c1]
.into_iter()
.flat_map(|c| c.iter().copied().map(u16::to_be_bytes))
.flatten();
target.extend(colors);
}
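// A minimal sketch of the quantization the `u16::quantize` calls above are
// assumed to perform (hypothetical free function, not the actual trait): the
// value is clamped into the given range and mapped linearly onto the full
// `u16` domain before the big-endian conversion.
fn quantize_u16(value: f32, [min, max]: [f32; 2]) -> u16 {
    let t = ((value - min) / (max - min)).clamp(0.0, 1.0);
    (t * f32::from(u16::MAX)).round() as u16
}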
fn control_point(c: Point, r: f32, angle_start: f32, angle_end: f32) -> (Point, Point) {
let n = (TAU / (angle_end - angle_start)).abs();
let f = ((angle_end - angle_start) / n).tan() * 4.0 / 3.0;
let p1 = c + Point::new(
Abs::pt((r * angle_start.cos() - f * r * angle_start.sin()) as f64),
Abs::pt((r * angle_start.sin() + f * r * angle_start.cos()) as f64),
);
let p2 = c + Point::new(
Abs::pt((r * angle_end.cos() + f * r * angle_end.sin()) as f64),
Abs::pt((r * angle_end.sin() - f * r * angle_end.cos()) as f64),
);
(p1, p2)
}
#[comemo::memoize]
fn compute_vertex_stream(gradient: &Gradient, aspect_ratio: Ratio) -> Arc<Vec<u8>> {
let Gradient::Conic(conic) = gradient else { unreachable!() };
// Generated vertices for the Coons patches
let mut vertices = Vec::new();
// Correct the gradient's angle
let angle = Gradient::correct_aspect_ratio(conic.angle, aspect_ratio);
for window in conic.stops.windows(2) {
let ((c0, t0), (c1, t1)) = (window[0], window[1]);
// Precision:
// - If both endpoint colors are equal, a patch every 90deg suffices.
// - For a hue-based color space, insert at least 200 stops.
// - For any other color space, insert at least 20 stops.
let max_dt = if c0 == c1 {
0.25
} else if conic.space.hue_index().is_some() {
0.005
} else {
0.05
};
let encode_space = conic
.space
.hue_index()
.map(|_| ColorSpace::Oklab)
.unwrap_or(conic.space);
let mut t_x = t0.get();
let dt = (t1.get() - t0.get()).min(max_dt);
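// E.g. for a window from t0 = 0.0 to t1 = 1.0 in a non-hue-based color
// space, dt = 0.05 and the `while` loop below emits 20 patches.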
// Special casing for sharp gradients.
if t0 == t1 {
write_patch(
&mut vertices,
t0.get() as f32,
t1.get() as f32,
&encode_space.convert(c0),
&encode_space.convert(c1),
angle,
);
continue;
}
while t_x < t1.get() {
let t_next = (t_x + dt).min(t1.get());
// The current progress in the current window.
let t = |t| (t - t0.get()) / (t1.get() - t0.get());
let c = Color::mix_iter(
[WeightedColor::new(c0, 1.0 - t(t_x)), WeightedColor::new(c1, t(t_x))],
conic.space,
)
.unwrap();
let c_next = Color::mix_iter(
[
WeightedColor::new(c0, 1.0 - t(t_next)),
WeightedColor::new(c1, t(t_next)),
],
conic.space,
)
.unwrap();
write_patch(
&mut vertices,
t_x as f32,
t_next as f32,
&encode_space.convert(c),
&encode_space.convert(c_next),
angle,
);
t_x = t_next;
}
}
Arc::new(deflate(&vertices))
}
fn color_space_of(gradient: &Gradient) -> ColorSpace {
if gradient.space().hue_index().is_some() {
ColorSpace::Oklab
} else {
gradient.space()
}
}

View File

@ -1,249 +1,244 @@
use std::collections::HashMap;
use std::io::Cursor;
use std::hash::{Hash, Hasher};
use std::sync::{Arc, OnceLock};
use ecow::eco_format;
use image::{DynamicImage, GenericImageView, Rgba};
use pdf_writer::{Chunk, Filter, Finish, Ref};
use typst_library::diag::{At, SourceResult, StrResult};
use image::{DynamicImage, EncodableLayout, GenericImageView, Rgba};
use krilla::image::{BitsPerComponent, CustomImage, ImageColorspace};
use krilla::surface::Surface;
use krilla_svg::{SurfaceExt, SvgSettings};
use typst_library::diag::{bail, SourceResult};
use typst_library::foundations::Smart;
use typst_library::layout::{Abs, Angle, Ratio, Size, Transform};
use typst_library::visualize::{
ColorSpace, ExchangeFormat, Image, ImageKind, ImageScaling, RasterFormat,
RasterImage, SvgImage,
ExchangeFormat, Image, ImageKind, ImageScaling, RasterFormat, RasterImage,
};
use typst_utils::Deferred;
use typst_syntax::Span;
use crate::{color, deflate, PdfChunk, WithGlobalRefs};
use crate::convert::{FrameContext, GlobalContext};
use crate::util::{SizeExt, TransformExt};
/// Embed all used images into the PDF.
#[typst_macros::time(name = "write images")]
pub fn write_images(
context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<Image, Ref>)> {
let mut chunk = PdfChunk::new();
let mut out = HashMap::new();
context.resources.traverse(&mut |resources| {
for (i, image) in resources.images.items().enumerate() {
if out.contains_key(image) {
continue;
#[typst_macros::time(name = "handle image")]
pub(crate) fn handle_image(
gc: &mut GlobalContext,
fc: &mut FrameContext,
image: &Image,
size: Size,
surface: &mut Surface,
span: Span,
) -> SourceResult<()> {
surface.push_transform(&fc.state().transform().to_krilla());
surface.set_location(span.into_raw().get());
let interpolate = image.scaling() == Smart::Custom(ImageScaling::Smooth);
if let Some(alt) = image.alt() {
surface.start_alt_text(alt);
}
let (handle, span) = resources.deferred_images.get(&i).unwrap();
let encoded = handle.wait().as_ref().map_err(Clone::clone).at(*span)?;
gc.image_spans.insert(span);
match encoded {
EncodedImage::Raster {
data,
filter,
color_space,
bits_per_component,
width,
height,
compressed_icc,
alpha,
interpolate,
} => {
let image_ref = chunk.alloc();
out.insert(image.clone(), image_ref);
match image.kind() {
ImageKind::Raster(raster) => {
let (exif_transform, new_size) = exif_transform(raster, size);
surface.push_transform(&exif_transform.to_krilla());
let mut image = chunk.chunk.image_xobject(image_ref, data);
image.filter(*filter);
image.width(*width as i32);
image.height(*height as i32);
image.bits_per_component(i32::from(*bits_per_component));
image.interpolate(*interpolate);
let image = match convert_raster(raster.clone(), interpolate) {
None => bail!(span, "failed to process image"),
Some(i) => i,
};
let mut icc_ref = None;
let space = image.color_space();
if compressed_icc.is_some() {
let id = chunk.alloc.bump();
space.icc_based(id);
icc_ref = Some(id);
} else {
color::write(
*color_space,
space,
&context.globals.color_functions,
if !gc.image_to_spans.contains_key(&image) {
gc.image_to_spans.insert(image.clone(), span);
}
surface.draw_image(image, new_size.to_krilla());
surface.pop();
}
ImageKind::Svg(svg) => {
surface.draw_svg(
svg.tree(),
size.to_krilla(),
SvgSettings { embed_text: true, ..Default::default() },
);
}
// Add a second gray-scale image containing the alpha values if
// this image has an alpha channel.
if let Some((alpha_data, alpha_filter)) = alpha {
let mask_ref = chunk.alloc.bump();
image.s_mask(mask_ref);
image.finish();
let mut mask = chunk.image_xobject(mask_ref, alpha_data);
mask.filter(*alpha_filter);
mask.width(*width as i32);
mask.height(*height as i32);
mask.color_space().device_gray();
mask.bits_per_component(i32::from(*bits_per_component));
mask.interpolate(*interpolate);
} else {
image.finish();
}
if let (Some(compressed_icc), Some(icc_ref)) =
(compressed_icc, icc_ref)
{
let mut stream = chunk.icc_profile(icc_ref, compressed_icc);
stream.filter(Filter::FlateDecode);
match color_space {
ColorSpace::Srgb => {
stream.n(3);
stream.alternate().srgb();
}
ColorSpace::D65Gray => {
stream.n(1);
stream.alternate().d65_gray();
}
_ => unimplemented!(),
}
}
}
EncodedImage::Svg(svg_chunk, id) => {
let mut map = HashMap::new();
svg_chunk.renumber_into(&mut chunk.chunk, |old| {
*map.entry(old).or_insert_with(|| chunk.alloc.bump())
});
out.insert(image.clone(), map[id]);
}
}
if image.alt().is_some() {
surface.end_alt_text();
}
surface.pop();
surface.reset_location();
Ok(())
})?;
Ok((chunk, out))
}
/// Creates a new PDF image from the given image.
///
/// Also starts the deferred encoding of the image.
struct Repr {
/// The original, underlying raster image.
raster: RasterImage,
/// The alpha channel of the raster image, if existing.
alpha_channel: OnceLock<Option<Vec<u8>>>,
/// A (potentially) converted version of the dynamic image stored in `raster`
/// that is guaranteed to be in either luma8 or rgb8, and thus can be used for
/// the `color_channel` method of `CustomImage`.
actual_dynamic: OnceLock<Arc<DynamicImage>>,
}
/// A wrapper around `RasterImage` so that we can implement `CustomImage`.
#[derive(Clone)]
struct PdfImage(Arc<Repr>);
impl PdfImage {
pub fn new(raster: RasterImage) -> Self {
Self(Arc::new(Repr {
raster,
alpha_channel: OnceLock::new(),
actual_dynamic: OnceLock::new(),
}))
}
}
impl Hash for PdfImage {
fn hash<H: Hasher>(&self, state: &mut H) {
// `alpha_channel` and `actual_dynamic` are generated from the underlying `RasterImage`,
// so this is enough. Since `raster` is prehashed, this is also very cheap.
self.0.raster.hash(state);
}
}
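// A self-contained sketch of the lazy-caching pattern used by the `OnceLock`
// fields above (the `Cached` type and its negation step are made up for
// illustration):
struct Cached {
    input: Vec<u8>,
    derived: std::sync::OnceLock<Vec<u8>>,
}

impl Cached {
    fn derived(&self) -> &[u8] {
        // Computed at most once; later calls reuse the stored result.
        self.derived.get_or_init(|| self.input.iter().map(|&b| !b).collect())
    }
}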
impl CustomImage for PdfImage {
fn color_channel(&self) -> &[u8] {
self.0
.actual_dynamic
.get_or_init(|| {
let dynamic = self.0.raster.dynamic();
let channel_count = dynamic.color().channel_count();
match (dynamic.as_ref(), channel_count) {
// Pure luma8 or rgb8 image, can use it directly.
(DynamicImage::ImageLuma8(_), _) => dynamic.clone(),
(DynamicImage::ImageRgb8(_), _) => dynamic.clone(),
// Grey-scale image, convert to luma8.
(_, 1 | 2) => Arc::new(DynamicImage::ImageLuma8(dynamic.to_luma8())),
// Anything else, convert to rgb8.
_ => Arc::new(DynamicImage::ImageRgb8(dynamic.to_rgb8())),
}
})
.as_bytes()
}
fn alpha_channel(&self) -> Option<&[u8]> {
self.0
.alpha_channel
.get_or_init(|| {
self.0.raster.dynamic().color().has_alpha().then(|| {
self.0
.raster
.dynamic()
.pixels()
.map(|(_, _, Rgba([_, _, _, a]))| a)
.collect()
})
})
.as_ref()
.map(|v| &**v)
}
fn bits_per_component(&self) -> BitsPerComponent {
BitsPerComponent::Eight
}
fn size(&self) -> (u32, u32) {
(self.0.raster.width(), self.0.raster.height())
}
fn icc_profile(&self) -> Option<&[u8]> {
if matches!(
self.0.raster.dynamic().as_ref(),
DynamicImage::ImageLuma8(_)
| DynamicImage::ImageLumaA8(_)
| DynamicImage::ImageRgb8(_)
| DynamicImage::ImageRgba8(_)
) {
self.0.raster.icc().map(|b| b.as_bytes())
} else {
// In all other cases, the dynamic image will be converted into RGB8 or
// LUMA8, so the ICC profile may become invalid, and thus we don't include it.
None
}
}
fn color_space(&self) -> ImageColorspace {
// Remember that we convert all images to either RGB or luma.
if self.0.raster.dynamic().color().has_color() {
ImageColorspace::Rgb
} else {
ImageColorspace::Luma
}
}
}
#[comemo::memoize]
pub fn deferred_image(
image: Image,
pdfa: bool,
) -> (Deferred<StrResult<EncodedImage>>, Option<ColorSpace>) {
let color_space = match image.kind() {
ImageKind::Raster(raster) if raster.icc().is_none() => {
Some(to_color_space(raster.dynamic().color()))
}
_ => None,
};
// PDF/A does not appear to allow interpolation.
// See https://github.com/typst/typst/issues/2942.
let interpolate = !pdfa && image.scaling() == Smart::Custom(ImageScaling::Smooth);
let deferred = Deferred::new(move || match image.kind() {
ImageKind::Raster(raster) => Ok(encode_raster_image(raster, interpolate)),
ImageKind::Svg(svg) => {
let (chunk, id) = encode_svg(svg, pdfa)
.map_err(|err| eco_format!("failed to convert SVG to PDF: {err}"))?;
Ok(EncodedImage::Svg(chunk, id))
}
fn convert_raster(
raster: RasterImage,
interpolate: bool,
) -> Option<krilla::image::Image> {
if let RasterFormat::Exchange(ExchangeFormat::Jpg) = raster.format() {
let image_data: Arc<dyn AsRef<[u8]> + Send + Sync> =
Arc::new(raster.data().clone());
let icc_profile = raster.icc().map(|i| {
let i: Arc<dyn AsRef<[u8]> + Send + Sync> = Arc::new(i.clone());
i
});
(deferred, color_space)
}
/// Encode an image with a suitable filter.
#[typst_macros::time(name = "encode raster image")]
fn encode_raster_image(image: &RasterImage, interpolate: bool) -> EncodedImage {
let dynamic = image.dynamic();
let color_space = to_color_space(dynamic.color());
let (filter, data, bits_per_component) =
if image.format() == RasterFormat::Exchange(ExchangeFormat::Jpg) {
let mut data = Cursor::new(vec![]);
dynamic.write_to(&mut data, image::ImageFormat::Jpeg).unwrap();
(Filter::DctDecode, data.into_inner(), 8)
} else {
// TODO: Encode flate streams with PNG-predictor?
let (data, bits_per_component) = match (dynamic, color_space) {
// RGB image.
(DynamicImage::ImageRgb8(rgb), _) => (deflate(rgb.as_raw()), 8),
// Grayscale image
(DynamicImage::ImageLuma8(luma), _) => (deflate(luma.as_raw()), 8),
(_, ColorSpace::D65Gray) => (deflate(dynamic.to_luma8().as_raw()), 8),
// Anything else
_ => (deflate(dynamic.to_rgb8().as_raw()), 8),
};
(Filter::FlateDecode, data, bits_per_component)
};
let compressed_icc = image.icc().map(|data| deflate(data));
let alpha = dynamic.color().has_alpha().then(|| encode_alpha(dynamic));
EncodedImage::Raster {
data,
filter,
color_space,
bits_per_component,
width: image.width(),
height: image.height(),
compressed_icc,
alpha,
krilla::image::Image::from_jpeg_with_icc(
image_data.into(),
icc_profile.map(|i| i.into()),
interpolate,
}
}
/// Encode an image's alpha channel if present.
#[typst_macros::time(name = "encode alpha")]
fn encode_alpha(image: &DynamicImage) -> (Vec<u8>, Filter) {
let pixels: Vec<_> = image.pixels().map(|(_, _, Rgba([_, _, _, a]))| a).collect();
(deflate(&pixels), Filter::FlateDecode)
}
/// Encode an SVG into a chunk of PDF objects.
#[typst_macros::time(name = "encode svg")]
fn encode_svg(
svg: &SvgImage,
pdfa: bool,
) -> Result<(Chunk, Ref), svg2pdf::ConversionError> {
svg2pdf::to_chunk(
svg.tree(),
svg2pdf::ConversionOptions { pdfa, ..Default::default() },
)
}
/// A pre-encoded image.
pub enum EncodedImage {
/// A pre-encoded rasterized image.
Raster {
/// The raw, pre-deflated image data.
data: Vec<u8>,
/// The filter to use for the image.
filter: Filter,
/// Which color space this image is encoded in.
color_space: ColorSpace,
/// How many bits of each color component are stored.
bits_per_component: u8,
/// The image's width.
width: u32,
/// The image's height.
height: u32,
/// The image's ICC profile, deflated, if any.
compressed_icc: Option<Vec<u8>>,
/// The alpha channel of the image, pre-deflated, if any.
alpha: Option<(Vec<u8>, Filter)>,
/// Whether image interpolation should be enabled.
interpolate: bool,
},
/// A vector graphic.
///
/// The chunk is the SVG converted to PDF objects.
Svg(Chunk, Ref),
}
/// Matches an [`image::ColorType`] to [`ColorSpace`].
fn to_color_space(color: image::ColorType) -> ColorSpace {
use image::ColorType::*;
match color {
L8 | La8 | L16 | La16 => ColorSpace::D65Gray,
Rgb8 | Rgba8 | Rgb16 | Rgba16 | Rgb32F | Rgba32F => ColorSpace::Srgb,
_ => unimplemented!(),
} else {
krilla::image::Image::from_custom(PdfImage::new(raster), interpolate)
}
}
fn exif_transform(image: &RasterImage, size: Size) -> (Transform, Size) {
let base = |hp: bool, vp: bool, mut base_ts: Transform, size: Size| {
if hp {
// Flip horizontally in-place.
base_ts = base_ts.pre_concat(
Transform::scale(-Ratio::one(), Ratio::one())
.pre_concat(Transform::translate(-size.x, Abs::zero())),
)
}
if vp {
// Flip vertically in-place.
base_ts = base_ts.pre_concat(
Transform::scale(Ratio::one(), -Ratio::one())
.pre_concat(Transform::translate(Abs::zero(), -size.y)),
)
}
base_ts
};
let no_flipping =
|hp: bool, vp: bool| (base(hp, vp, Transform::identity(), size), size);
let with_flipping = |hp: bool, vp: bool| {
let base_ts = Transform::rotate_at(Angle::deg(90.0), Abs::zero(), Abs::zero())
.pre_concat(Transform::scale(Ratio::one(), -Ratio::one()));
let inv_size = Size::new(size.y, size.x);
(base(hp, vp, base_ts, inv_size), inv_size)
};
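// EXIF orientations: 1 is upright, 2 flips horizontally, 3 rotates by 180
// degrees, 4 flips vertically, and 5 to 8 are the transposed variants, which
// also swap width and height (hence `inv_size` above).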
match image.exif_rotation() {
Some(2) => no_flipping(true, false),
Some(3) => no_flipping(true, true),
Some(4) => no_flipping(false, true),
Some(5) => with_flipping(false, false),
Some(6) => with_flipping(true, false),
Some(7) => with_flipping(true, true),
Some(8) => with_flipping(false, true),
_ => no_flipping(false, false),
}
}

View File

@ -1,81 +1,33 @@
//! Exporting of Typst documents into PDFs.
//! Exporting Typst documents to PDF.
mod catalog;
mod color;
mod color_font;
mod content;
mod convert;
mod embed;
mod extg;
mod font;
mod gradient;
mod image;
mod named_destination;
mod link;
mod metadata;
mod outline;
mod page;
mod resources;
mod tiling;
mod paint;
mod shape;
mod text;
mod util;
pub use self::metadata::{Timestamp, Timezone};
use std::collections::{BTreeMap, HashMap};
use std::fmt::{self, Debug, Formatter};
use std::hash::Hash;
use std::ops::{Deref, DerefMut};
use base64::Engine;
use ecow::EcoString;
use pdf_writer::{Chunk, Name, Pdf, Ref, Str, TextStr};
use ecow::eco_format;
use serde::{Deserialize, Serialize};
use typst_library::diag::{bail, SourceResult, StrResult};
use typst_library::foundations::{Datetime, Smart};
use typst_library::layout::{Abs, Em, PageRanges, PagedDocument, Transform};
use typst_library::text::Font;
use typst_library::visualize::Image;
use typst_syntax::Span;
use typst_utils::Deferred;
use crate::catalog::write_catalog;
use crate::color::{alloc_color_functions_refs, ColorFunctionRefs};
use crate::color_font::{write_color_fonts, ColorFontSlice};
use crate::embed::write_embedded_files;
use crate::extg::{write_graphic_states, ExtGState};
use crate::font::write_fonts;
use crate::gradient::{write_gradients, PdfGradient};
use crate::image::write_images;
use crate::named_destination::{write_named_destinations, NamedDestinations};
use crate::page::{alloc_page_refs, traverse_pages, write_page_tree, EncodedPage};
use crate::resources::{
alloc_resources_refs, write_resource_dictionaries, Resources, ResourcesRefs,
};
use crate::tiling::{write_tilings, PdfTiling};
use typst_library::foundations::Smart;
use typst_library::layout::{PageRanges, PagedDocument};
/// Export a document into a PDF file.
///
/// Returns the raw bytes making up the PDF file.
#[typst_macros::time(name = "pdf")]
pub fn pdf(document: &PagedDocument, options: &PdfOptions) -> SourceResult<Vec<u8>> {
PdfBuilder::new(document, options)
.phase(|builder| builder.run(traverse_pages))?
.phase(|builder| {
Ok(GlobalRefs {
color_functions: builder.run(alloc_color_functions_refs)?,
pages: builder.run(alloc_page_refs)?,
resources: builder.run(alloc_resources_refs)?,
})
})?
.phase(|builder| {
Ok(References {
named_destinations: builder.run(write_named_destinations)?,
fonts: builder.run(write_fonts)?,
color_fonts: builder.run(write_color_fonts)?,
images: builder.run(write_images)?,
gradients: builder.run(write_gradients)?,
tilings: builder.run(write_tilings)?,
ext_gs: builder.run(write_graphic_states)?,
embedded_files: builder.run(write_embedded_files)?,
})
})?
.phase(|builder| builder.run(write_page_tree))?
.phase(|builder| builder.run(write_resource_dictionaries))?
.export_with(write_catalog)
convert::convert(document, options)
}
/// Settings for PDF export.
@ -103,82 +55,74 @@ pub struct PdfOptions<'a> {
pub standards: PdfStandards,
}
/// A timestamp with timezone information.
#[derive(Debug, Clone, Copy)]
pub struct Timestamp {
/// The datetime of the timestamp.
pub(crate) datetime: Datetime,
/// The timezone of the timestamp.
pub(crate) timezone: Timezone,
}
impl Timestamp {
/// Create a new timestamp with a given datetime and UTC suffix.
pub fn new_utc(datetime: Datetime) -> Self {
Self { datetime, timezone: Timezone::UTC }
}
/// Create a new timestamp with a given datetime and a local timezone offset.
pub fn new_local(datetime: Datetime, whole_minute_offset: i32) -> Option<Self> {
let hour_offset = (whole_minute_offset / 60).try_into().ok()?;
// Note: the `%` operator in Rust is the remainder operator, not the
// modulo operator. The remainder operator can return negative results.
// We can simply apply `abs` here because we assume the `minute_offset`
// will have the same sign as `hour_offset`.
let minute_offset = (whole_minute_offset % 60).abs().try_into().ok()?;
match (hour_offset, minute_offset) {
// Only accept valid timezone offsets with `-23 <= hours <= 23`,
// and `0 <= minutes <= 59`.
(-23..=23, 0..=59) => Some(Self {
datetime,
timezone: Timezone::Local { hour_offset, minute_offset },
}),
_ => None,
}
}
}
/// A timezone.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Timezone {
/// The UTC timezone.
UTC,
/// The local timezone offset from UTC. The `minute_offset` has the
/// same sign as `hour_offset`.
Local { hour_offset: i8, minute_offset: u8 },
}
/// Encapsulates a list of compatible PDF standards.
#[derive(Clone)]
pub struct PdfStandards {
/// For now, we simplify to just PDF/A. But it can be more fine-grained in
/// the future.
pub(crate) pdfa: bool,
/// Whether the standard allows for embedding any kind of file into the PDF.
/// We disallow this for PDF/A-2, since it only allows embedding
/// PDF/A-1 and PDF/A-2 documents.
pub(crate) embedded_files: bool,
/// Part of the PDF/A standard.
pub(crate) pdfa_part: Option<(i32, &'static str)>,
pub(crate) config: krilla::configure::Configuration,
}
impl PdfStandards {
/// Validates a list of PDF standards for compatibility and returns their
/// encapsulated representation.
pub fn new(list: &[PdfStandard]) -> StrResult<Self> {
let a2b = list.contains(&PdfStandard::A_2b);
let a3b = list.contains(&PdfStandard::A_3b);
use krilla::configure::{Configuration, PdfVersion, Validator};
if a2b && a3b {
bail!("PDF cannot conform to A-2B and A-3B at the same time")
let mut version: Option<PdfVersion> = None;
let mut set_version = |v: PdfVersion| -> StrResult<()> {
if let Some(prev) = version {
bail!(
"PDF cannot conform to {} and {} at the same time",
prev.as_str(),
v.as_str()
);
}
version = Some(v);
Ok(())
};
let mut validator = None;
let mut set_validator = |v: Validator| -> StrResult<()> {
if validator.is_some() {
bail!("Typst currently only supports one PDF substandard at a time");
}
validator = Some(v);
Ok(())
};
for standard in list {
match standard {
PdfStandard::V_1_4 => set_version(PdfVersion::Pdf14)?,
PdfStandard::V_1_5 => set_version(PdfVersion::Pdf15)?,
PdfStandard::V_1_6 => set_version(PdfVersion::Pdf16)?,
PdfStandard::V_1_7 => set_version(PdfVersion::Pdf17)?,
PdfStandard::V_2_0 => set_version(PdfVersion::Pdf20)?,
PdfStandard::A_1b => set_validator(Validator::A1_B)?,
PdfStandard::A_2b => set_validator(Validator::A2_B)?,
PdfStandard::A_2u => set_validator(Validator::A2_U)?,
PdfStandard::A_3b => set_validator(Validator::A3_B)?,
PdfStandard::A_3u => set_validator(Validator::A3_U)?,
PdfStandard::A_4 => set_validator(Validator::A4)?,
PdfStandard::A_4f => set_validator(Validator::A4F)?,
PdfStandard::A_4e => set_validator(Validator::A4E)?,
}
}
let pdfa = a2b || a3b;
Ok(Self {
pdfa,
embedded_files: !a2b,
pdfa_part: pdfa.then_some((if a2b { 2 } else { 3 }, "B")),
})
let config = match (version, validator) {
(Some(version), Some(validator)) => {
Configuration::new_with(validator, version).ok_or_else(|| {
eco_format!(
"{} is not compatible with {}",
version.as_str(),
validator.as_str()
)
})?
}
(Some(version), None) => Configuration::new_with_version(version),
(None, Some(validator)) => Configuration::new_with_validator(validator),
(None, None) => Configuration::new_with_version(PdfVersion::Pdf17),
};
Ok(Self { config })
}
}
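// For example, `PdfStandards::new(&[PdfStandard::V_1_7, PdfStandard::A_2b])`
// combines into `Configuration::new_with(Validator::A2_B, PdfVersion::Pdf17)`,
// while an empty list falls back to plain PDF 1.7.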
@ -190,7 +134,10 @@ impl Debug for PdfStandards {
impl Default for PdfStandards {
fn default() -> Self {
Self { pdfa: false, embedded_files: true, pdfa_part: None }
use krilla::configure::{Configuration, PdfVersion};
Self {
config: Configuration::new_with_version(PdfVersion::Pdf17),
}
}
}
@ -201,531 +148,43 @@ impl Default for PdfStandards {
#[allow(non_camel_case_types)]
#[non_exhaustive]
pub enum PdfStandard {
/// PDF 1.4.
#[serde(rename = "1.4")]
V_1_4,
/// PDF 1.5.
#[serde(rename = "1.5")]
V_1_5,
/// PDF 1.6.
#[serde(rename = "1.6")]
V_1_6,
/// PDF 1.7.
#[serde(rename = "1.7")]
V_1_7,
/// PDF 2.0.
#[serde(rename = "2.0")]
V_2_0,
/// PDF/A-1b.
#[serde(rename = "a-1b")]
A_1b,
/// PDF/A-2b.
#[serde(rename = "a-2b")]
A_2b,
/// PDF/A-3b.
/// PDF/A-2u.
#[serde(rename = "a-2u")]
A_2u,
/// PDF/A-3u.
#[serde(rename = "a-3b")]
A_3b,
}
/// A struct to build a PDF following a fixed succession of phases.
///
/// This type uses generics to represent its current state. `S` (for "state") is
/// all data that was produced by the previous phases and is now read-only.
///
/// Phase after phase, this state will be transformed. Each phase corresponds to
/// a call to the [eponymous function](`PdfBuilder::phase`) and produces a new
/// part of the state, which will be aggregated with all other information, for
/// consumption during the next phase.
///
/// In other words: this struct follows the **typestate pattern**. This prevents
/// you from using data that is not yet available, at the type level.
///
/// Each phase consists of processes that can read the state of the previous
/// phases and construct a part of the new state.
///
/// A final step, which has direct access to the global reference allocator and
/// PDF document, can be run with [`PdfBuilder::export_with`].
struct PdfBuilder<S> {
/// The context that has been accumulated so far.
state: S,
/// A global bump allocator.
alloc: Ref,
/// The PDF document that is being written.
pdf: Pdf,
}
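// A minimal, self-contained sketch of the typestate idea described above
// (the `Start` and `Done` states are made up for illustration): each phase
// consumes the builder and returns one whose type proves what is available.
struct TypestateBuilder<S> {
    state: S,
}

struct Start;

struct Done {
    output: Vec<u8>,
}

impl TypestateBuilder<Start> {
    // Only a builder in the `Start` state can run the phase; the output
    // cannot be accessed before the phase has produced it.
    fn phase(self) -> TypestateBuilder<Done> {
        TypestateBuilder { state: Done { output: Vec::new() } }
    }
}

impl TypestateBuilder<Done> {
    fn finish(self) -> Vec<u8> {
        self.state.output
    }
}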
/// The initial state: we are exploring the document, collecting all resources
/// that will be necessary later. The content of the pages is also built during
/// this phase.
struct WithDocument<'a> {
/// The Typst document that is exported.
document: &'a PagedDocument,
/// Settings for PDF export.
options: &'a PdfOptions<'a>,
}
/// At this point, resources were listed, but they don't have any reference
/// associated with them.
///
/// This phase allocates some global references.
struct WithResources<'a> {
document: &'a PagedDocument,
options: &'a PdfOptions<'a>,
/// The content of the pages encoded as PDF content streams.
///
/// The pages are at the index corresponding to their page number, but they
/// may be `None` if they are not in the range specified by
/// `exported_pages`.
pages: Vec<Option<EncodedPage>>,
/// The PDF resources that are used in the content of the pages.
resources: Resources<()>,
}
/// Global references.
struct GlobalRefs {
/// References for color conversion functions.
color_functions: ColorFunctionRefs,
/// Reference for pages.
///
/// Items of this vector are `None` if the corresponding page is not
/// exported.
pages: Vec<Option<Ref>>,
/// References for the resource dictionaries.
resources: ResourcesRefs,
}
impl<'a> From<(WithDocument<'a>, (Vec<Option<EncodedPage>>, Resources<()>))>
for WithResources<'a>
{
fn from(
(previous, (pages, resources)): (
WithDocument<'a>,
(Vec<Option<EncodedPage>>, Resources<()>),
),
) -> Self {
Self {
document: previous.document,
options: previous.options,
pages,
resources,
}
}
}
/// At this point, the resources have been collected, and global references have
/// been allocated.
///
/// We are now writing objects corresponding to resources and giving them
/// references, which will be collected in [`References`].
struct WithGlobalRefs<'a> {
document: &'a PagedDocument,
options: &'a PdfOptions<'a>,
pages: Vec<Option<EncodedPage>>,
/// Resources are the same as in previous phases, but each dictionary now has a reference.
resources: Resources,
/// Global references that were just allocated.
globals: GlobalRefs,
}
impl<'a> From<(WithResources<'a>, GlobalRefs)> for WithGlobalRefs<'a> {
fn from((previous, globals): (WithResources<'a>, GlobalRefs)) -> Self {
Self {
document: previous.document,
options: previous.options,
pages: previous.pages,
resources: previous.resources.with_refs(&globals.resources),
globals,
}
}
}
/// The references that have been assigned to each object.
struct References {
/// List of named destinations, each with an ID.
named_destinations: NamedDestinations,
/// The IDs of written fonts.
fonts: HashMap<Font, Ref>,
/// The IDs of written color fonts.
color_fonts: HashMap<ColorFontSlice, Ref>,
/// The IDs of written images.
images: HashMap<Image, Ref>,
/// The IDs of written gradients.
gradients: HashMap<PdfGradient, Ref>,
/// The IDs of written tilings.
tilings: HashMap<PdfTiling, Ref>,
/// The IDs of written external graphics states.
ext_gs: HashMap<ExtGState, Ref>,
/// The names and references for embedded files.
embedded_files: BTreeMap<EcoString, Ref>,
}
/// At this point, the references have been assigned to all resources. The page
/// tree is going to be written, and given a reference. It is also at this point that
/// the page contents are actually written.
struct WithRefs<'a> {
document: &'a PagedDocument,
options: &'a PdfOptions<'a>,
globals: GlobalRefs,
pages: Vec<Option<EncodedPage>>,
resources: Resources,
/// References that were allocated for resources.
references: References,
}
impl<'a> From<(WithGlobalRefs<'a>, References)> for WithRefs<'a> {
fn from((previous, references): (WithGlobalRefs<'a>, References)) -> Self {
Self {
document: previous.document,
options: previous.options,
globals: previous.globals,
pages: previous.pages,
resources: previous.resources,
references,
}
}
}
/// In this phase, we write resource dictionaries.
///
/// Each sub-resource gets its own isolated resource dictionary.
struct WithEverything<'a> {
document: &'a PagedDocument,
options: &'a PdfOptions<'a>,
globals: GlobalRefs,
pages: Vec<Option<EncodedPage>>,
resources: Resources,
references: References,
/// Reference that was allocated for the page tree.
page_tree_ref: Ref,
}
impl<'a> From<(WithEverything<'a>, ())> for WithEverything<'a> {
fn from((this, _): (WithEverything<'a>, ())) -> Self {
this
}
}
impl<'a> From<(WithRefs<'a>, Ref)> for WithEverything<'a> {
fn from((previous, page_tree_ref): (WithRefs<'a>, Ref)) -> Self {
Self {
document: previous.document,
options: previous.options,
globals: previous.globals,
resources: previous.resources,
references: previous.references,
pages: previous.pages,
page_tree_ref,
}
}
}
impl<'a> PdfBuilder<WithDocument<'a>> {
/// Start building a PDF for a Typst document.
fn new(document: &'a PagedDocument, options: &'a PdfOptions<'a>) -> Self {
Self {
alloc: Ref::new(1),
pdf: Pdf::new(),
state: WithDocument { document, options },
}
}
}
impl<S> PdfBuilder<S> {
/// Start a new phase, and save its output in the global state.
fn phase<NS, B, O>(mut self, builder: B) -> SourceResult<PdfBuilder<NS>>
where
// New state
NS: From<(S, O)>,
// Builder
B: Fn(&mut Self) -> SourceResult<O>,
{
let output = builder(&mut self)?;
Ok(PdfBuilder {
state: NS::from((self.state, output)),
alloc: self.alloc,
pdf: self.pdf,
})
}
/// Run a step with the current state, merges its output into the PDF file,
/// and renumbers any references it returned.
fn run<P, O>(&mut self, process: P) -> SourceResult<O>
where
// Process
P: Fn(&S) -> SourceResult<(PdfChunk, O)>,
// Output
O: Renumber,
{
let (chunk, mut output) = process(&self.state)?;
// Allocate a final reference for each temporary one
let allocated = chunk.alloc.get() - TEMPORARY_REFS_START;
let offset = TEMPORARY_REFS_START - self.alloc.get();
// Merge the chunk into the PDF, using the new references
chunk.renumber_into(&mut self.pdf, |mut r| {
r.renumber(offset);
r
});
// Also update the references in the output
output.renumber(offset);
self.alloc = Ref::new(self.alloc.get() + allocated);
Ok(output)
}
/// Finalize the PDF export and returns the buffer representing the
/// document.
fn export_with<P>(mut self, process: P) -> SourceResult<Vec<u8>>
where
P: Fn(S, &mut Pdf, &mut Ref) -> SourceResult<()>,
{
process(self.state, &mut self.pdf, &mut self.alloc)?;
Ok(self.pdf.finish())
}
}
/// A reference or collection of references that can be re-numbered,
/// to become valid in a global scope.
trait Renumber {
/// Renumber this value by shifting any references it contains by `offset`.
fn renumber(&mut self, offset: i32);
}
impl Renumber for () {
fn renumber(&mut self, _offset: i32) {}
}
impl Renumber for Ref {
fn renumber(&mut self, offset: i32) {
if self.get() >= TEMPORARY_REFS_START {
*self = Ref::new(self.get() - offset);
}
}
}
impl<R: Renumber> Renumber for Vec<R> {
fn renumber(&mut self, offset: i32) {
for item in self {
item.renumber(offset);
}
}
}
impl<T: Eq + Hash, R: Renumber> Renumber for HashMap<T, R> {
fn renumber(&mut self, offset: i32) {
for v in self.values_mut() {
v.renumber(offset);
}
}
}
impl<T: Ord, R: Renumber> Renumber for BTreeMap<T, R> {
fn renumber(&mut self, offset: i32) {
for v in self.values_mut() {
v.renumber(offset);
}
}
}
impl<R: Renumber> Renumber for Option<R> {
fn renumber(&mut self, offset: i32) {
if let Some(r) = self {
r.renumber(offset)
}
}
}
impl<T, R: Renumber> Renumber for (T, R) {
fn renumber(&mut self, offset: i32) {
self.1.renumber(offset)
}
}
/// A portion of a PDF file.
struct PdfChunk {
/// The actual chunk.
chunk: Chunk,
/// A local allocator.
alloc: Ref,
}
/// Any reference below that value was already allocated before and
/// should not be rewritten. Anything above was allocated in the current
/// chunk, and should be remapped.
///
/// This is a constant (large enough to avoid collisions) and not
/// dependent on self.alloc to allow for better memoization of steps, if
/// needed in the future.
const TEMPORARY_REFS_START: i32 = 1_000_000_000;
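// Worked example of the renumbering in `PdfBuilder::run` above: with
// `self.alloc` at 17, the offset is 1_000_000_000 - 17, so the temporary
// ref 1_000_000_002 maps to the global ref 19, and `self.alloc` then
// advances by however many refs the chunk allocated.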
/// A part of a PDF document.
impl PdfChunk {
/// Start writing a new part of the document.
fn new() -> Self {
PdfChunk {
chunk: Chunk::new(),
alloc: Ref::new(TEMPORARY_REFS_START),
}
}
/// Allocate a reference that is valid in the context of this chunk.
///
/// References allocated with this function should be [renumbered](`Renumber::renumber`)
/// before being used in other chunks. This is done automatically if these
/// references are stored in the global `PdfBuilder` state.
fn alloc(&mut self) -> Ref {
self.alloc.bump()
}
}
impl Deref for PdfChunk {
type Target = Chunk;
fn deref(&self) -> &Self::Target {
&self.chunk
}
}
impl DerefMut for PdfChunk {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.chunk
}
}
/// Compress data with the DEFLATE algorithm.
fn deflate(data: &[u8]) -> Vec<u8> {
const COMPRESSION_LEVEL: u8 = 6;
miniz_oxide::deflate::compress_to_vec_zlib(data, COMPRESSION_LEVEL)
}
/// Memoized and deferred version of [`deflate`] specialized for a page's content
/// stream.
#[comemo::memoize]
fn deflate_deferred(content: Vec<u8>) -> Deferred<Vec<u8>> {
Deferred::new(move || deflate(&content))
}
/// Create a base64-encoded hash of the value.
fn hash_base64<T: Hash>(value: &T) -> String {
base64::engine::general_purpose::STANDARD
.encode(typst_utils::hash128(value).to_be_bytes())
}
/// Additional methods for [`Abs`].
trait AbsExt {
/// Convert an absolute length to a number of points.
fn to_f32(self) -> f32;
}
impl AbsExt for Abs {
fn to_f32(self) -> f32 {
self.to_pt() as f32
}
}
/// Additional methods for [`Em`].
trait EmExt {
/// Convert an em length to a number of PDF font units.
fn to_font_units(self) -> f32;
}
impl EmExt for Em {
fn to_font_units(self) -> f32 {
1000.0 * self.get() as f32
}
}
trait NameExt<'a> {
/// The maximum length of a name in PDF/A.
const PDFA_LIMIT: usize = 127;
}
impl<'a> NameExt<'a> for Name<'a> {}
/// Additional methods for [`Str`].
trait StrExt<'a>: Sized {
/// The maximum length of a string in PDF/A.
const PDFA_LIMIT: usize = 32767;
/// Create a string that satisfies the constraints of PDF/A.
#[allow(unused)]
fn trimmed(string: &'a [u8]) -> Self;
}
impl<'a> StrExt<'a> for Str<'a> {
fn trimmed(string: &'a [u8]) -> Self {
Self(&string[..string.len().min(Self::PDFA_LIMIT)])
}
}
/// Additional methods for [`TextStr`].
trait TextStrExt<'a>: Sized {
/// The maximum length of a string in PDF/A.
const PDFA_LIMIT: usize = Str::PDFA_LIMIT;
/// Create a text string that satisfies the constraints of PDF/A.
fn trimmed(string: &'a str) -> Self;
}
impl<'a> TextStrExt<'a> for TextStr<'a> {
fn trimmed(string: &'a str) -> Self {
Self(&string[..string.len().min(Self::PDFA_LIMIT)])
}
}
/// Extension trait for [`Content`](pdf_writer::Content).
trait ContentExt {
fn save_state_checked(&mut self) -> SourceResult<()>;
}
impl ContentExt for pdf_writer::Content {
fn save_state_checked(&mut self) -> SourceResult<()> {
self.save_state();
if self.state_nesting_depth() > 28 {
bail!(
Span::detached(),
"maximum PDF grouping depth exceeding";
hint: "try to avoid excessive nesting of layout containers",
);
}
Ok(())
}
}
/// Convert to an array of floats.
fn transform_to_array(ts: Transform) -> [f32; 6] {
[
ts.sx.get() as f32,
ts.ky.get() as f32,
ts.kx.get() as f32,
ts.sy.get() as f32,
ts.tx.to_f32(),
ts.ty.to_f32(),
]
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_timestamp_new_local() {
let dummy_datetime = Datetime::from_ymd_hms(2024, 12, 17, 10, 10, 10).unwrap();
let test = |whole_minute_offset, expect_timezone| {
assert_eq!(
Timestamp::new_local(dummy_datetime, whole_minute_offset)
.unwrap()
.timezone,
expect_timezone
);
};
// Valid timezone offsets
test(0, Timezone::Local { hour_offset: 0, minute_offset: 0 });
test(480, Timezone::Local { hour_offset: 8, minute_offset: 0 });
test(-480, Timezone::Local { hour_offset: -8, minute_offset: 0 });
test(330, Timezone::Local { hour_offset: 5, minute_offset: 30 });
test(-210, Timezone::Local { hour_offset: -3, minute_offset: 30 });
test(-720, Timezone::Local { hour_offset: -12, minute_offset: 0 }); // AoE
// Corner cases
test(315, Timezone::Local { hour_offset: 5, minute_offset: 15 });
test(-225, Timezone::Local { hour_offset: -3, minute_offset: 45 });
test(1439, Timezone::Local { hour_offset: 23, minute_offset: 59 });
test(-1439, Timezone::Local { hour_offset: -23, minute_offset: 59 });
// Invalid timezone offsets
assert!(Timestamp::new_local(dummy_datetime, 1440).is_none());
assert!(Timestamp::new_local(dummy_datetime, -1440).is_none());
assert!(Timestamp::new_local(dummy_datetime, i32::MAX).is_none());
assert!(Timestamp::new_local(dummy_datetime, i32::MIN).is_none());
}
/// PDF/A-3u.
#[serde(rename = "a-3u")]
A_3u,
/// PDF/A-4.
#[serde(rename = "a-4")]
A_4,
/// PDF/A-4f.
#[serde(rename = "a-4f")]
A_4f,
/// PDF/A-4e.
#[serde(rename = "a-4e")]
A_4e,
}

View File

@ -0,0 +1,94 @@
use krilla::action::{Action, LinkAction};
use krilla::annotation::{LinkAnnotation, Target};
use krilla::destination::XyzDestination;
use krilla::geom::Rect;
use typst_library::layout::{Abs, Point, Size};
use typst_library::model::Destination;
use crate::convert::{FrameContext, GlobalContext};
use crate::util::{AbsExt, PointExt};
pub(crate) fn handle_link(
fc: &mut FrameContext,
gc: &mut GlobalContext,
dest: &Destination,
size: Size,
) {
let mut min_x = Abs::inf();
let mut min_y = Abs::inf();
let mut max_x = -Abs::inf();
let mut max_y = -Abs::inf();
let pos = Point::zero();
// Compute the bounding box of the transformed link.
for point in [
pos,
pos + Point::with_x(size.x),
pos + Point::with_y(size.y),
pos + size.to_point(),
] {
let t = point.transform(fc.state().transform());
min_x.set_min(t.x);
min_y.set_min(t.y);
max_x.set_max(t.x);
max_y.set_max(t.y);
}
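// `rect` below is thus the axis-aligned bounding box of the transformed
// link area; even a rotated link gets an upright rectangle.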
let x1 = min_x.to_f32();
let x2 = max_x.to_f32();
let y1 = min_y.to_f32();
let y2 = max_y.to_f32();
let rect = Rect::from_ltrb(x1, y1, x2, y2).unwrap();
// TODO: Support quad points.
let pos = match dest {
Destination::Url(u) => {
fc.push_annotation(
LinkAnnotation::new(
rect,
None,
Target::Action(Action::Link(LinkAction::new(u.to_string()))),
)
.into(),
);
return;
}
Destination::Position(p) => *p,
Destination::Location(loc) => {
if let Some(nd) = gc.loc_to_names.get(loc) {
// If a named destination has been registered, it's already guaranteed to
// not point to an excluded page.
fc.push_annotation(
LinkAnnotation::new(
rect,
None,
Target::Destination(krilla::destination::Destination::Named(
nd.clone(),
)),
)
.into(),
);
return;
} else {
gc.document.introspector.position(*loc)
}
}
};
let page_index = pos.page.get() - 1;
if let Some(index) = gc.page_index_converter.pdf_page_index(page_index) {
fc.push_annotation(
LinkAnnotation::new(
rect,
None,
Target::Destination(krilla::destination::Destination::Xyz(
XyzDestination::new(index, pos.point.to_krilla()),
)),
)
.into(),
);
}
}

View File

@ -0,0 +1,184 @@
use ecow::EcoString;
use krilla::metadata::{Metadata, TextDirection};
use typst_library::foundations::{Datetime, Smart};
use typst_library::layout::Dir;
use typst_library::text::Lang;
use crate::convert::GlobalContext;
pub(crate) fn build_metadata(gc: &GlobalContext) -> Metadata {
let creator = format!("Typst {}", env!("CARGO_PKG_VERSION"));
let lang = gc.languages.iter().max_by_key(|(_, &count)| count).map(|(&l, _)| l);
let dir = if lang.map(Lang::dir) == Some(Dir::RTL) {
TextDirection::RightToLeft
} else {
TextDirection::LeftToRight
};
let mut metadata = Metadata::new()
.creator(creator)
.keywords(gc.document.info.keywords.iter().map(EcoString::to_string).collect())
.authors(gc.document.info.author.iter().map(EcoString::to_string).collect());
if let Some(lang) = lang {
metadata = metadata.language(lang.as_str().to_string());
}
if let Some(title) = &gc.document.info.title {
metadata = metadata.title(title.to_string());
}
if let Some(subject) = &gc.document.info.description {
metadata = metadata.subject(subject.to_string());
}
if let Some(ident) = gc.options.ident.custom() {
metadata = metadata.document_id(ident.to_string());
}
// (1) If the `document.date` is set to a specific `datetime` or `none`, use it.
// (2) If the `document.date` is set to `auto` or not set, try to use the
// date from the options.
// (3) Otherwise, we don't write date metadata.
let (date, tz) = match (gc.document.info.date, gc.options.timestamp) {
(Smart::Custom(date), _) => (date, None),
(Smart::Auto, Some(timestamp)) => {
(Some(timestamp.datetime), Some(timestamp.timezone))
}
_ => (None, None),
};
if let Some(date) = date.and_then(|d| convert_date(d, tz)) {
metadata = metadata.creation_date(date);
}
metadata = metadata.text_direction(dir);
metadata
}
fn convert_date(
datetime: Datetime,
tz: Option<Timezone>,
) -> Option<krilla::metadata::DateTime> {
let year = datetime.year().filter(|&y| y >= 0)? as u16;
let mut kd = krilla::metadata::DateTime::new(year);
if let Some(month) = datetime.month() {
kd = kd.month(month);
}
if let Some(day) = datetime.day() {
kd = kd.day(day);
}
if let Some(h) = datetime.hour() {
kd = kd.hour(h);
}
if let Some(m) = datetime.minute() {
kd = kd.minute(m);
}
if let Some(s) = datetime.second() {
kd = kd.second(s);
}
match tz {
Some(Timezone::UTC) => kd = kd.utc_offset_hour(0).utc_offset_minute(0),
Some(Timezone::Local { hour_offset, minute_offset }) => {
kd = kd.utc_offset_hour(hour_offset).utc_offset_minute(minute_offset)
}
None => {}
}
Some(kd)
}
/// A timestamp with timezone information.
#[derive(Debug, Clone, Copy)]
pub struct Timestamp {
/// The datetime of the timestamp.
pub(crate) datetime: Datetime,
/// The timezone of the timestamp.
pub(crate) timezone: Timezone,
}
impl Timestamp {
/// Create a new timestamp with a given datetime and UTC suffix.
pub fn new_utc(datetime: Datetime) -> Self {
Self { datetime, timezone: Timezone::UTC }
}
/// Create a new timestamp with a given datetime and a local timezone offset.
pub fn new_local(datetime: Datetime, whole_minute_offset: i32) -> Option<Self> {
let hour_offset = (whole_minute_offset / 60).try_into().ok()?;
// Note: the `%` operator in Rust is the remainder operator, not the
// modulo operator. The remainder operator can return negative results.
// We can simply apply `abs` here because we assume the `minute_offset`
// will have the same sign as `hour_offset`.
let minute_offset = (whole_minute_offset % 60).abs().try_into().ok()?;
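// For example, a whole-minute offset of -210 gives -210 / 60 == -3 and
// (-210 % 60).abs() == 30, so the result is UTC-03:30.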
match (hour_offset, minute_offset) {
// Only accept valid timezone offsets with `-23 <= hours <= 23`,
// and `0 <= minutes <= 59`.
(-23..=23, 0..=59) => Some(Self {
datetime,
timezone: Timezone::Local { hour_offset, minute_offset },
}),
_ => None,
}
}
}
/// A timezone.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Timezone {
/// The UTC timezone.
UTC,
/// The local timezone offset from UTC. The `minute_offset` has the
/// same sign as `hour_offset`.
Local { hour_offset: i8, minute_offset: u8 },
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_timestamp_new_local() {
let dummy_datetime = Datetime::from_ymd_hms(2024, 12, 17, 10, 10, 10).unwrap();
let test = |whole_minute_offset, expect_timezone| {
assert_eq!(
Timestamp::new_local(dummy_datetime, whole_minute_offset)
.unwrap()
.timezone,
expect_timezone
);
};
// Valid timezone offsets
test(0, Timezone::Local { hour_offset: 0, minute_offset: 0 });
test(480, Timezone::Local { hour_offset: 8, minute_offset: 0 });
test(-480, Timezone::Local { hour_offset: -8, minute_offset: 0 });
test(330, Timezone::Local { hour_offset: 5, minute_offset: 30 });
test(-210, Timezone::Local { hour_offset: -3, minute_offset: 30 });
test(-720, Timezone::Local { hour_offset: -12, minute_offset: 0 }); // AoE
// Corner cases
test(315, Timezone::Local { hour_offset: 5, minute_offset: 15 });
test(-225, Timezone::Local { hour_offset: -3, minute_offset: 45 });
test(1439, Timezone::Local { hour_offset: 23, minute_offset: 59 });
test(-1439, Timezone::Local { hour_offset: -23, minute_offset: 59 });
// Invalid timezone offsets
assert!(Timestamp::new_local(dummy_datetime, 1440).is_none());
assert!(Timestamp::new_local(dummy_datetime, -1440).is_none());
assert!(Timestamp::new_local(dummy_datetime, i32::MAX).is_none());
assert!(Timestamp::new_local(dummy_datetime, i32::MIN).is_none());
}
}

View File

@ -1,86 +0,0 @@
use std::collections::{HashMap, HashSet};
use pdf_writer::writers::Destination;
use pdf_writer::{Ref, Str};
use typst_library::diag::SourceResult;
use typst_library::foundations::{Label, NativeElement};
use typst_library::introspection::Location;
use typst_library::layout::Abs;
use typst_library::model::HeadingElem;
use crate::{AbsExt, PdfChunk, Renumber, StrExt, WithGlobalRefs};
/// A list of destinations in the PDF document (a specific point on a specific
/// page), that have a name associated with them.
///
/// Typst creates a named destination for each heading in the document, which
/// will then be written in the document catalog. PDF readers can then display
/// them to show a clickable outline of the document.
#[derive(Default)]
pub struct NamedDestinations {
/// A map between elements and their associated labels.
pub loc_to_dest: HashMap<Location, Label>,
/// A sorted list of all named destinations.
pub dests: Vec<(Label, Ref)>,
}
impl Renumber for NamedDestinations {
fn renumber(&mut self, offset: i32) {
for (_, reference) in &mut self.dests {
reference.renumber(offset);
}
}
}
/// Fills in the map and vector for named destinations and writes the indirect
/// destination objects.
pub fn write_named_destinations(
context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, NamedDestinations)> {
let mut chunk = PdfChunk::new();
let mut out = NamedDestinations::default();
let mut seen = HashSet::new();
// Find all headings that have a label and are the first among other
// headings with the same label.
let mut matches: Vec<_> = context
.document
.introspector
.query(&HeadingElem::elem().select())
.iter()
.filter_map(|elem| elem.location().zip(elem.label()))
.filter(|&(_, label)| seen.insert(label))
.collect();
// Named destinations must be sorted by key.
matches.sort_by_key(|&(_, label)| label.resolve());
for (loc, label) in matches {
// Don't encode named destinations that would exceed the limit. Those
// will instead be encoded as normal links.
if label.resolve().len() > Str::PDFA_LIMIT {
continue;
}
let pos = context.document.introspector.position(loc);
let index = pos.page.get() - 1;
let y = (pos.point.y - Abs::pt(10.0)).max(Abs::zero());
if let Some((Some(page), Some(page_ref))) =
context.pages.get(index).zip(context.globals.pages.get(index))
{
let dest_ref = chunk.alloc();
let x = pos.point.x.to_f32();
let y = (page.content.size.y - y).to_f32();
out.dests.push((label, dest_ref));
out.loc_to_dest.insert(loc, label);
chunk
.indirect(dest_ref)
.start::<Destination>()
.page(*page_ref)
.xyz(x, y, None);
}
}
Ok((chunk, out))
}

View File

@ -1,18 +1,15 @@
use std::num::NonZeroUsize;
use pdf_writer::{Finish, Pdf, Ref, TextStr};
use krilla::destination::XyzDestination;
use krilla::outline::{Outline, OutlineNode};
use typst_library::foundations::{NativeElement, Packed, StyleChain};
use typst_library::layout::Abs;
use typst_library::model::HeadingElem;
use crate::{AbsExt, TextStrExt, WithEverything};
use crate::convert::GlobalContext;
use crate::util::AbsExt;
/// Construct the outline for the document.
pub(crate) fn write_outline(
chunk: &mut Pdf,
alloc: &mut Ref,
ctx: &WithEverything,
) -> Option<Ref> {
pub(crate) fn build_outline(gc: &GlobalContext) -> Outline {
let mut tree: Vec<HeadingNode> = vec![];
// Stores the level of the topmost skipped ancestor of the next bookmarked
@ -21,14 +18,14 @@ pub(crate) fn write_outline(
// Therefore, its next descendant must be added at its level, which is
// enforced in the manner shown below.
let mut last_skipped_level = None;
let elements = ctx.document.introspector.query(&HeadingElem::elem().select());
let elements = &gc.document.introspector.query(&HeadingElem::elem().select());
for elem in elements.iter() {
if let Some(page_ranges) = &ctx.options.page_ranges {
if let Some(page_ranges) = &gc.options.page_ranges {
if !page_ranges
.includes_page(ctx.document.introspector.page(elem.location().unwrap()))
.includes_page(gc.document.introspector.page(elem.location().unwrap()))
{
// Don't bookmark headings in non-exported pages
// Don't bookmark headings in non-exported pages.
continue;
}
}
@ -95,39 +92,15 @@ pub(crate) fn write_outline(
}
}
if tree.is_empty() {
return None;
let mut outline = Outline::new();
for child in convert_nodes(&tree, gc) {
outline.push_child(child);
}
let root_id = alloc.bump();
let start_ref = *alloc;
let len = tree.len();
let mut prev_ref = None;
for (i, node) in tree.iter().enumerate() {
prev_ref = Some(write_outline_item(
ctx,
chunk,
alloc,
node,
root_id,
prev_ref,
i + 1 == len,
));
}
chunk
.outline(root_id)
.first(start_ref)
.last(Ref::new(
alloc.get() - tree.last().map(|child| child.len() as i32).unwrap_or(1),
))
.count(tree.len() as i32);
Some(root_id)
outline
}
/// A heading in the outline panel.
#[derive(Debug)]
struct HeadingNode<'a> {
element: &'a Packed<HeadingElem>,
@ -149,73 +122,31 @@ impl<'a> HeadingNode<'a> {
}
}
fn len(&self) -> usize {
1 + self.children.iter().map(Self::len).sum::<usize>()
}
}
fn to_krilla(&self, gc: &GlobalContext) -> Option<OutlineNode> {
let loc = self.element.location().unwrap();
let title = self.element.body.plain_text().to_string();
let pos = gc.document.introspector.position(loc);
let page_index = pos.page.get() - 1;
/// Write an outline item and all its children.
fn write_outline_item(
ctx: &WithEverything,
chunk: &mut Pdf,
alloc: &mut Ref,
node: &HeadingNode,
parent_ref: Ref,
prev_ref: Option<Ref>,
is_last: bool,
) -> Ref {
let id = alloc.bump();
let next_ref = Ref::new(id.get() + node.len() as i32);
let mut outline = chunk.outline_item(id);
outline.parent(parent_ref);
if !is_last {
outline.next(next_ref);
}
if let Some(prev_rev) = prev_ref {
outline.prev(prev_rev);
}
if let Some(last_immediate_child) = node.children.last() {
outline.first(Ref::new(id.get() + 1));
outline.last(Ref::new(next_ref.get() - last_immediate_child.len() as i32));
outline.count(-(node.children.len() as i32));
}
outline.title(TextStr::trimmed(node.element.body.plain_text().trim()));
let loc = node.element.location().unwrap();
let pos = ctx.document.introspector.position(loc);
let index = pos.page.get() - 1;
// Don't link to non-exported pages.
if let Some((Some(page), Some(page_ref))) =
ctx.pages.get(index).zip(ctx.globals.pages.get(index))
{
if let Some(index) = gc.page_index_converter.pdf_page_index(page_index) {
let y = (pos.point.y - Abs::pt(10.0)).max(Abs::zero());
outline.dest().page(*page_ref).xyz(
pos.point.x.to_f32(),
(page.content.size.y - y).to_f32(),
None,
let dest = XyzDestination::new(
index,
krilla::geom::Point::from_xy(pos.point.x.to_f32(), y.to_f32()),
);
let mut outline_node = OutlineNode::new(title, dest);
for child in convert_nodes(&self.children, gc) {
outline_node.push_child(child);
}
outline.finish();
let mut prev_ref = None;
for (i, child) in node.children.iter().enumerate() {
prev_ref = Some(write_outline_item(
ctx,
chunk,
alloc,
child,
id,
prev_ref,
i + 1 == node.children.len(),
));
return Some(outline_node);
}
id
None
}
}
fn convert_nodes(nodes: &[HeadingNode], gc: &GlobalContext) -> Vec<OutlineNode> {
nodes.iter().flat_map(|node| node.to_krilla(gc)).collect()
}

View File

@ -1,248 +1,22 @@
use std::collections::HashMap;
use std::num::NonZeroU64;
use std::num::NonZeroUsize;
use ecow::EcoString;
use pdf_writer::types::{ActionType, AnnotationFlags, AnnotationType, NumberingStyle};
use pdf_writer::{Filter, Finish, Name, Rect, Ref, Str};
use typst_library::diag::SourceResult;
use typst_library::foundations::Label;
use typst_library::introspection::Location;
use typst_library::layout::{Abs, Page};
use typst_library::model::{Destination, Numbering};
use krilla::page::{NumberingStyle, PageLabel};
use typst_library::model::Numbering;
use crate::{
content, AbsExt, PdfChunk, PdfOptions, Resources, WithDocument, WithRefs,
WithResources,
};
/// Construct page objects.
#[typst_macros::time(name = "construct pages")]
#[allow(clippy::type_complexity)]
pub fn traverse_pages(
state: &WithDocument,
) -> SourceResult<(PdfChunk, (Vec<Option<EncodedPage>>, Resources<()>))> {
let mut resources = Resources::default();
let mut pages = Vec::with_capacity(state.document.pages.len());
let mut skipped_pages = 0;
for (i, page) in state.document.pages.iter().enumerate() {
if state
.options
.page_ranges
.as_ref()
.is_some_and(|ranges| !ranges.includes_page_index(i))
{
// Don't export this page.
pages.push(None);
skipped_pages += 1;
} else {
let mut encoded = construct_page(state.options, &mut resources, page)?;
encoded.label = page
.numbering
.as_ref()
.and_then(|num| PdfPageLabel::generate(num, page.number))
.or_else(|| {
// When some pages were excluded from export, we show a page label with
// the correct real (not logical) page number.
// This is for consistency with normal output when pages have no numbering
// and all are exported: the final PDF page numbers always correspond to
// the real (not logical) page numbers. Here, the final PDF page number
// will differ, but we can at least use labels to indicate the
// corresponding real page number in the Typst document.
(skipped_pages > 0).then(|| PdfPageLabel::arabic((i + 1) as u64))
});
pages.push(Some(encoded));
}
}
Ok((PdfChunk::new(), (pages, resources)))
}
/// Construct a page object.
#[typst_macros::time(name = "construct page")]
fn construct_page(
options: &PdfOptions,
out: &mut Resources<()>,
page: &Page,
) -> SourceResult<EncodedPage> {
Ok(EncodedPage {
content: content::build(
options,
out,
&page.frame,
page.fill_or_transparent(),
None,
)?,
label: None,
})
}
/// Allocate a reference for each exported page.
pub fn alloc_page_refs(
context: &WithResources,
) -> SourceResult<(PdfChunk, Vec<Option<Ref>>)> {
let mut chunk = PdfChunk::new();
let page_refs = context
.pages
.iter()
.map(|p| p.as_ref().map(|_| chunk.alloc()))
.collect();
Ok((chunk, page_refs))
}
/// Write the page tree.
pub fn write_page_tree(ctx: &WithRefs) -> SourceResult<(PdfChunk, Ref)> {
let mut chunk = PdfChunk::new();
let page_tree_ref = chunk.alloc.bump();
for i in 0..ctx.pages.len() {
let content_id = chunk.alloc.bump();
write_page(
&mut chunk,
ctx,
content_id,
page_tree_ref,
&ctx.references.named_destinations.loc_to_dest,
i,
);
}
let page_kids = ctx.globals.pages.iter().filter_map(Option::as_ref).copied();
chunk
.pages(page_tree_ref)
.count(page_kids.clone().count() as i32)
.kids(page_kids);
Ok((chunk, page_tree_ref))
}
/// Write a page tree node.
fn write_page(
chunk: &mut PdfChunk,
ctx: &WithRefs,
content_id: Ref,
page_tree_ref: Ref,
loc_to_dest: &HashMap<Location, Label>,
i: usize,
) {
let Some((page, page_ref)) = ctx.pages[i].as_ref().zip(ctx.globals.pages[i]) else {
// Page excluded from export.
return;
};
let mut annotations = Vec::with_capacity(page.content.links.len());
for (dest, rect) in &page.content.links {
let id = chunk.alloc();
annotations.push(id);
let mut annotation = chunk.annotation(id);
annotation.subtype(AnnotationType::Link).rect(*rect);
annotation.border(0.0, 0.0, 0.0, None).flags(AnnotationFlags::PRINT);
let pos = match dest {
Destination::Url(uri) => {
annotation
.action()
.action_type(ActionType::Uri)
.uri(Str(uri.as_bytes()));
continue;
}
Destination::Position(pos) => *pos,
Destination::Location(loc) => {
if let Some(key) = loc_to_dest.get(loc) {
annotation
.action()
.action_type(ActionType::GoTo)
// `key` must be a `Str`, not a `Name`.
.pair(Name(b"D"), Str(key.resolve().as_bytes()));
continue;
} else {
ctx.document.introspector.position(*loc)
}
}
};
let index = pos.page.get() - 1;
let y = (pos.point.y - Abs::pt(10.0)).max(Abs::zero());
// Don't add links to non-exported pages.
if let Some((Some(page), Some(page_ref))) =
ctx.pages.get(index).zip(ctx.globals.pages.get(index))
{
annotation
.action()
.action_type(ActionType::GoTo)
.destination()
.page(*page_ref)
.xyz(pos.point.x.to_f32(), (page.content.size.y - y).to_f32(), None);
}
}
let mut page_writer = chunk.page(page_ref);
page_writer.parent(page_tree_ref);
let w = page.content.size.x.to_f32();
let h = page.content.size.y.to_f32();
page_writer.media_box(Rect::new(0.0, 0.0, w, h));
page_writer.contents(content_id);
page_writer.pair(Name(b"Resources"), ctx.resources.reference);
if page.content.uses_opacities {
page_writer
.group()
.transparency()
.isolated(false)
.knockout(false)
.color_space()
.srgb();
}
page_writer.annotations(annotations);
page_writer.finish();
chunk
.stream(content_id, page.content.content.wait())
.filter(Filter::FlateDecode);
}
/// Specification for a PDF page label.
#[derive(Debug, Clone, PartialEq, Hash, Default)]
pub(crate) struct PdfPageLabel {
/// Can be any string or none. It is always prepended to the numbered part
/// of the label.
pub prefix: Option<EcoString>,
/// Based on the numbering pattern.
///
/// If the numbering is `None` or a function, this field will be `None`.
pub style: Option<PdfPageLabelStyle>,
/// Offset for the page label start.
///
/// Describes where to start counting from when setting a style.
/// (Has to be greater than or equal to 1.)
pub offset: Option<NonZeroU64>,
}
/// A PDF page label number style.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum PdfPageLabelStyle {
/// Decimal arabic numerals (1, 2, 3).
Arabic,
/// Lowercase roman numerals (i, ii, iii).
LowerRoman,
/// Uppercase roman numerals (I, II, III).
UpperRoman,
/// Lowercase letters (`a` to `z` for the first 26 pages,
/// `aa` to `zz` and so on for the next).
LowerAlpha,
/// Uppercase letters (`A` to `Z` for the first 26 pages,
/// `AA` to `ZZ` and so on for the next).
UpperAlpha,
}
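// A minimal illustrative sketch (hedged, not from the commit itself): how
// these fields combine. For a numbering pattern like "A-1" applied to page
// number 3, the label has prefix "A-", arabic style, and offset 3, which a
// PDF viewer displays as "A-3".
#[allow(dead_code)]
fn example_page_label() -> PdfPageLabel {
PdfPageLabel {
prefix: Some("A-".into()),
style: Some(PdfPageLabelStyle::Arabic),
offset: NonZeroU64::new(3),
}
}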
pub(crate) trait PageLabelExt {
/// Create a new `PageLabel` from a `Numbering` applied to a page
/// number.
fn generate(numbering: &Numbering, number: u64) -> Option<PageLabel>;
/// Creates an arabic page label with the specified page number.
/// For example, this will display page label `11` when given the page
/// number 11.
fn arabic(number: u64) -> PageLabel;
}
impl PageLabelExt for PageLabel {
fn generate(numbering: &Numbering, number: u64) -> Option<PageLabel> {
let Numbering::Pattern(pat) = numbering else {
return None;
};
@ -252,8 +26,8 @@ impl PdfPageLabel {
// If there is a suffix, we cannot use the common style optimisation,
// since PDF does not provide a suffix field.
let style = if pat.suffix.is_empty() {
use krilla::page::NumberingStyle as Style;
use typst_library::model::NumberingKind as Kind;
match kind {
Kind::Arabic => Some(Style::Arabic),
Kind::LowerRoman => Some(Style::LowerRoman),
@ -275,36 +49,16 @@ impl PdfPageLabel {
(!prefix.is_empty()).then(|| prefix.clone())
};
let offset = style.and(number.try_into().ok().and_then(NonZeroUsize::new));
Some(PageLabel::new(style, prefix.map(|s| s.to_string()), offset))
}
fn arabic(number: u64) -> PageLabel {
PageLabel::new(
Some(NumberingStyle::Arabic),
None,
number.try_into().ok().and_then(NonZeroUsize::new),
)
}
}
impl PdfPageLabelStyle {
pub fn to_pdf_numbering_style(self) -> NumberingStyle {
match self {
PdfPageLabelStyle::Arabic => NumberingStyle::Arabic,
PdfPageLabelStyle::LowerRoman => NumberingStyle::LowerRoman,
PdfPageLabelStyle::UpperRoman => NumberingStyle::UpperRoman,
PdfPageLabelStyle::LowerAlpha => NumberingStyle::LowerAlpha,
PdfPageLabelStyle::UpperAlpha => NumberingStyle::UpperAlpha,
}
}
}
/// Data for an exported page.
pub struct EncodedPage {
pub content: content::Encoded,
pub label: Option<PdfPageLabel>,
}


@ -0,0 +1,379 @@
//! Convert paint types from typst to krilla.
use krilla::color::{self, cmyk, luma, rgb};
use krilla::num::NormalizedF32;
use krilla::paint::{
Fill, LinearGradient, Pattern, RadialGradient, SpreadMethod, Stop, Stroke,
StrokeDash, SweepGradient,
};
use krilla::surface::Surface;
use typst_library::diag::SourceResult;
use typst_library::layout::{Abs, Angle, Quadrant, Ratio, Size, Transform};
use typst_library::visualize::{
Color, ColorSpace, DashPattern, FillRule, FixedStroke, Gradient, Paint, RatioOrAngle,
RelativeTo, Tiling, WeightedColor,
};
use typst_utils::Numeric;
use crate::convert::{handle_frame, FrameContext, GlobalContext, State};
use crate::util::{AbsExt, FillRuleExt, LineCapExt, LineJoinExt, TransformExt};
pub(crate) fn convert_fill(
gc: &mut GlobalContext,
paint_: &Paint,
fill_rule_: FillRule,
on_text: bool,
surface: &mut Surface,
state: &State,
size: Size,
) -> SourceResult<Fill> {
let (paint, opacity) = convert_paint(gc, paint_, on_text, surface, state, size)?;
Ok(Fill {
paint,
rule: fill_rule_.to_krilla(),
opacity: NormalizedF32::new(opacity as f32 / 255.0).unwrap(),
})
}
pub(crate) fn convert_stroke(
gc: &mut GlobalContext,
stroke: &FixedStroke,
on_text: bool,
surface: &mut Surface,
state: &State,
size: Size,
) -> SourceResult<Stroke> {
let (paint, opacity) =
convert_paint(gc, &stroke.paint, on_text, surface, state, size)?;
Ok(Stroke {
paint,
width: stroke.thickness.to_f32(),
miter_limit: stroke.miter_limit.get() as f32,
line_join: stroke.join.to_krilla(),
line_cap: stroke.cap.to_krilla(),
opacity: NormalizedF32::new(opacity as f32 / 255.0).unwrap(),
dash: stroke.dash.as_ref().map(convert_dash),
})
}
fn convert_paint(
gc: &mut GlobalContext,
paint: &Paint,
on_text: bool,
surface: &mut Surface,
state: &State,
mut size: Size,
) -> SourceResult<(krilla::paint::Paint, u8)> {
// Edge cases for strokes.
if size.x.is_zero() {
size.x = Abs::pt(1.0);
}
if size.y.is_zero() {
size.y = Abs::pt(1.0);
}
match paint {
Paint::Solid(c) => {
let (c, a) = convert_solid(c);
Ok((c.into(), a))
}
Paint::Gradient(g) => Ok(convert_gradient(g, on_text, state, size)),
Paint::Tiling(p) => convert_pattern(gc, p, on_text, surface, state),
}
}
fn convert_solid(color: &Color) -> (color::Color, u8) {
match color.space() {
ColorSpace::D65Gray => {
let (c, a) = convert_luma(color);
(c.into(), a)
}
ColorSpace::Cmyk => (convert_cmyk(color).into(), 255),
// Convert colors in all other color spaces into RGB.
_ => {
let (c, a) = convert_rgb(color);
(c.into(), a)
}
}
}
fn convert_cmyk(color: &Color) -> cmyk::Color {
let components = color.to_space(ColorSpace::Cmyk).to_vec4_u8();
cmyk::Color::new(components[0], components[1], components[2], components[3])
}
fn convert_rgb(color: &Color) -> (rgb::Color, u8) {
let components = color.to_space(ColorSpace::Srgb).to_vec4_u8();
(rgb::Color::new(components[0], components[1], components[2]), components[3])
}
fn convert_luma(color: &Color) -> (luma::Color, u8) {
let components = color.to_space(ColorSpace::D65Gray).to_vec4_u8();
(luma::Color::new(components[0]), components[3])
}
fn convert_pattern(
gc: &mut GlobalContext,
pattern: &Tiling,
on_text: bool,
surface: &mut Surface,
state: &State,
) -> SourceResult<(krilla::paint::Paint, u8)> {
let transform = correct_transform(state, pattern.unwrap_relative(on_text));
let mut stream_builder = surface.stream_builder();
let mut surface = stream_builder.surface();
let mut fc = FrameContext::new(pattern.frame().size());
handle_frame(&mut fc, pattern.frame(), None, &mut surface, gc)?;
surface.finish();
let stream = stream_builder.finish();
let pattern = Pattern {
stream,
transform: transform.to_krilla(),
width: (pattern.size().x + pattern.spacing().x).to_pt() as _,
height: (pattern.size().y + pattern.spacing().y).to_pt() as _,
};
Ok((pattern.into(), 255))
}
fn convert_gradient(
gradient: &Gradient,
on_text: bool,
state: &State,
size: Size,
) -> (krilla::paint::Paint, u8) {
let size = match gradient.unwrap_relative(on_text) {
RelativeTo::Self_ => size,
RelativeTo::Parent => state.container_size(),
};
let angle = gradient.angle().unwrap_or_else(Angle::zero);
let base_transform = correct_transform(state, gradient.unwrap_relative(on_text));
let stops = convert_gradient_stops(gradient);
match &gradient {
Gradient::Linear(_) => {
let (x1, y1, x2, y2) = {
let (mut sin, mut cos) = (angle.sin(), angle.cos());
// Scale to edges of unit square.
let factor = cos.abs() + sin.abs();
sin *= factor;
cos *= factor;
match angle.quadrant() {
Quadrant::First => (0.0, 0.0, cos as f32, sin as f32),
Quadrant::Second => (1.0, 0.0, cos as f32 + 1.0, sin as f32),
Quadrant::Third => (1.0, 1.0, cos as f32 + 1.0, sin as f32 + 1.0),
Quadrant::Fourth => (0.0, 1.0, cos as f32, sin as f32 + 1.0),
}
};
let linear = LinearGradient {
x1,
y1,
x2,
y2,
// The x and y coordinates are normalized, so we need to scale by the size.
transform: base_transform
.pre_concat(Transform::scale(
Ratio::new(size.x.to_f32() as f64),
Ratio::new(size.y.to_f32() as f64),
))
.to_krilla(),
spread_method: SpreadMethod::Pad,
stops,
anti_alias: gradient.anti_alias(),
};
(linear.into(), 255)
}
Gradient::Radial(radial) => {
let radial = RadialGradient {
fx: radial.focal_center.x.get() as f32,
fy: radial.focal_center.y.get() as f32,
fr: radial.focal_radius.get() as f32,
cx: radial.center.x.get() as f32,
cy: radial.center.y.get() as f32,
cr: radial.radius.get() as f32,
transform: base_transform
.pre_concat(Transform::scale(
Ratio::new(size.x.to_f32() as f64),
Ratio::new(size.y.to_f32() as f64),
))
.to_krilla(),
spread_method: SpreadMethod::Pad,
stops,
anti_alias: gradient.anti_alias(),
};
(radial.into(), 255)
}
Gradient::Conic(conic) => {
// Correct the gradient's angle.
let cx = size.x.to_f32() * conic.center.x.get() as f32;
let cy = size.y.to_f32() * conic.center.y.get() as f32;
let actual_transform = base_transform
// Adjust for the angle.
.pre_concat(Transform::rotate_at(
angle,
Abs::pt(cx as f64),
Abs::pt(cy as f64),
))
// The default start points in krilla and Typst are on opposite sides, so
// we need to flip it horizontally.
.pre_concat(Transform::scale_at(
-Ratio::one(),
Ratio::one(),
Abs::pt(cx as f64),
Abs::pt(cy as f64),
));
let sweep = SweepGradient {
cx,
cy,
start_angle: 0.0,
end_angle: 360.0,
transform: actual_transform.to_krilla(),
spread_method: SpreadMethod::Pad,
stops,
anti_alias: gradient.anti_alias(),
};
(sweep.into(), 255)
}
}
}
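// A hedged sanity sketch for the unit-square scaling above: at 45 degrees,
// sin = cos = sqrt(2)/2 and factor = sqrt(2), so the scaled endpoint lands
// exactly on the unit square's corner, i.e. (0, 0) -> (1, 1).
#[allow(dead_code)]
fn endpoint_scaling_example() {
let angle = std::f64::consts::FRAC_PI_4;
let (mut sin, mut cos) = (angle.sin(), angle.cos());
let factor = cos.abs() + sin.abs();
sin *= factor;
cos *= factor;
assert!((cos - 1.0).abs() < 1e-9 && (sin - 1.0).abs() < 1e-9);
}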
fn convert_gradient_stops(gradient: &Gradient) -> Vec<Stop> {
let mut stops = vec![];
let use_cmyk = gradient.stops().iter().all(|s| s.color.space() == ColorSpace::Cmyk);
let mut add_single = |color: &Color, offset: Ratio| {
let (color, opacity) = if use_cmyk {
(convert_cmyk(color).into(), 255)
} else {
let (c, a) = convert_rgb(color);
(c.into(), a)
};
let opacity = NormalizedF32::new((opacity as f32) / 255.0).unwrap();
let offset = NormalizedF32::new(offset.get() as f32).unwrap();
let stop = Stop { offset, color, opacity };
stops.push(stop);
};
// Convert stops.
match &gradient {
Gradient::Linear(_) | Gradient::Radial(_) => {
if let Some(s) = gradient.stops().first() {
add_single(&s.color, s.offset.unwrap());
}
// Create the individual gradient functions for each pair of stops.
for window in gradient.stops().windows(2) {
let (first, second) = (window[0], window[1]);
// If we have a hue index or are using Oklab, we will create several
// stops in-between to make the gradient smoother without interpolation
// issues with native color spaces.
if gradient.space().hue_index().is_some() {
for i in 0..=32 {
let t = i as f64 / 32.0;
let real_t = Ratio::new(
first.offset.unwrap().get() * (1.0 - t)
+ second.offset.unwrap().get() * t,
);
let c = gradient.sample(RatioOrAngle::Ratio(real_t));
add_single(&c, real_t);
}
}
add_single(&second.color, second.offset.unwrap());
}
}
Gradient::Conic(conic) => {
if let Some((c, t)) = conic.stops.first() {
add_single(c, *t);
}
for window in conic.stops.windows(2) {
let ((c0, t0), (c1, t1)) = (window[0], window[1]);
// Precision:
// - On an even color, insert a stop every 90deg.
// - For a hue-based color space, insert 200 stops minimum.
// - On any other, insert 20 stops minimum.
let max_dt = if c0 == c1 {
0.25
} else if conic.space.hue_index().is_some() {
0.005
} else {
0.05
};
let mut t_x = t0.get();
let dt = (t1.get() - t0.get()).min(max_dt);
// Special casing for sharp gradients.
if t0 == t1 {
add_single(&c1, t1);
continue;
}
while t_x < t1.get() {
let t_next = (t_x + dt).min(t1.get());
// The current progress in the current window.
let t = |t| (t - t0.get()) / (t1.get() - t0.get());
let c_next = Color::mix_iter(
[
WeightedColor::new(c0, 1.0 - t(t_next)),
WeightedColor::new(c1, t(t_next)),
],
conic.space,
)
.unwrap();
add_single(&c_next, Ratio::new(t_next));
t_x = t_next;
}
add_single(&c1, t1);
}
}
}
stops
}
fn convert_dash(dash: &DashPattern<Abs, Abs>) -> StrokeDash {
StrokeDash {
array: dash.array.iter().map(|e| e.to_f32()).collect(),
offset: dash.phase.to_f32(),
}
}
fn correct_transform(state: &State, relative: RelativeTo) -> Transform {
// In krilla, if we have a shape with a transform and a complex paint,
// then the paint will inherit the transform of the shape.
match relative {
// Because of the above, we don't need to apply an additional transform here.
RelativeTo::Self_ => Transform::identity(),
// Because of the above, we need to first reverse the transform that will be
// applied from the shape, and then re-apply the transform that is used for
// the next parent container.
RelativeTo::Parent => state
.transform()
.invert()
.unwrap()
.pre_concat(state.container_transform()),
}
}
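// A hedged sketch of the invariant above, assuming `pre_concat` applies its
// argument first: krilla effectively computes `T * paint_transform` for a
// shape transform `T`, so choosing `paint_transform = T^-1 * C` (with `C`
// the container transform) yields `T * T^-1 * C = C`, positioning the
// paint relative to the container as `RelativeTo::Parent` requires.
#[allow(dead_code)]
fn parent_relative_example(state: &State) -> Transform {
let t = state.transform();
let paint = t.invert().unwrap().pre_concat(state.container_transform());
// Equals `state.container_transform()` up to floating-point error.
t.pre_concat(paint)
}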


@ -1,349 +0,0 @@
//! PDF resources.
//!
//! Resources are defined in dictionaries. They map identifiers such as `Im0` to
//! a PDF reference. Each [content stream] is associated with a resource dictionary.
//! The identifiers defined in the resources can then be used in content streams.
//!
//! [content stream]: `crate::content`
use std::collections::{BTreeMap, HashMap};
use std::hash::Hash;
use ecow::{eco_format, EcoString};
use pdf_writer::{Dict, Finish, Name, Ref};
use subsetter::GlyphRemapper;
use typst_library::diag::{SourceResult, StrResult};
use typst_library::text::{Font, Lang};
use typst_library::visualize::Image;
use typst_syntax::Span;
use typst_utils::Deferred;
use crate::color::ColorSpaces;
use crate::color_font::ColorFontMap;
use crate::extg::ExtGState;
use crate::gradient::PdfGradient;
use crate::image::EncodedImage;
use crate::tiling::TilingRemapper;
use crate::{PdfChunk, Renumber, WithEverything, WithResources};
/// All the resources that have been collected when traversing the document.
///
/// This does not allocate references to resources; it only tracks what was
/// used and deduplicates what can be deduplicated.
///
/// You may notice that this structure is a tree: [`TilingRemapper`] and
/// [`ColorFontMap`] (which are fields of [`Resources`]) themselves contain
/// [`Resources`] (called "sub-resources" from now on). Because color glyphs
/// and tilings are defined using content
/// streams, just like pages, they can refer to resources too, which are tracked
/// by the respective sub-resources.
///
/// Each instance of this structure will become a `/Resources` dictionary in
/// the final PDF. It is not possible to use a single shared dictionary for all
/// pages, tilings and color fonts, because if a resource is listed in its own
/// `/Resources` dictionary, some PDF readers will fail to open the document.
///
/// Because we need to lazily initialize sub-resources (we don't know how deep
/// the tree will be before reading the document), and because this is done in
/// a context where no PDF reference allocator is available, `Resources` are
/// originally created with the type parameter `R = ()`. The reference for each
/// dictionary will only be allocated in the next phase, once we know the shape
/// of the tree, at which point `R` becomes `Ref`. No other value of `R` should
/// ever exist.
pub struct Resources<R = Ref> {
/// The global reference to this resource dictionary, or `()` if it has not
/// been allocated yet.
pub reference: R,
/// Handles color space writing.
pub colors: ColorSpaces,
/// Deduplicates fonts used across the document.
pub fonts: Remapper<Font>,
/// Deduplicates images used across the document.
pub images: Remapper<Image>,
/// Handles to deferred image conversions.
pub deferred_images: HashMap<usize, (Deferred<StrResult<EncodedImage>>, Span)>,
/// Deduplicates gradients used across the document.
pub gradients: Remapper<PdfGradient>,
/// Deduplicates tilings used across the document.
pub tilings: Option<Box<TilingRemapper<R>>>,
/// Deduplicates external graphics states used across the document.
pub ext_gs: Remapper<ExtGState>,
/// Deduplicates color glyphs.
pub color_fonts: Option<Box<ColorFontMap<R>>>,
// The fields below do not correspond to actual resources that will be
// written in a dictionary, but are more meta-data about resources that
// can't really live somewhere else.
/// The number of glyphs for all referenced languages in the content stream.
/// We keep track of this to determine the main document language.
/// BTreeMap is used to write sorted list of languages to metadata.
pub languages: BTreeMap<Lang, usize>,
/// For each font a mapping from used glyphs to their text representation.
/// This is used for the PDF's /ToUnicode map and is important for copy-paste
/// and searching.
///
/// Note that the text representation may contain multiple chars in case of
/// ligatures or similar things, and it may have no entry in the font's cmap
/// (or only a private-use codepoint), like the “Th” in Linux Libertine.
///
/// A glyph may have multiple entries in the font's cmap, and even the same
/// glyph can have a different text representation within one document.
/// But /ToUnicode does not support that, so we just save the first occurrence.
pub glyph_sets: HashMap<Font, BTreeMap<u16, EcoString>>,
/// Same as `glyph_sets`, but for color fonts.
pub color_glyph_sets: HashMap<Font, BTreeMap<u16, EcoString>>,
/// Stores the glyph remapper for each font for the subsetter.
pub glyph_remappers: HashMap<Font, GlyphRemapper>,
}
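// A hedged sketch of the two-phase type-state described above: resources
// are first collected with `R = ()` and only later receive references.
#[allow(dead_code)]
fn two_phase_example(refs: &ResourcesRefs) -> Resources<Ref> {
let collected: Resources<()> = Resources::default();
// ... the document would be traversed here, filling `collected` ...
collected.with_refs(refs)
}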
impl<R: Renumber> Renumber for Resources<R> {
fn renumber(&mut self, offset: i32) {
self.reference.renumber(offset);
if let Some(color_fonts) = &mut self.color_fonts {
color_fonts.resources.renumber(offset);
}
if let Some(tilings) = &mut self.tilings {
tilings.resources.renumber(offset);
}
}
}
impl Default for Resources<()> {
fn default() -> Self {
Resources {
reference: (),
colors: ColorSpaces::default(),
fonts: Remapper::new("F"),
images: Remapper::new("Im"),
deferred_images: HashMap::new(),
gradients: Remapper::new("Gr"),
tilings: None,
ext_gs: Remapper::new("Gs"),
color_fonts: None,
languages: BTreeMap::new(),
glyph_sets: HashMap::new(),
color_glyph_sets: HashMap::new(),
glyph_remappers: HashMap::new(),
}
}
}
impl Resources<()> {
/// Associate a reference with this resource dictionary (and do so
/// recursively for sub-resources).
pub fn with_refs(self, refs: &ResourcesRefs) -> Resources<Ref> {
Resources {
reference: refs.reference,
colors: self.colors,
fonts: self.fonts,
images: self.images,
deferred_images: self.deferred_images,
gradients: self.gradients,
tilings: self
.tilings
.zip(refs.tilings.as_ref())
.map(|(p, r)| Box::new(p.with_refs(r))),
ext_gs: self.ext_gs,
color_fonts: self
.color_fonts
.zip(refs.color_fonts.as_ref())
.map(|(c, r)| Box::new(c.with_refs(r))),
languages: self.languages,
glyph_sets: self.glyph_sets,
color_glyph_sets: self.color_glyph_sets,
glyph_remappers: self.glyph_remappers,
}
}
}
impl<R> Resources<R> {
/// Run a function on this resource dictionary and all
/// of its sub-resources.
pub fn traverse<P>(&self, process: &mut P) -> SourceResult<()>
where
P: FnMut(&Self) -> SourceResult<()>,
{
process(self)?;
if let Some(color_fonts) = &self.color_fonts {
color_fonts.resources.traverse(process)?;
}
if let Some(tilings) = &self.tilings {
tilings.resources.traverse(process)?;
}
Ok(())
}
}
/// References for a resource tree.
///
/// This structure is a tree too, that should have the same structure as the
/// corresponding `Resources`.
pub struct ResourcesRefs {
pub reference: Ref,
pub color_fonts: Option<Box<ResourcesRefs>>,
pub tilings: Option<Box<ResourcesRefs>>,
}
impl Renumber for ResourcesRefs {
fn renumber(&mut self, offset: i32) {
self.reference.renumber(offset);
if let Some(color_fonts) = &mut self.color_fonts {
color_fonts.renumber(offset);
}
if let Some(tilings) = &mut self.tilings {
tilings.renumber(offset);
}
}
}
/// Allocate references for all resource dictionaries.
pub fn alloc_resources_refs(
context: &WithResources,
) -> SourceResult<(PdfChunk, ResourcesRefs)> {
let mut chunk = PdfChunk::new();
/// Recursively explore resource dictionaries and assign them references.
fn refs_for(resources: &Resources<()>, chunk: &mut PdfChunk) -> ResourcesRefs {
ResourcesRefs {
reference: chunk.alloc(),
color_fonts: resources
.color_fonts
.as_ref()
.map(|c| Box::new(refs_for(&c.resources, chunk))),
tilings: resources
.tilings
.as_ref()
.map(|p| Box::new(refs_for(&p.resources, chunk))),
}
}
let refs = refs_for(&context.resources, &mut chunk);
Ok((chunk, refs))
}
/// Write the resource dictionaries that will be referenced by all pages.
///
/// We add a reference to this dictionary to each page individually instead of
/// to the root node of the page tree because using the resource inheritance
/// feature breaks PDF merging with Apple Preview.
///
/// Also write resource dictionaries for Type3 fonts and PDF patterns.
pub fn write_resource_dictionaries(ctx: &WithEverything) -> SourceResult<(PdfChunk, ())> {
let mut chunk = PdfChunk::new();
let mut used_color_spaces = ColorSpaces::default();
ctx.resources.traverse(&mut |resources| {
used_color_spaces.merge(&resources.colors);
let images_ref = chunk.alloc.bump();
let patterns_ref = chunk.alloc.bump();
let ext_gs_states_ref = chunk.alloc.bump();
let color_spaces_ref = chunk.alloc.bump();
let mut color_font_slices = Vec::new();
let mut color_font_numbers = HashMap::new();
if let Some(color_fonts) = &resources.color_fonts {
for (_, font_slice) in color_fonts.iter() {
color_font_numbers.insert(font_slice.clone(), color_font_slices.len());
color_font_slices.push(font_slice);
}
}
let color_font_remapper = Remapper {
prefix: "Cf",
to_pdf: color_font_numbers,
to_items: color_font_slices,
};
resources
.images
.write(&ctx.references.images, &mut chunk.indirect(images_ref).dict());
let mut patterns_dict = chunk.indirect(patterns_ref).dict();
resources
.gradients
.write(&ctx.references.gradients, &mut patterns_dict);
if let Some(p) = &resources.tilings {
p.remapper.write(&ctx.references.tilings, &mut patterns_dict);
}
patterns_dict.finish();
resources
.ext_gs
.write(&ctx.references.ext_gs, &mut chunk.indirect(ext_gs_states_ref).dict());
let mut res_dict = chunk
.indirect(resources.reference)
.start::<pdf_writer::writers::Resources>();
res_dict.pair(Name(b"XObject"), images_ref);
res_dict.pair(Name(b"Pattern"), patterns_ref);
res_dict.pair(Name(b"ExtGState"), ext_gs_states_ref);
res_dict.pair(Name(b"ColorSpace"), color_spaces_ref);
// TODO: can't this be an indirect reference too?
let mut fonts_dict = res_dict.fonts();
resources.fonts.write(&ctx.references.fonts, &mut fonts_dict);
color_font_remapper.write(&ctx.references.color_fonts, &mut fonts_dict);
fonts_dict.finish();
res_dict.finish();
let color_spaces = chunk.indirect(color_spaces_ref).dict();
resources
.colors
.write_color_spaces(color_spaces, &ctx.globals.color_functions);
Ok(())
})?;
used_color_spaces.write_functions(&mut chunk, &ctx.globals.color_functions);
Ok((chunk, ()))
}
/// Assigns new, consecutive PDF-internal indices to items.
pub struct Remapper<T> {
/// The prefix to use when naming these resources.
prefix: &'static str,
/// Forwards from the items to the pdf indices.
to_pdf: HashMap<T, usize>,
/// Backwards from the pdf indices to the items.
to_items: Vec<T>,
}
impl<T> Remapper<T>
where
T: Eq + Hash + Clone,
{
/// Create an empty mapping.
pub fn new(prefix: &'static str) -> Self {
Self { prefix, to_pdf: HashMap::new(), to_items: vec![] }
}
/// Insert an item in the mapping if it was not already present.
pub fn insert(&mut self, item: T) -> usize {
let to_layout = &mut self.to_items;
*self.to_pdf.entry(item.clone()).or_insert_with(|| {
let pdf_index = to_layout.len();
to_layout.push(item);
pdf_index
})
}
/// All items in this mapping, in the order in which they were inserted.
pub fn items(&self) -> impl Iterator<Item = &T> + '_ {
self.to_items.iter()
}
/// Write this list of items in a Resource dictionary.
fn write(&self, mapping: &HashMap<T, Ref>, dict: &mut Dict) {
for (number, item) in self.items().enumerate() {
let name = eco_format!("{}{}", self.prefix, number);
let reference = mapping[item];
dict.pair(Name(name.as_bytes()), reference);
}
}
}
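// A hedged usage sketch: deduplicating inserts yield stable indices that
// `write` turns into resource names like `F0`, `F1`. The font names here
// are arbitrary placeholder strings.
#[allow(dead_code)]
fn remapper_example() {
let mut fonts: Remapper<&'static str> = Remapper::new("F");
assert_eq!(fonts.insert("Libertinus"), 0);
assert_eq!(fonts.insert("Fira Math"), 1);
assert_eq!(fonts.insert("Libertinus"), 0); // Deduplicated.
}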


@ -0,0 +1,106 @@
use krilla::geom::{Path, PathBuilder, Rect};
use krilla::surface::Surface;
use typst_library::diag::SourceResult;
use typst_library::visualize::{Geometry, Shape};
use typst_syntax::Span;
use crate::convert::{FrameContext, GlobalContext};
use crate::paint;
use crate::util::{convert_path, AbsExt, TransformExt};
#[typst_macros::time(name = "handle shape")]
pub(crate) fn handle_shape(
fc: &mut FrameContext,
shape: &Shape,
surface: &mut Surface,
gc: &mut GlobalContext,
span: Span,
) -> SourceResult<()> {
surface.set_location(span.into_raw().get());
surface.push_transform(&fc.state().transform().to_krilla());
if let Some(path) = convert_geometry(&shape.geometry) {
let fill = shape
.fill
.as_ref()
.map(|paint| {
paint::convert_fill(
gc,
paint,
shape.fill_rule,
false,
surface,
fc.state(),
shape.geometry.bbox_size(),
)
})
.transpose()?;
let stroke = shape
.stroke
.as_ref()
.filter(|stroke| stroke.thickness.to_f32() > 0.0)
.map(|stroke| {
paint::convert_stroke(
gc,
stroke,
false,
surface,
fc.state(),
shape.geometry.bbox_size(),
)
})
.transpose()?;
// Skip the draw if there is neither a fill nor a stroke; otherwise,
// krilla would fill with black by default.
if fill.is_some() || stroke.is_some() {
surface.set_fill(fill);
surface.set_stroke(stroke);
surface.draw_path(&path);
}
}
surface.pop();
surface.reset_location();
Ok(())
}
fn convert_geometry(geometry: &Geometry) -> Option<Path> {
let mut path_builder = PathBuilder::new();
match geometry {
Geometry::Line(l) => {
path_builder.move_to(0.0, 0.0);
path_builder.line_to(l.x.to_f32(), l.y.to_f32());
}
Geometry::Rect(size) => {
let w = size.x.to_f32();
let h = size.y.to_f32();
let rect = if w < 0.0 || h < 0.0 {
// krilla doesn't normally allow for negative dimensions, but
// Typst supports them, so we apply a transform if needed.
let transform =
krilla::geom::Transform::from_scale(w.signum(), h.signum());
Rect::from_xywh(0.0, 0.0, w.abs(), h.abs())
.and_then(|rect| rect.transform(transform))
} else {
Rect::from_xywh(0.0, 0.0, w, h)
};
if let Some(rect) = rect {
path_builder.push_rect(rect);
}
}
Geometry::Curve(c) => {
convert_path(c, &mut path_builder);
}
}
path_builder.finish()
}
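// A hedged illustration of the negative-dimension handling above: a rect
// of width -10 is built as a 10-wide rect and mirrored by a (-1, 1) scale.
#[allow(dead_code)]
fn negative_rect_example() -> Option<Rect> {
let flip = krilla::geom::Transform::from_scale(-1.0, 1.0);
Rect::from_xywh(0.0, 0.0, 10.0, 20.0).and_then(|rect| rect.transform(flip))
}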


@ -0,0 +1,135 @@
use std::ops::Range;
use std::sync::Arc;
use bytemuck::TransparentWrapper;
use krilla::surface::{Location, Surface};
use krilla::text::GlyphId;
use typst_library::diag::{bail, SourceResult};
use typst_library::layout::Size;
use typst_library::text::{Font, Glyph, TextItem};
use typst_library::visualize::FillRule;
use typst_syntax::Span;
use crate::convert::{FrameContext, GlobalContext};
use crate::paint;
use crate::util::{display_font, AbsExt, TransformExt};
#[typst_macros::time(name = "handle text")]
pub(crate) fn handle_text(
fc: &mut FrameContext,
t: &TextItem,
surface: &mut Surface,
gc: &mut GlobalContext,
) -> SourceResult<()> {
*gc.languages.entry(t.lang).or_insert(0) += t.glyphs.len();
let font = convert_font(gc, t.font.clone())?;
let fill = paint::convert_fill(
gc,
&t.fill,
FillRule::NonZero,
true,
surface,
fc.state(),
Size::zero(),
)?;
let stroke = t
.stroke
.as_ref()
.map(|s| paint::convert_stroke(gc, s, true, surface, fc.state(), Size::zero()))
.transpose()?;
let text = t.text.as_str();
let size = t.size;
let glyphs: &[PdfGlyph] = TransparentWrapper::wrap_slice(t.glyphs.as_slice());
surface.push_transform(&fc.state().transform().to_krilla());
surface.set_fill(Some(fill));
surface.set_stroke(stroke);
surface.draw_glyphs(
krilla::geom::Point::from_xy(0.0, 0.0),
glyphs,
font.clone(),
text,
size.to_f32(),
false,
);
surface.pop();
Ok(())
}
fn convert_font(
gc: &mut GlobalContext,
typst_font: Font,
) -> SourceResult<krilla::text::Font> {
if let Some(font) = gc.fonts_forward.get(&typst_font) {
Ok(font.clone())
} else {
let font = build_font(typst_font.clone())?;
gc.fonts_forward.insert(typst_font.clone(), font.clone());
gc.fonts_backward.insert(font.clone(), typst_font.clone());
Ok(font)
}
}
#[comemo::memoize]
fn build_font(typst_font: Font) -> SourceResult<krilla::text::Font> {
let font_data: Arc<dyn AsRef<[u8]> + Send + Sync> =
Arc::new(typst_font.data().clone());
match krilla::text::Font::new(font_data.into(), typst_font.index()) {
None => {
let font_str = display_font(&typst_font);
bail!(Span::detached(), "failed to process font {font_str}");
}
Some(f) => Ok(f),
}
}
#[derive(TransparentWrapper, Debug)]
#[repr(transparent)]
struct PdfGlyph(Glyph);
impl krilla::text::Glyph for PdfGlyph {
#[inline(always)]
fn glyph_id(&self) -> GlyphId {
GlyphId::new(self.0.id as u32)
}
#[inline(always)]
fn text_range(&self) -> Range<usize> {
self.0.range.start as usize..self.0.range.end as usize
}
#[inline(always)]
fn x_advance(&self, size: f32) -> f32 {
// Don't use `Em::at`, because it contains an expensive check whether the result is finite.
self.0.x_advance.get() as f32 * size
}
#[inline(always)]
fn x_offset(&self, size: f32) -> f32 {
// Don't use `Em::at`, because it contains an expensive check whether the result is finite.
self.0.x_offset.get() as f32 * size
}
#[inline(always)]
fn y_offset(&self, _: f32) -> f32 {
0.0
}
#[inline(always)]
fn y_advance(&self, _: f32) -> f32 {
0.0
}
fn location(&self) -> Option<Location> {
Some(self.0.span.0.into_raw().get())
}
}
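// Why `#[repr(transparent)]` and `TransparentWrapper` above: they allow
// `handle_text` to reinterpret `&[Glyph]` as `&[PdfGlyph]` without copying
// or reallocating. A hedged minimal sketch:
#[allow(dead_code)]
fn wrap_glyphs_example(glyphs: &[Glyph]) -> &[PdfGlyph] {
TransparentWrapper::wrap_slice(glyphs)
}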


@ -1,184 +0,0 @@
use std::collections::HashMap;
use ecow::eco_format;
use pdf_writer::types::{ColorSpaceOperand, PaintType, TilingType};
use pdf_writer::{Filter, Name, Rect, Ref};
use typst_library::diag::SourceResult;
use typst_library::layout::{Abs, Ratio, Transform};
use typst_library::visualize::{RelativeTo, Tiling};
use typst_utils::Numeric;
use crate::color::PaintEncode;
use crate::resources::{Remapper, ResourcesRefs};
use crate::{content, transform_to_array, PdfChunk, Resources, WithGlobalRefs};
/// Writes the actual patterns (tiling patterns) to the PDF.
/// This is performed once after writing all pages.
pub fn write_tilings(
context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<PdfTiling, Ref>)> {
let mut chunk = PdfChunk::new();
let mut out = HashMap::new();
context.resources.traverse(&mut |resources| {
let Some(patterns) = &resources.tilings else {
return Ok(());
};
for pdf_pattern in patterns.remapper.items() {
let PdfTiling { transform, pattern, content, .. } = pdf_pattern;
if out.contains_key(pdf_pattern) {
continue;
}
let tiling = chunk.alloc();
out.insert(pdf_pattern.clone(), tiling);
let mut tiling_pattern = chunk.tiling_pattern(tiling, content);
tiling_pattern
.tiling_type(TilingType::ConstantSpacing)
.paint_type(PaintType::Colored)
.bbox(Rect::new(
0.0,
0.0,
pattern.size().x.to_pt() as _,
pattern.size().y.to_pt() as _,
))
.x_step((pattern.size().x + pattern.spacing().x).to_pt() as _)
.y_step((pattern.size().y + pattern.spacing().y).to_pt() as _);
// The actual resource dict will be written in a later step
tiling_pattern.pair(Name(b"Resources"), patterns.resources.reference);
tiling_pattern
.matrix(transform_to_array(
transform
.pre_concat(Transform::scale(Ratio::one(), -Ratio::one()))
.post_concat(Transform::translate(
Abs::zero(),
pattern.spacing().y,
)),
))
.filter(Filter::FlateDecode);
}
Ok(())
})?;
Ok((chunk, out))
}
/// A pattern and its transform.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct PdfTiling {
/// The transform to apply to the pattern.
pub transform: Transform,
/// The pattern to paint.
pub pattern: Tiling,
/// The rendered pattern.
pub content: Vec<u8>,
}
/// Registers a pattern with the PDF.
fn register_pattern(
ctx: &mut content::Builder,
pattern: &Tiling,
on_text: bool,
mut transforms: content::Transforms,
) -> SourceResult<usize> {
let patterns = ctx
.resources
.tilings
.get_or_insert_with(|| Box::new(TilingRemapper::new()));
// Edge cases for strokes.
if transforms.size.x.is_zero() {
transforms.size.x = Abs::pt(1.0);
}
if transforms.size.y.is_zero() {
transforms.size.y = Abs::pt(1.0);
}
let transform = match pattern.unwrap_relative(on_text) {
RelativeTo::Self_ => transforms.transform,
RelativeTo::Parent => transforms.container_transform,
};
// Render the body.
let content = content::build(
ctx.options,
&mut patterns.resources,
pattern.frame(),
None,
None,
)?;
let pdf_pattern = PdfTiling {
transform,
pattern: pattern.clone(),
content: content.content.wait().clone(),
};
Ok(patterns.remapper.insert(pdf_pattern))
}
impl PaintEncode for Tiling {
fn set_as_fill(
&self,
ctx: &mut content::Builder,
on_text: bool,
transforms: content::Transforms,
) -> SourceResult<()> {
ctx.reset_fill_color_space();
let index = register_pattern(ctx, self, on_text, transforms)?;
let id = eco_format!("P{index}");
let name = Name(id.as_bytes());
ctx.content.set_fill_color_space(ColorSpaceOperand::Pattern);
ctx.content.set_fill_pattern(None, name);
Ok(())
}
fn set_as_stroke(
&self,
ctx: &mut content::Builder,
on_text: bool,
transforms: content::Transforms,
) -> SourceResult<()> {
ctx.reset_stroke_color_space();
let index = register_pattern(ctx, self, on_text, transforms)?;
let id = eco_format!("P{index}");
let name = Name(id.as_bytes());
ctx.content.set_stroke_color_space(ColorSpaceOperand::Pattern);
ctx.content.set_stroke_pattern(None, name);
Ok(())
}
}
/// De-duplicate patterns and the resources they require to be drawn.
pub struct TilingRemapper<R> {
/// Pattern de-duplicator.
pub remapper: Remapper<PdfTiling>,
/// PDF resources that are used by these patterns.
pub resources: Resources<R>,
}
impl TilingRemapper<()> {
pub fn new() -> Self {
Self {
remapper: Remapper::new("P"),
resources: Resources::default(),
}
}
/// Allocate a reference to the resource dictionary of these patterns.
pub fn with_refs(self, refs: &ResourcesRefs) -> TilingRemapper<Ref> {
TilingRemapper {
remapper: self.remapper,
resources: self.resources.with_refs(refs),
}
}
}


@ -0,0 +1,120 @@
//! Basic utilities for converting typst types to krilla.
use krilla::geom as kg;
use krilla::geom::PathBuilder;
use krilla::paint as kp;
use typst_library::layout::{Abs, Point, Size, Transform};
use typst_library::text::Font;
use typst_library::visualize::{Curve, CurveItem, FillRule, LineCap, LineJoin};
pub(crate) trait SizeExt {
fn to_krilla(&self) -> kg::Size;
}
impl SizeExt for Size {
fn to_krilla(&self) -> kg::Size {
kg::Size::from_wh(self.x.to_f32(), self.y.to_f32()).unwrap()
}
}
pub(crate) trait PointExt {
fn to_krilla(&self) -> kg::Point;
}
impl PointExt for Point {
fn to_krilla(&self) -> kg::Point {
kg::Point::from_xy(self.x.to_f32(), self.y.to_f32())
}
}
pub(crate) trait LineCapExt {
fn to_krilla(&self) -> kp::LineCap;
}
impl LineCapExt for LineCap {
fn to_krilla(&self) -> kp::LineCap {
match self {
LineCap::Butt => kp::LineCap::Butt,
LineCap::Round => kp::LineCap::Round,
LineCap::Square => kp::LineCap::Square,
}
}
}
pub(crate) trait LineJoinExt {
fn to_krilla(&self) -> kp::LineJoin;
}
impl LineJoinExt for LineJoin {
fn to_krilla(&self) -> kp::LineJoin {
match self {
LineJoin::Miter => kp::LineJoin::Miter,
LineJoin::Round => kp::LineJoin::Round,
LineJoin::Bevel => kp::LineJoin::Bevel,
}
}
}
pub(crate) trait TransformExt {
fn to_krilla(&self) -> kg::Transform;
}
impl TransformExt for Transform {
fn to_krilla(&self) -> kg::Transform {
kg::Transform::from_row(
self.sx.get() as f32,
self.ky.get() as f32,
self.kx.get() as f32,
self.sy.get() as f32,
self.tx.to_f32(),
self.ty.to_f32(),
)
}
}
pub(crate) trait FillRuleExt {
fn to_krilla(&self) -> kp::FillRule;
}
impl FillRuleExt for FillRule {
fn to_krilla(&self) -> kp::FillRule {
match self {
FillRule::NonZero => kp::FillRule::NonZero,
FillRule::EvenOdd => kp::FillRule::EvenOdd,
}
}
}
pub(crate) trait AbsExt {
fn to_f32(self) -> f32;
}
impl AbsExt for Abs {
fn to_f32(self) -> f32 {
self.to_pt() as f32
}
}
/// Display the font family of a font.
pub(crate) fn display_font(font: &Font) -> &str {
&font.info().family
}
/// Convert a typst path to a krilla path.
pub(crate) fn convert_path(path: &Curve, builder: &mut PathBuilder) {
for item in &path.0 {
match item {
CurveItem::Move(p) => builder.move_to(p.x.to_f32(), p.y.to_f32()),
CurveItem::Line(p) => builder.line_to(p.x.to_f32(), p.y.to_f32()),
CurveItem::Cubic(p1, p2, p3) => builder.cubic_to(
p1.x.to_f32(),
p1.y.to_f32(),
p2.x.to_f32(),
p2.y.to_f32(),
p3.x.to_f32(),
p3.y.to_f32(),
),
CurveItem::Close => builder.close(),
}
}
}