mirror of https://github.com/typst/typst, synced 2025-06-08 05:06:24 +08:00
Remove old stuff
This commit is contained in:
parent 880aa4b8a1
commit f2d4db286c
Cargo.lock (generated): 18 changed lines
@@ -3102,6 +3102,23 @@ dependencies = [
[[package]]
name = "typst-pdf"
version = "0.12.0"
dependencies = [
 "bytemuck",
 "comemo",
 "ecow",
 "image",
 "krilla",
 "typst-assets",
 "typst-library",
 "typst-macros",
 "typst-syntax",
 "typst-timing",
 "typst-utils",
]

[[package]]
name = "typst-pdf-old"
version = "0.12.0"
dependencies = [
 "arrayvec",
 "base64",
@@ -3110,7 +3127,6 @@ dependencies = [
 "ecow",
 "image",
 "indexmap 2.6.0",
 "krilla",
 "miniz_oxide",
 "pdf-writer 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde",
@@ -17,7 +17,7 @@ use typst::html::HtmlDocument;
use typst::layout::{Frame, Page, PageRanges, PagedDocument};
use typst::syntax::{FileId, Source, Span};
use typst::WorldExt;
use typst_pdf::{PdfOptions, PdfStandards, Validator};
use typst_pdf::{PdfOptions, Validator};

use crate::args::{
    CompileArgs, CompileCommand, DiagnosticFormat, Input, Output, OutputFormat,
@@ -19,21 +19,11 @@ typst-macros = { workspace = true }
typst-syntax = { workspace = true }
typst-timing = { workspace = true }
typst-utils = { workspace = true }
arrayvec = { workspace = true }
base64 = { workspace = true }
bytemuck = { workspace = true }
comemo = { workspace = true }
ecow = { workspace = true }
image = { workspace = true }
indexmap = { workspace = true }
krilla = { workspace = true }
miniz_oxide = { workspace = true }
pdf-writer = { workspace = true }
serde = { workspace = true }
subsetter = { workspace = true }
svg2pdf = { workspace = true }
ttf-parser = { workspace = true }
xmp-writer = { workspace = true }

[lints]
workspace = true
@@ -1,327 +0,0 @@
use std::num::NonZeroUsize;

use ecow::eco_format;
use pdf_writer::types::Direction;
use pdf_writer::writers::PageLabel;
use pdf_writer::{Finish, Name, Pdf, Ref, Str, TextStr};
use typst_library::diag::SourceResult;
use typst_library::foundations::{Datetime, Smart};
use typst_library::layout::Dir;
use typst_library::text::Lang;
use xmp_writer::{DateTime, LangId, RenditionClass, Timezone, XmpWriter};

use crate::page_old::PdfPageLabel;
use crate::{hash_base64, outline_old, TextStrExt, WithEverything};

/// Write the document catalog.
pub fn write_catalog(
    ctx: WithEverything,
    pdf: &mut Pdf,
    alloc: &mut Ref,
) -> SourceResult<()> {
    let lang = ctx
        .resources
        .languages
        .iter()
        .max_by_key(|(_, &count)| count)
        .map(|(&l, _)| l);

    let dir = if lang.map(Lang::dir) == Some(Dir::RTL) {
        Direction::R2L
    } else {
        Direction::L2R
    };

    // Write the outline tree.
    let outline_root_id = outline_old::write_outline(pdf, alloc, &ctx);

    // Write the page labels.
    let page_labels = write_page_labels(pdf, alloc, &ctx);

    // Write the document information.
    let info_ref = alloc.bump();
    let mut info = pdf.document_info(info_ref);
    let mut xmp = XmpWriter::new();
    if let Some(title) = &ctx.document.info.title {
        info.title(TextStr::trimmed(title));
        xmp.title([(None, title.as_str())]);
    }

    if let Some(description) = &ctx.document.info.description {
        info.subject(TextStr::trimmed(description));
        xmp.description([(None, description.as_str())]);
    }

    let authors = &ctx.document.info.author;
    if !authors.is_empty() {
        // Turns out that if the authors are given in both the document
        // information dictionary and the XMP metadata, Acrobat takes a little
        // bit of both: The first author from the document information
        // dictionary and the remaining authors from the XMP metadata.
        //
        // To fix this for Acrobat, we could omit the remaining authors or all
        // metadata from the document information catalog (it is optional) and
        // only write XMP. However, not all other tools (including Apple
        // Preview) read the XMP data. This means we do want to include all
        // authors in the document information dictionary.
        //
        // Thus, the only alternative is to fold all authors into a single
        // `<rdf:li>` in the XMP metadata. This is, in fact, exactly what the
        // PDF/A spec Part 1 section 6.7.3 has to say about the matter. It's a
        // bit weird to not use the array (and it makes Acrobat show the author
        // list in quotes), but there's not much we can do about that.
        let joined = authors.join(", ");
        info.author(TextStr::trimmed(&joined));
        xmp.creator([joined.as_str()]);
    }

    let creator = eco_format!("Typst {}", env!("CARGO_PKG_VERSION"));
    info.creator(TextStr(&creator));
    xmp.creator_tool(&creator);

    let keywords = &ctx.document.info.keywords;
    if !keywords.is_empty() {
        let joined = keywords.join(", ");
        info.keywords(TextStr::trimmed(&joined));
        xmp.pdf_keywords(&joined);
    }

    let date = ctx.document.info.date.unwrap_or(ctx.options.timestamp);
    let tz = ctx.document.info.date.is_auto();
    if let Some(date) = date {
        if let Some(pdf_date) = pdf_date(date, tz) {
            info.creation_date(pdf_date);
            info.modified_date(pdf_date);
        }
    }

    info.finish();

    // A unique ID for this instance of the document. Changes if anything
    // changes in the frames.
    let instance_id = hash_base64(&pdf.as_bytes());

    // Determine the document's ID. It should be as stable as possible.
    const PDF_VERSION: &str = "PDF-1.7";
    let doc_id = if let Smart::Custom(ident) = ctx.options.ident {
        // We were provided with a stable ID. Yay!
        hash_base64(&(PDF_VERSION, ident))
    } else if ctx.document.info.title.is_some() && !ctx.document.info.author.is_empty() {
        // If not provided from the outside, but title and author were given, we
        // compute a hash of them, which should be reasonably stable and unique.
        hash_base64(&(PDF_VERSION, &ctx.document.info.title, &ctx.document.info.author))
    } else {
        // The user provided no usable metadata which we can use as an `/ID`.
        instance_id.clone()
    };

    xmp.document_id(&doc_id);
    xmp.instance_id(&instance_id);
    xmp.format("application/pdf");
    xmp.pdf_version("1.7");
    xmp.language(ctx.resources.languages.keys().map(|lang| LangId(lang.as_str())));
    xmp.num_pages(ctx.document.pages.len() as u32);
    xmp.rendition_class(RenditionClass::Proof);

    if let Some(xmp_date) = date.and_then(|date| xmp_date(date, tz)) {
        xmp.create_date(xmp_date);
        xmp.modify_date(xmp_date);

        // if ctx.options.standards.pdfa {
        //     let mut history = xmp.history();
        //     history
        //         .add_event()
        //         .action(xmp_writer::ResourceEventAction::Saved)
        //         .when(xmp_date)
        //         .instance_id(&eco_format!("{instance_id}_source"));
        //     history
        //         .add_event()
        //         .action(xmp_writer::ResourceEventAction::Converted)
        //         .when(xmp_date)
        //         .instance_id(&instance_id)
        //         .software_agent(&creator);
        // }
    }

    // // Assert dominance.
    // if ctx.options.standards.pdfa {
    //     let mut extension_schemas = xmp.extension_schemas();
    //     extension_schemas
    //         .xmp_media_management()
    //         .properties()
    //         .describe_instance_id();
    //     extension_schemas.pdf().properties().describe_all();
    //     extension_schemas.finish();
    //     xmp.pdfa_part(2);
    //     xmp.pdfa_conformance("B");
    // }

    let xmp_buf = xmp.finish(None);
    let meta_ref = alloc.bump();
    pdf.stream(meta_ref, xmp_buf.as_bytes())
        .pair(Name(b"Type"), Name(b"Metadata"))
        .pair(Name(b"Subtype"), Name(b"XML"));

    // Set IDs only now, so that we don't need to clone them.
    pdf.set_file_id((doc_id.into_bytes(), instance_id.into_bytes()));

    // Write the document catalog.
    let catalog_ref = alloc.bump();
    let mut catalog = pdf.catalog(catalog_ref);
    catalog.pages(ctx.page_tree_ref);
    catalog.viewer_preferences().direction(dir);
    catalog.metadata(meta_ref);

    // Write the named destination tree if there are any entries.
    if !ctx.references.named_destinations.dests.is_empty() {
        let mut name_dict = catalog.names();
        let mut dests_name_tree = name_dict.destinations();
        let mut names = dests_name_tree.names();
        for &(name, dest_ref, ..) in &ctx.references.named_destinations.dests {
            names.insert(Str(name.resolve().as_bytes()), dest_ref);
        }
    }

    // Insert the page labels.
    if !page_labels.is_empty() {
        let mut num_tree = catalog.page_labels();
        let mut entries = num_tree.nums();
        for (n, r) in &page_labels {
            entries.insert(n.get() as i32 - 1, *r);
        }
    }

    if let Some(outline_root_id) = outline_root_id {
        catalog.outlines(outline_root_id);
    }

    if let Some(lang) = lang {
        catalog.lang(TextStr(lang.as_str()));
    }

    // if ctx.options.standards.pdfa {
    //     catalog
    //         .output_intents()
    //         .push()
    //         .subtype(pdf_writer::types::OutputIntentSubtype::PDFA)
    //         .output_condition(TextStr("sRGB"))
    //         .output_condition_identifier(TextStr("Custom"))
    //         .info(TextStr("sRGB IEC61966-2.1"))
    //         .dest_output_profile(ctx.globals.color_functions.srgb.unwrap());
    // }

    catalog.finish();

    // if ctx.options.standards.pdfa && pdf.refs().count() > 8388607 {
    //     bail!(Span::detached(), "too many PDF objects");
    // }

    Ok(())
}

/// Write the page labels.
pub(crate) fn write_page_labels(
    chunk: &mut Pdf,
    alloc: &mut Ref,
    ctx: &WithEverything,
) -> Vec<(NonZeroUsize, Ref)> {
    // If no exported page is labeled, we can skip writing the page labels.
    if !ctx.pages.iter().filter_map(Option::as_ref).any(|p| {
        p.label
            .as_ref()
            .is_some_and(|l| l.prefix.is_some() || l.style.is_some())
    }) {
        return Vec::new();
    }

    let empty_label = PdfPageLabel::default();
    let mut result = vec![];
    let mut prev: Option<&PdfPageLabel> = None;

    // Skip non-exported pages for numbering.
    for (i, page) in ctx.pages.iter().filter_map(Option::as_ref).enumerate() {
        let nr = NonZeroUsize::new(1 + i).unwrap();
        // If there are pages with empty labels between labeled pages, we must
        // write empty PageLabel entries.
        let label = page.label.as_ref().unwrap_or(&empty_label);

        if let Some(pre) = prev {
            if label.prefix == pre.prefix
                && label.style == pre.style
                && label.offset == pre.offset.map(|n| n.saturating_add(1))
            {
                prev = Some(label);
                continue;
            }
        }

        let id = alloc.bump();
        let mut entry = chunk.indirect(id).start::<PageLabel>();

        // Only add what is actually provided. Don't add an empty prefix
        // string if it wasn't given, for example.
        if let Some(prefix) = &label.prefix {
            entry.prefix(TextStr::trimmed(prefix));
        }

        if let Some(style) = label.style {
            entry.style(style.to_pdf_numbering_style());
        }

        if let Some(offset) = label.offset {
            entry.offset(offset.get() as i32);
        }

        result.push((nr, id));
        prev = Some(label);
    }

    result
}
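
// Illustrative sketch (not part of the original file): the run compression in
// `write_page_labels` in miniature, on simplified stand-in tuples of
// (prefix, style, offset). A PDF `/PageLabels` entry applies to every
// following page until the next entry, so a run of labels that only
// increments its offset by one per page collapses into a single entry.
type LabelSketch<'a> = (Option<&'a str>, Option<char>, Option<u32>);

fn compress_labels(labels: &[LabelSketch]) -> Vec<usize> {
    let mut entries = Vec::new();
    let mut prev: Option<&LabelSketch> = None;
    for (i, label) in labels.iter().enumerate() {
        if let Some(pre) = prev {
            // Covered by the previous entry: same prefix and style, and the
            // offset continues the arithmetic run.
            if label.0 == pre.0 && label.1 == pre.1 && label.2 == pre.2.map(|n| n + 1) {
                prev = Some(label);
                continue;
            }
        }
        entries.push(i); // A new PageLabel entry starts at page `i`.
        prev = Some(label);
    }
    entries
}

// For example, roman pages i, ii, iii followed by arabic 1, 2, 3 (offsets
// [1, 2, 3, 1, 2, 3] with a style change) produce just two entries, at page
// indices 0 and 3.
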
/// Converts a datetime to a pdf-writer date.
fn pdf_date(datetime: Datetime, tz: bool) -> Option<pdf_writer::Date> {
    let year = datetime.year().filter(|&y| y >= 0)? as u16;

    let mut pdf_date = pdf_writer::Date::new(year);

    if let Some(month) = datetime.month() {
        pdf_date = pdf_date.month(month);
    }

    if let Some(day) = datetime.day() {
        pdf_date = pdf_date.day(day);
    }

    if let Some(h) = datetime.hour() {
        pdf_date = pdf_date.hour(h);
    }

    if let Some(m) = datetime.minute() {
        pdf_date = pdf_date.minute(m);
    }

    if let Some(s) = datetime.second() {
        pdf_date = pdf_date.second(s);
    }

    if tz {
        pdf_date = pdf_date.utc_offset_hour(0).utc_offset_minute(0);
    }

    Some(pdf_date)
}

/// Converts a datetime to an xmp-writer datetime.
fn xmp_date(datetime: Datetime, tz: bool) -> Option<xmp_writer::DateTime> {
    let year = datetime.year().filter(|&y| y >= 0)? as u16;
    Some(DateTime {
        year,
        month: datetime.month(),
        day: datetime.day(),
        hour: datetime.hour(),
        minute: datetime.minute(),
        second: datetime.second(),
        timezone: if tz { Some(Timezone::Utc) } else { None },
    })
}
@@ -1,344 +0,0 @@
//! OpenType fonts generally define monochrome glyphs, but they can also define
//! glyphs with colors. This is how emojis are generally implemented for
//! example.
//!
//! There are various standards to represent color glyphs, but PDF readers don't
//! support any of them natively, so Typst has to handle them manually.

use std::collections::HashMap;

use ecow::eco_format;
use indexmap::IndexMap;
use pdf_writer::types::UnicodeCmap;
use pdf_writer::writers::WMode;
use pdf_writer::{Filter, Finish, Name, Rect, Ref};
use typst_library::diag::{bail, error, SourceDiagnostic, SourceResult};
use typst_library::foundations::Repr;
use typst_library::layout::Em;
use typst_library::text::color::glyph_frame;
use typst_library::text::{Font, Glyph, TextItemView};

use crate::font_old::{base_font_name, write_font_descriptor, CMAP_NAME, SYSTEM_INFO};
use crate::resources_old::{Resources, ResourcesRefs};
use crate::{content_old, EmExt, PdfChunk, PdfOptions, WithGlobalRefs};

/// Write color fonts in the PDF document.
///
/// They are written as Type3 fonts, which map glyph IDs to arbitrary PDF
/// instructions.
pub fn write_color_fonts(
    context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<ColorFontSlice, Ref>)> {
    let mut out = HashMap::new();
    let mut chunk = PdfChunk::new();
    context.resources.traverse(&mut |resources: &Resources| {
        let Some(color_fonts) = &resources.color_fonts else {
            return Ok(());
        };

        for (color_font, font_slice) in color_fonts.iter() {
            if out.contains_key(&font_slice) {
                continue;
            }

            // Allocate some IDs.
            let subfont_id = chunk.alloc();
            let cmap_ref = chunk.alloc();
            let descriptor_ref = chunk.alloc();
            let widths_ref = chunk.alloc();

            // And a map between glyph IDs and the instructions to draw this
            // glyph.
            let mut glyphs_to_instructions = Vec::new();

            let start = font_slice.subfont * 256;
            let end = (start + 256).min(color_font.glyphs.len());
            let glyph_count = end - start;
            let subset = &color_font.glyphs[start..end];
            let mut widths = Vec::new();
            let mut gids = Vec::new();

            let scale_factor = font_slice.font.ttf().units_per_em() as f32;

            // Write the instructions for each glyph.
            for color_glyph in subset {
                let instructions_stream_ref = chunk.alloc();
                let width = font_slice
                    .font
                    .advance(color_glyph.gid)
                    .unwrap_or(Em::new(0.0))
                    .get() as f32
                    * scale_factor;
                widths.push(width);
                chunk
                    .stream(
                        instructions_stream_ref,
                        color_glyph.instructions.content.wait(),
                    )
                    .filter(Filter::FlateDecode);

                // Use this stream as instructions to draw the glyph.
                glyphs_to_instructions.push(instructions_stream_ref);
                gids.push(color_glyph.gid);
            }

            // Determine the base font name.
            gids.sort();
            let base_font = base_font_name(&font_slice.font, &gids);

            // Write the Type3 font object.
            let mut pdf_font = chunk.type3_font(subfont_id);
            pdf_font.name(Name(base_font.as_bytes()));
            pdf_font.pair(Name(b"Resources"), color_fonts.resources.reference);
            pdf_font.bbox(color_font.bbox);
            pdf_font.matrix([1.0 / scale_factor, 0.0, 0.0, 1.0 / scale_factor, 0.0, 0.0]);
            pdf_font.first_char(0);
            pdf_font.last_char((glyph_count - 1) as u8);
            pdf_font.pair(Name(b"Widths"), widths_ref);
            pdf_font.to_unicode(cmap_ref);
            pdf_font.font_descriptor(descriptor_ref);

            // Write the /CharProcs dictionary, that maps glyph names to
            // drawing instructions.
            let mut char_procs = pdf_font.char_procs();
            for (gid, instructions_ref) in glyphs_to_instructions.iter().enumerate() {
                char_procs
                    .pair(Name(eco_format!("glyph{gid}").as_bytes()), *instructions_ref);
            }
            char_procs.finish();

            // Write the /Encoding dictionary.
            let names = (0..glyph_count)
                .map(|gid| eco_format!("glyph{gid}"))
                .collect::<Vec<_>>();
            pdf_font
                .encoding_custom()
                .differences()
                .consecutive(0, names.iter().map(|name| Name(name.as_bytes())));
            pdf_font.finish();

            // Encode a CMAP to make it possible to search or copy glyphs.
            let glyph_set = resources.color_glyph_sets.get(&font_slice.font).unwrap();
            let mut cmap = UnicodeCmap::new(CMAP_NAME, SYSTEM_INFO);
            for (index, glyph) in subset.iter().enumerate() {
                let Some(text) = glyph_set.get(&glyph.gid) else {
                    continue;
                };

                if !text.is_empty() {
                    cmap.pair_with_multiple(index as u8, text.chars());
                }
            }
            chunk.cmap(cmap_ref, &cmap.finish()).writing_mode(WMode::Horizontal);

            // Write the font descriptor.
            write_font_descriptor(
                &mut chunk,
                descriptor_ref,
                &font_slice.font,
                &base_font,
            );

            // Write the widths array.
            chunk.indirect(widths_ref).array().items(widths);

            out.insert(font_slice, subfont_id);
        }

        Ok(())
    })?;

    Ok((chunk, out))
}

/// A mapping between `Font`s and all the corresponding `ColorFont`s.
///
/// This mapping is one-to-many because there can only be 256 glyphs in a Type 3
/// font, and fonts generally have more color glyphs than that.
pub struct ColorFontMap<R> {
    /// The mapping itself.
    map: IndexMap<Font, ColorFont>,
    /// The resources required to render the fonts in this map.
    ///
    /// For example, this can be the images for glyphs based on bitmaps or SVG.
    pub resources: Resources<R>,
    /// The number of font slices (groups of 256 color glyphs), across all color
    /// fonts.
    total_slice_count: usize,
}

/// A collection of Type3 fonts, belonging to the same TTF font.
pub struct ColorFont {
    /// The IDs of each sub-slice of this font. They are the numbers after "Cf"
    /// in the Resources dictionaries.
    slice_ids: Vec<usize>,
    /// The list of all color glyphs in this family.
    ///
    /// The index in this vector modulo 256 corresponds to the index in one of
    /// the Type3 fonts in `refs` (the `n`-th in the vector, where `n` is the
    /// quotient of the index divided by 256).
    pub glyphs: Vec<ColorGlyph>,
    /// The global bounding box of the font.
    pub bbox: Rect,
    /// A mapping between glyph IDs and character indices in the `glyphs`
    /// vector.
    glyph_indices: HashMap<u16, usize>,
}

/// A single color glyph.
pub struct ColorGlyph {
    /// The ID of the glyph.
    pub gid: u16,
    /// Instructions to draw the glyph.
    pub instructions: content_old::Encoded,
}

impl ColorFontMap<()> {
    /// Creates a new empty mapping.
    pub fn new() -> Self {
        Self {
            map: IndexMap::new(),
            total_slice_count: 0,
            resources: Resources::default(),
        }
    }

    /// For a given glyph in a TTF font, give the ID of the Type3 font and the
    /// index of the glyph inside of this Type3 font.
    ///
    /// If this is the first occurrence of this glyph in this font, it will
    /// start its encoding and add it to the list of known glyphs.
    pub fn get(
        &mut self,
        options: &PdfOptions,
        text: &TextItemView,
        glyph: &Glyph,
    ) -> SourceResult<(usize, u8)> {
        let font = &text.item.font;
        let color_font = self.map.entry(font.clone()).or_insert_with(|| {
            let global_bbox = font.ttf().global_bounding_box();
            let bbox = Rect::new(
                font.to_em(global_bbox.x_min).to_font_units(),
                font.to_em(global_bbox.y_min).to_font_units(),
                font.to_em(global_bbox.x_max).to_font_units(),
                font.to_em(global_bbox.y_max).to_font_units(),
            );
            ColorFont {
                bbox,
                slice_ids: Vec::new(),
                glyphs: Vec::new(),
                glyph_indices: HashMap::new(),
            }
        });

        Ok(if let Some(index_of_glyph) = color_font.glyph_indices.get(&glyph.id) {
            // If we already know this glyph, return it.
            (color_font.slice_ids[index_of_glyph / 256], *index_of_glyph as u8)
        } else {
            // Otherwise, allocate a new ColorGlyph in the font, and a new
            // Type3 font if needed.
            let index = color_font.glyphs.len();
            if index % 256 == 0 {
                color_font.slice_ids.push(self.total_slice_count);
                self.total_slice_count += 1;
            }

            let (frame, tofu) = glyph_frame(font, glyph.id);
            // if options.standards.pdfa && tofu {
            //     bail!(failed_to_convert(text, glyph));
            // }

            let width = font.advance(glyph.id).unwrap_or(Em::new(0.0)).get()
                * font.units_per_em();
            let instructions = content_old::build(
                options,
                &mut self.resources,
                &frame,
                None,
                Some(width as f32),
            )?;
            color_font.glyphs.push(ColorGlyph { gid: glyph.id, instructions });
            color_font.glyph_indices.insert(glyph.id, index);

            (color_font.slice_ids[index / 256], index as u8)
        })
    }

    /// Assign references to the resource dictionary used by this set of color
    /// fonts.
    pub fn with_refs(self, refs: &ResourcesRefs) -> ColorFontMap<Ref> {
        ColorFontMap {
            map: self.map,
            resources: self.resources.with_refs(refs),
            total_slice_count: self.total_slice_count,
        }
    }
}

impl<R> ColorFontMap<R> {
    /// Iterate over all Type3 fonts.
    ///
    /// Each item of this iterator maps to a Type3 font: it contains
    /// at most 256 glyphs. The same TTF font can yield multiple Type3 fonts.
    pub fn iter(&self) -> ColorFontMapIter<'_, R> {
        ColorFontMapIter { map: self, font_index: 0, slice_index: 0 }
    }
}

/// Iterator over a [`ColorFontMap`].
///
/// See [`ColorFontMap::iter`].
pub struct ColorFontMapIter<'a, R> {
    /// The map over which to iterate.
    map: &'a ColorFontMap<R>,
    /// The index of the TTF font over which we currently iterate.
    font_index: usize,
    /// The sub-font (slice of at most 256 glyphs) at which we currently are.
    slice_index: usize,
}

impl<'a, R> Iterator for ColorFontMapIter<'a, R> {
    type Item = (&'a ColorFont, ColorFontSlice);

    fn next(&mut self) -> Option<Self::Item> {
        let (font, color_font) = self.map.map.get_index(self.font_index)?;
        let slice_count = (color_font.glyphs.len() / 256) + 1;

        if self.slice_index >= slice_count {
            self.font_index += 1;
            self.slice_index = 0;
            return self.next();
        }

        let slice = ColorFontSlice { font: font.clone(), subfont: self.slice_index };
        self.slice_index += 1;
        Some((color_font, slice))
    }
}

/// A set of at most 256 glyphs (a limit imposed on Type3 fonts by the PDF
/// specification) that represents a part of a TTF font.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct ColorFontSlice {
    /// The original TTF font.
    pub font: Font,
    /// The index of the Type3 font, among all those that are necessary to
    /// represent the subset of the TTF font we are interested in.
    pub subfont: usize,
}
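
// Illustrative sketch (not part of the original file): the slice addressing
// used by `ColorFontMap::get` above. A glyph's position in the flat `glyphs`
// vector splits into a Type3 subfont number and a one-byte code within that
// subfont, because a Type3 font can encode at most 256 glyphs.
fn slice_address(index: usize) -> (usize, u8) {
    (index / 256, (index % 256) as u8)
}

// For example, the 701st glyph (index 700) lives in subfont 2 at code 188:
// assert_eq!(slice_address(700), (2, 188));
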
/// The error when the glyph could not be converted.
#[cold]
fn failed_to_convert(text: &TextItemView, glyph: &Glyph) -> SourceDiagnostic {
    let mut diag = error!(
        glyph.span.0,
        "the glyph for {} could not be exported",
        text.glyph_text(glyph).repr()
    );

    if text.item.font.ttf().tables().cff2.is_some() {
        diag.hint("CFF2 fonts are not currently supported");
    }

    diag
}
@@ -1,394 +0,0 @@
use std::sync::LazyLock;

use arrayvec::ArrayVec;
use pdf_writer::{writers, Chunk, Dict, Filter, Name, Ref};
use typst_library::diag::{bail, SourceResult};
use typst_library::visualize::{Color, ColorSpace, Paint};
use typst_syntax::Span;

use crate::{content_old, deflate, PdfChunk, PdfOptions, Renumber, WithResources};

// The names of the color spaces.
pub const SRGB: Name<'static> = Name(b"srgb");
pub const D65_GRAY: Name<'static> = Name(b"d65gray");
pub const LINEAR_SRGB: Name<'static> = Name(b"linearrgb");

// The ICC profiles.
static SRGB_ICC_DEFLATED: LazyLock<Vec<u8>> =
    LazyLock::new(|| deflate(typst_assets::icc::S_RGB_V4));
static GRAY_ICC_DEFLATED: LazyLock<Vec<u8>> =
    LazyLock::new(|| deflate(typst_assets::icc::S_GREY_V4));

/// The color spaces present in the PDF document.
#[derive(Default)]
pub struct ColorSpaces {
    use_srgb: bool,
    use_d65_gray: bool,
    use_linear_rgb: bool,
}

impl ColorSpaces {
    /// Mark a color space as used.
    pub fn mark_as_used(&mut self, color_space: ColorSpace) {
        match color_space {
            ColorSpace::Oklch
            | ColorSpace::Oklab
            | ColorSpace::Hsl
            | ColorSpace::Hsv
            | ColorSpace::Srgb => {
                self.use_srgb = true;
            }
            ColorSpace::D65Gray => {
                self.use_d65_gray = true;
            }
            ColorSpace::LinearRgb => {
                self.use_linear_rgb = true;
            }
            ColorSpace::Cmyk => {}
        }
    }

    /// Write the color spaces to the PDF file.
    pub fn write_color_spaces(&self, mut spaces: Dict, refs: &ColorFunctionRefs) {
        if self.use_srgb {
            write(ColorSpace::Srgb, spaces.insert(SRGB).start(), refs);
        }

        if self.use_d65_gray {
            write(ColorSpace::D65Gray, spaces.insert(D65_GRAY).start(), refs);
        }

        if self.use_linear_rgb {
            write(ColorSpace::LinearRgb, spaces.insert(LINEAR_SRGB).start(), refs);
        }
    }

    /// Write the necessary color space functions and ICC profiles to the
    /// PDF file.
    pub fn write_functions(&self, chunk: &mut Chunk, refs: &ColorFunctionRefs) {
        // Write the sRGB color space.
        if let Some(id) = refs.srgb {
            chunk
                .icc_profile(id, &SRGB_ICC_DEFLATED)
                .n(3)
                .range([0.0, 1.0, 0.0, 1.0, 0.0, 1.0])
                .filter(Filter::FlateDecode);
        }

        // Write the gray color space.
        if let Some(id) = refs.d65_gray {
            chunk
                .icc_profile(id, &GRAY_ICC_DEFLATED)
                .n(1)
                .range([0.0, 1.0])
                .filter(Filter::FlateDecode);
        }
    }

    /// Merge two color space usage information together: a given color space is
    /// considered to be used if it is used on either side.
    pub fn merge(&mut self, other: &Self) {
        self.use_d65_gray |= other.use_d65_gray;
        self.use_linear_rgb |= other.use_linear_rgb;
        self.use_srgb |= other.use_srgb;
    }
}

/// Write the color space.
pub fn write(
    color_space: ColorSpace,
    writer: writers::ColorSpace,
    refs: &ColorFunctionRefs,
) {
    match color_space {
        ColorSpace::Srgb
        | ColorSpace::Oklab
        | ColorSpace::Hsl
        | ColorSpace::Hsv
        | ColorSpace::Oklch => writer.icc_based(refs.srgb.unwrap()),
        ColorSpace::D65Gray => writer.icc_based(refs.d65_gray.unwrap()),
        ColorSpace::LinearRgb => {
            writer.cal_rgb(
                [0.9505, 1.0, 1.0888],
                None,
                Some([1.0, 1.0, 1.0]),
                Some([
                    0.4124, 0.2126, 0.0193, 0.3576, 0.715, 0.1192, 0.1805, 0.0722, 0.9505,
                ]),
            );
        }
        ColorSpace::Cmyk => writer.device_cmyk(),
    }
}

/// Global references for color conversion functions.
///
/// These functions are only written once (at most, they are not written if not
/// needed) in the final document, and are shared by all color space
/// dictionaries.
pub struct ColorFunctionRefs {
    pub srgb: Option<Ref>,
    d65_gray: Option<Ref>,
}

impl Renumber for ColorFunctionRefs {
    fn renumber(&mut self, offset: i32) {
        if let Some(r) = &mut self.srgb {
            r.renumber(offset);
        }
        if let Some(r) = &mut self.d65_gray {
            r.renumber(offset);
        }
    }
}

/// Allocate all necessary [`ColorFunctionRefs`].
pub fn alloc_color_functions_refs(
    context: &WithResources,
) -> SourceResult<(PdfChunk, ColorFunctionRefs)> {
    let mut chunk = PdfChunk::new();
    let mut used_color_spaces = ColorSpaces::default();

    // if context.options.standards.pdfa {
    //     used_color_spaces.mark_as_used(ColorSpace::Srgb);
    // }

    context.resources.traverse(&mut |r| {
        used_color_spaces.merge(&r.colors);
        Ok(())
    })?;

    let refs = ColorFunctionRefs {
        srgb: if used_color_spaces.use_srgb { Some(chunk.alloc()) } else { None },
        d65_gray: if used_color_spaces.use_d65_gray { Some(chunk.alloc()) } else { None },
    };

    Ok((chunk, refs))
}

/// Encodes the color into four f32s, which can be used in a PDF file.
/// Ensures that the values are in the range [0.0, 1.0].
///
/// # Why?
/// - Oklab: The a and b components are in the range [-0.5, 0.5] and the PDF
///   specifies (and some readers enforce) that all color values be in the range
///   [0.0, 1.0]. This means that the PostScript function and the encoded color
///   must be offset by 0.5.
/// - HSV/HSL: The hue component is in the range [0.0, 360.0] and the PDF format
///   specifies that it must be in the range [0.0, 1.0]. This means that the
///   PostScript function and the encoded color must be divided by 360.0.
pub trait ColorEncode {
    /// Performs the color to PDF f32 array conversion.
    fn encode(&self, color: Color) -> [f32; 4];
}

impl ColorEncode for ColorSpace {
    fn encode(&self, color: Color) -> [f32; 4] {
        match self {
            ColorSpace::Oklab | ColorSpace::Oklch | ColorSpace::Hsl | ColorSpace::Hsv => {
                color.to_space(ColorSpace::Srgb).to_vec4()
            }
            _ => color.to_space(*self).to_vec4(),
        }
    }
}

/// Encodes a paint into either a fill or stroke color.
pub(super) trait PaintEncode {
    /// Set the paint as the fill color.
    fn set_as_fill(
        &self,
        ctx: &mut content_old::Builder,
        on_text: bool,
        transforms: content_old::Transforms,
    ) -> SourceResult<()>;

    /// Set the paint as the stroke color.
    fn set_as_stroke(
        &self,
        ctx: &mut content_old::Builder,
        on_text: bool,
        transforms: content_old::Transforms,
    ) -> SourceResult<()>;
}

impl PaintEncode for Paint {
    fn set_as_fill(
        &self,
        ctx: &mut content_old::Builder,
        on_text: bool,
        transforms: content_old::Transforms,
    ) -> SourceResult<()> {
        match self {
            Self::Solid(c) => c.set_as_fill(ctx, on_text, transforms),
            Self::Gradient(gradient) => gradient.set_as_fill(ctx, on_text, transforms),
            Self::Pattern(pattern) => pattern.set_as_fill(ctx, on_text, transforms),
        }
    }

    fn set_as_stroke(
        &self,
        ctx: &mut content_old::Builder,
        on_text: bool,
        transforms: content_old::Transforms,
    ) -> SourceResult<()> {
        match self {
            Self::Solid(c) => c.set_as_stroke(ctx, on_text, transforms),
            Self::Gradient(gradient) => gradient.set_as_stroke(ctx, on_text, transforms),
            Self::Pattern(pattern) => pattern.set_as_stroke(ctx, on_text, transforms),
        }
    }
}

impl PaintEncode for Color {
    fn set_as_fill(
        &self,
        ctx: &mut content_old::Builder,
        _: bool,
        _: content_old::Transforms,
    ) -> SourceResult<()> {
        match self {
            Color::Luma(_) => {
                ctx.resources.colors.mark_as_used(ColorSpace::D65Gray);
                ctx.set_fill_color_space(D65_GRAY);

                let [l, _, _, _] = ColorSpace::D65Gray.encode(*self);
                ctx.content.set_fill_color([l]);
            }
            Color::LinearRgb(_) => {
                ctx.resources.colors.mark_as_used(ColorSpace::LinearRgb);
                ctx.set_fill_color_space(LINEAR_SRGB);

                let [r, g, b, _] = ColorSpace::LinearRgb.encode(*self);
                ctx.content.set_fill_color([r, g, b]);
            }
            // Oklab & friends are encoded as RGB.
            Color::Rgb(_)
            | Color::Oklab(_)
            | Color::Oklch(_)
            | Color::Hsl(_)
            | Color::Hsv(_) => {
                ctx.resources.colors.mark_as_used(ColorSpace::Srgb);
                ctx.set_fill_color_space(SRGB);

                let [r, g, b, _] = ColorSpace::Srgb.encode(*self);
                ctx.content.set_fill_color([r, g, b]);
            }
            Color::Cmyk(_) => {
                check_cmyk_allowed(ctx.options)?;
                ctx.reset_fill_color_space();

                let [c, m, y, k] = ColorSpace::Cmyk.encode(*self);
                ctx.content.set_fill_cmyk(c, m, y, k);
            }
        }
        Ok(())
    }

    fn set_as_stroke(
        &self,
        ctx: &mut content_old::Builder,
        _: bool,
        _: content_old::Transforms,
    ) -> SourceResult<()> {
        match self {
            Color::Luma(_) => {
                ctx.resources.colors.mark_as_used(ColorSpace::D65Gray);
                ctx.set_stroke_color_space(D65_GRAY);

                let [l, _, _, _] = ColorSpace::D65Gray.encode(*self);
                ctx.content.set_stroke_color([l]);
            }
            Color::LinearRgb(_) => {
                ctx.resources.colors.mark_as_used(ColorSpace::LinearRgb);
                ctx.set_stroke_color_space(LINEAR_SRGB);

                let [r, g, b, _] = ColorSpace::LinearRgb.encode(*self);
                ctx.content.set_stroke_color([r, g, b]);
            }
            // Oklab & friends are encoded as RGB.
            Color::Rgb(_)
            | Color::Oklab(_)
            | Color::Oklch(_)
            | Color::Hsl(_)
            | Color::Hsv(_) => {
                ctx.resources.colors.mark_as_used(ColorSpace::Srgb);
                ctx.set_stroke_color_space(SRGB);

                let [r, g, b, _] = ColorSpace::Srgb.encode(*self);
                ctx.content.set_stroke_color([r, g, b]);
            }
            Color::Cmyk(_) => {
                check_cmyk_allowed(ctx.options)?;
                ctx.reset_stroke_color_space();

                let [c, m, y, k] = ColorSpace::Cmyk.encode(*self);
                ctx.content.set_stroke_cmyk(c, m, y, k);
            }
        }
        Ok(())
    }
}

/// Extra color space functions.
pub(super) trait ColorSpaceExt {
    /// Returns the range of the color space.
    fn range(self) -> &'static [f32];

    /// Converts a color to the color space.
    fn convert<U: QuantizedColor>(self, color: Color) -> ArrayVec<U, 4>;
}

impl ColorSpaceExt for ColorSpace {
    fn range(self) -> &'static [f32] {
        match self {
            ColorSpace::D65Gray => &[0.0, 1.0],
            ColorSpace::Oklab => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
            ColorSpace::Oklch => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
            ColorSpace::LinearRgb => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
            ColorSpace::Srgb => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
            ColorSpace::Cmyk => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
            ColorSpace::Hsl => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
            ColorSpace::Hsv => &[0.0, 1.0, 0.0, 1.0, 0.0, 1.0],
        }
    }

    fn convert<U: QuantizedColor>(self, color: Color) -> ArrayVec<U, 4> {
        let components = self.encode(color);

        self.range()
            .chunks(2)
            .zip(components)
            .map(|(range, component)| U::quantize(component, [range[0], range[1]]))
            .collect()
    }
}

/// Quantizes a color component to a specific type.
pub(super) trait QuantizedColor {
    fn quantize(color: f32, range: [f32; 2]) -> Self;
}

impl QuantizedColor for u16 {
    fn quantize(color: f32, [min, max]: [f32; 2]) -> Self {
        let value = (color - min) / (max - min);
        (value * Self::MAX as f32).round().clamp(0.0, Self::MAX as f32) as Self
    }
}

impl QuantizedColor for f32 {
    fn quantize(color: f32, [min, max]: [f32; 2]) -> Self {
        color.clamp(min, max)
    }
}
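
// Illustrative sketch (not part of the original file): what `quantize`
// computes for `u16`. A component is first normalized within its color-space
// range (for example [0.0, 1.0] for sRGB channels, see `ColorSpaceExt::range`)
// and then scaled onto the full `u16` range, with out-of-range inputs clamped.
#[cfg(test)]
mod quantize_sketch {
    use super::QuantizedColor;

    #[test]
    fn u16_quantization() {
        assert_eq!(u16::quantize(0.0, [0.0, 1.0]), 0);
        assert_eq!(u16::quantize(1.0, [0.0, 1.0]), u16::MAX);
        assert_eq!(u16::quantize(0.5, [0.0, 1.0]), 32768); // (0.5 * 65535).round()
        assert_eq!(u16::quantize(2.0, [0.0, 1.0]), u16::MAX); // clamped
    }
}
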
/// Fails with an error if PDF/A processing is enabled.
pub(super) fn check_cmyk_allowed(options: &PdfOptions) -> SourceResult<()> {
    // if options.standards.pdfa {
    //     bail!(
    //         Span::detached(),
    //         "cmyk colors are not currently supported by PDF/A export"
    //     );
    // }
    Ok(())
}
@@ -1,826 +0,0 @@
//! Generic writer for PDF content.
//!
//! It is used to write page contents, color glyph instructions, and patterns.
//!
//! See also [`pdf_writer::Content`].

use ecow::eco_format;
use pdf_writer::types::{
    ColorSpaceOperand, LineCapStyle, LineJoinStyle, TextRenderingMode,
};
use pdf_writer::writers::PositionedItems;
use pdf_writer::{Content, Finish, Name, Rect, Str};
use typst_library::diag::{bail, error, SourceDiagnostic, SourceResult};
use typst_library::foundations::Repr;
use typst_library::layout::{
    Abs, Em, Frame, FrameItem, GroupItem, Point, Ratio, Size, Transform,
};
use typst_library::model::Destination;
use typst_library::text::color::should_outline;
use typst_library::text::{Font, Glyph, TextItem, TextItemView};
use typst_library::visualize::{
    FillRule, FixedStroke, Geometry, Image, LineCap, LineJoin, Paint, Path, PathItem,
    Shape,
};
use typst_syntax::Span;
use typst_utils::{Deferred, Numeric, SliceExt};

use crate::color_font::ColorFontMap;
use crate::color_old::PaintEncode;
use crate::extg_old::ExtGState;
use crate::image_old::deferred_image;
use crate::resources_old::Resources;
use crate::{deflate_deferred, AbsExt, ContentExt, EmExt, PdfOptions, StrExt};

/// Encode a [`Frame`] into a content stream.
///
/// The resources that were used in the stream will be added to `resources`.
///
/// `color_glyph_width` should be `None` unless the `Frame` represents a [color
/// glyph].
///
/// [color glyph]: `crate::color_font`
pub fn build(
    options: &PdfOptions,
    resources: &mut Resources<()>,
    frame: &Frame,
    fill: Option<Paint>,
    color_glyph_width: Option<f32>,
) -> SourceResult<Encoded> {
    let size = frame.size();
    let mut ctx = Builder::new(options, resources, size);

    if let Some(width) = color_glyph_width {
        ctx.content.start_color_glyph(width);
    }

    // Make the coordinate system start at the top-left.
    ctx.transform(
        // Make the Y axis go upwards
        Transform::scale(Ratio::one(), -Ratio::one())
            // Also move the origin to the top left corner
            .post_concat(Transform::translate(Abs::zero(), size.y)),
    );

    if let Some(fill) = fill {
        let shape = Geometry::Rect(frame.size()).filled(fill);
        write_shape(&mut ctx, Point::zero(), &shape)?;
    }

    // Encode the frame into the content stream.
    write_frame(&mut ctx, frame)?;

    Ok(Encoded {
        size,
        content: deflate_deferred(ctx.content.finish()),
        uses_opacities: ctx.uses_opacities,
        links: ctx.links,
    })
}
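
// Illustrative sketch (not part of the original file): the coordinate flip
// set up at the start of `build`. Composing `scale(1, -1)` with a translation
// by the frame height turns Typst's top-left, Y-down coordinates into PDF's
// bottom-left, Y-up coordinates; a point (x, y) maps to (x, h - y).
fn flip_y(point: (f32, f32), height: f32) -> (f32, f32) {
    (point.0, height - point.1)
}

// flip_y((10.0, 0.0), 842.0) == (10.0, 842.0): a point on the top edge lands
// at the PDF page's maximum y coordinate.
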
/// An encoded content stream.
pub struct Encoded {
    /// The dimensions of the content.
    pub size: Size,
    /// The actual content stream.
    pub content: Deferred<Vec<u8>>,
    /// Whether the content uses opacities.
    pub uses_opacities: bool,
    /// Links in the PDF coordinate system.
    pub links: Vec<(Destination, Rect)>,
}
/// An exporter for a single PDF content stream.
///
/// Content streams are a series of PDF commands. They can reference external
/// objects only through resources.
///
/// Content streams can be used for page contents, but also to describe color
/// glyphs and patterns.
pub struct Builder<'a, R = ()> {
    /// Settings for PDF export.
    pub(crate) options: &'a PdfOptions<'a>,
    /// A list of all resources that are used in the content stream.
    pub(crate) resources: &'a mut Resources<R>,
    /// The PDF content stream that is being built.
    pub content: Content,
    /// Current graphic state.
    state: State,
    /// Stack of saved graphic states.
    saves: Vec<State>,
    /// Whether any stroke or fill was not totally opaque.
    uses_opacities: bool,
    /// All clickable links that are present in this content.
    links: Vec<(Destination, Rect)>,
}

impl<'a, R> Builder<'a, R> {
    /// Create a new content builder.
    pub fn new(
        options: &'a PdfOptions<'a>,
        resources: &'a mut Resources<R>,
        size: Size,
    ) -> Self {
        Builder {
            options,
            resources,
            uses_opacities: false,
            content: Content::new(),
            state: State::new(size),
            saves: vec![],
            links: vec![],
        }
    }
}

/// A simulated graphics state used to deduplicate graphics state changes and
/// keep track of the current transformation matrix for link annotations.
#[derive(Debug, Clone)]
struct State {
    /// The transform of the current item.
    transform: Transform,
    /// The transform of the first hard frame in the hierarchy.
    container_transform: Transform,
    /// The size of the first hard frame in the hierarchy.
    size: Size,
    /// The current font.
    font: Option<(Font, Abs)>,
    /// The current fill paint.
    fill: Option<Paint>,
    /// The color space of the current fill paint.
    fill_space: Option<Name<'static>>,
    /// The current external graphic state.
    external_graphics_state: ExtGState,
    /// The current stroke paint.
    stroke: Option<FixedStroke>,
    /// The color space of the current stroke paint.
    stroke_space: Option<Name<'static>>,
    /// The current text rendering mode.
    text_rendering_mode: TextRenderingMode,
}

impl State {
    /// Creates a new, clean state for a given `size`.
    pub fn new(size: Size) -> Self {
        Self {
            transform: Transform::identity(),
            container_transform: Transform::identity(),
            size,
            font: None,
            fill: None,
            fill_space: None,
            external_graphics_state: ExtGState::default(),
            stroke: None,
            stroke_space: None,
            text_rendering_mode: TextRenderingMode::Fill,
        }
    }

    /// Creates the [`Transforms`] structure for the current item.
    pub fn transforms(&self, size: Size, pos: Point) -> Transforms {
        Transforms {
            transform: self.transform.pre_concat(Transform::translate(pos.x, pos.y)),
            container_transform: self.container_transform,
            container_size: self.size,
            size,
        }
    }
}

/// Subset of the state used to calculate the transform of gradients and patterns.
#[derive(Debug, Clone, Copy)]
pub(super) struct Transforms {
    /// The transform of the current item.
    pub transform: Transform,
    /// The transform of the first hard frame in the hierarchy.
    pub container_transform: Transform,
    /// The size of the first hard frame in the hierarchy.
    pub container_size: Size,
    /// The size of the item.
    pub size: Size,
}

impl Builder<'_, ()> {
    fn save_state(&mut self) -> SourceResult<()> {
        self.saves.push(self.state.clone());
        self.content.save_state_checked()
    }

    fn restore_state(&mut self) {
        self.content.restore_state();
        self.state = self.saves.pop().expect("missing state save");
    }

    fn set_external_graphics_state(&mut self, graphics_state: &ExtGState) {
        let current_state = &self.state.external_graphics_state;
        if current_state != graphics_state {
            let index = self.resources.ext_gs.insert(*graphics_state);
            let name = eco_format!("Gs{index}");
            self.content.set_parameters(Name(name.as_bytes()));

            self.state.external_graphics_state = *graphics_state;
            if graphics_state.uses_opacities() {
                self.uses_opacities = true;
            }
        }
    }

    fn set_opacities(&mut self, stroke: Option<&FixedStroke>, fill: Option<&Paint>) {
        let get_opacity = |paint: &Paint| {
            let color = match paint {
                Paint::Solid(color) => *color,
                Paint::Gradient(_) | Paint::Pattern(_) => return 255,
            };

            color.alpha().map_or(255, |v| (v * 255.0).round() as u8)
        };

        let stroke_opacity = stroke.map_or(255, |stroke| get_opacity(&stroke.paint));
        let fill_opacity = fill.map_or(255, get_opacity);
        self.set_external_graphics_state(&ExtGState { stroke_opacity, fill_opacity });
    }

    fn reset_opacities(&mut self) {
        self.set_external_graphics_state(&ExtGState {
            stroke_opacity: 255,
            fill_opacity: 255,
        });
    }

    pub fn transform(&mut self, transform: Transform) {
        let Transform { sx, ky, kx, sy, tx, ty } = transform;
        self.state.transform = self.state.transform.pre_concat(transform);
        if self.state.container_transform.is_identity() {
            self.state.container_transform = self.state.transform;
        }
        self.content.transform([
            sx.get() as _,
            ky.get() as _,
            kx.get() as _,
            sy.get() as _,
            tx.to_f32(),
            ty.to_f32(),
        ]);
    }

    fn group_transform(&mut self, transform: Transform) {
        self.state.container_transform =
            self.state.container_transform.pre_concat(transform);
    }

    fn set_font(&mut self, font: &Font, size: Abs) {
        if self.state.font.as_ref().map(|(f, s)| (f, *s)) != Some((font, size)) {
            let index = self.resources.fonts.insert(font.clone());
            let name = eco_format!("F{index}");
            self.content.set_font(Name(name.as_bytes()), size.to_f32());
            self.state.font = Some((font.clone(), size));
        }
    }

    fn size(&mut self, size: Size) {
        self.state.size = size;
    }

    fn set_fill(
        &mut self,
        fill: &Paint,
        on_text: bool,
        transforms: Transforms,
    ) -> SourceResult<()> {
        if self.state.fill.as_ref() != Some(fill)
            || matches!(self.state.fill, Some(Paint::Gradient(_)))
        {
            fill.set_as_fill(self, on_text, transforms)?;
            self.state.fill = Some(fill.clone());
        }
        Ok(())
    }

    pub fn set_fill_color_space(&mut self, space: Name<'static>) {
        if self.state.fill_space != Some(space) {
            self.content.set_fill_color_space(ColorSpaceOperand::Named(space));
            self.state.fill_space = Some(space);
        }
    }

    pub fn reset_fill_color_space(&mut self) {
        self.state.fill_space = None;
    }

    fn set_stroke(
        &mut self,
        stroke: &FixedStroke,
        on_text: bool,
        transforms: Transforms,
    ) -> SourceResult<()> {
        if self.state.stroke.as_ref() != Some(stroke)
            || matches!(
                self.state.stroke.as_ref().map(|s| &s.paint),
                Some(Paint::Gradient(_))
            )
        {
            let FixedStroke { paint, thickness, cap, join, dash, miter_limit } = stroke;
            paint.set_as_stroke(self, on_text, transforms)?;

            self.content.set_line_width(thickness.to_f32());
            if self.state.stroke.as_ref().map(|s| &s.cap) != Some(cap) {
                self.content.set_line_cap(to_pdf_line_cap(*cap));
            }
            if self.state.stroke.as_ref().map(|s| &s.join) != Some(join) {
                self.content.set_line_join(to_pdf_line_join(*join));
            }
            if self.state.stroke.as_ref().map(|s| &s.dash) != Some(dash) {
                if let Some(pattern) = dash {
                    self.content.set_dash_pattern(
                        pattern.array.iter().map(|l| l.to_f32()),
                        pattern.phase.to_f32(),
                    );
                } else {
                    self.content.set_dash_pattern([], 0.0);
                }
            }
            if self.state.stroke.as_ref().map(|s| &s.miter_limit) != Some(miter_limit) {
                self.content.set_miter_limit(miter_limit.get() as f32);
            }
            self.state.stroke = Some(stroke.clone());
        }

        Ok(())
    }

    pub fn set_stroke_color_space(&mut self, space: Name<'static>) {
        if self.state.stroke_space != Some(space) {
            self.content.set_stroke_color_space(ColorSpaceOperand::Named(space));
            self.state.stroke_space = Some(space);
        }
    }

    pub fn reset_stroke_color_space(&mut self) {
        self.state.stroke_space = None;
    }

    fn set_text_rendering_mode(&mut self, mode: TextRenderingMode) {
        if self.state.text_rendering_mode != mode {
            self.content.set_text_rendering_mode(mode);
            self.state.text_rendering_mode = mode;
        }
    }
}

/// Encode a frame into the content stream.
pub(crate) fn write_frame(ctx: &mut Builder, frame: &Frame) -> SourceResult<()> {
    for &(pos, ref item) in frame.items() {
        let x = pos.x.to_f32();
        let y = pos.y.to_f32();
        match item {
            FrameItem::Group(group) => write_group(ctx, pos, group)?,
            FrameItem::Text(text) => write_text(ctx, pos, text)?,
            FrameItem::Shape(shape, _) => write_shape(ctx, pos, shape)?,
            FrameItem::Image(image, size, span) => {
                write_image(ctx, x, y, image, *size, *span)?
            }
            FrameItem::Link(dest, size) => write_link(ctx, pos, dest, *size),
            FrameItem::Tag(_) => {}
        }
    }
    Ok(())
}

/// Encode a group into the content stream.
fn write_group(ctx: &mut Builder, pos: Point, group: &GroupItem) -> SourceResult<()> {
    let translation = Transform::translate(pos.x, pos.y);

    ctx.save_state()?;

    if group.frame.kind().is_hard() {
        ctx.group_transform(
            ctx.state
                .transform
                .post_concat(ctx.state.container_transform.invert().unwrap())
                .pre_concat(translation)
                .pre_concat(group.transform),
        );
        ctx.size(group.frame.size());
    }

    ctx.transform(translation.pre_concat(group.transform));
    if let Some(clip_path) = &group.clip_path {
        write_path(ctx, 0.0, 0.0, clip_path);
        ctx.content.clip_nonzero();
        ctx.content.end_path();
    }

    write_frame(ctx, &group.frame)?;
    ctx.restore_state();

    Ok(())
}

/// Encode a text run into the content stream.
fn write_text(ctx: &mut Builder, pos: Point, text: &TextItem) -> SourceResult<()> {
    // if ctx.options.standards.pdfa && text.font.info().is_last_resort() {
    //     bail!(
    //         Span::find(text.glyphs.iter().map(|g| g.span.0)),
    //         "the text {} could not be displayed with any font",
    //         &text.text,
    //     );
    // }

    let outline_glyphs =
        text.glyphs.iter().filter(|g| should_outline(&text.font, g)).count();

    if outline_glyphs == text.glyphs.len() {
        write_normal_text(ctx, pos, TextItemView::full(text))?;
    } else if outline_glyphs == 0 {
        write_complex_glyphs(ctx, pos, TextItemView::full(text))?;
    } else {
        // Otherwise we need to split it into smaller text runs.
        let mut offset = 0;
        let mut position_in_run = Abs::zero();
        for (should_outline, sub_run) in
            text.glyphs.group_by_key(|g| should_outline(&text.font, g))
        {
            let end = offset + sub_run.len();

            // Build a sub text-run.
            let text_item_view = TextItemView::from_glyph_range(text, offset..end);

            // Adjust the position of the run on the line.
            let pos = pos + Point::new(position_in_run, Abs::zero());
            position_in_run += text_item_view.width();
            offset = end;

            // Actually write the sub text-run.
            if should_outline {
                write_normal_text(ctx, pos, text_item_view)?;
            } else {
                write_complex_glyphs(ctx, pos, text_item_view)?;
            }
        }
    }

    Ok(())
}
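
// Illustrative sketch (not part of the original file): the run splitting done
// by `write_text` above, on plain booleans instead of glyphs. Consecutive
// glyphs that agree on `should_outline` form one sub-run; each sub-run is
// then written either as ordinary outlined text or as color glyphs.
fn split_runs(outline_flags: &[bool]) -> Vec<(bool, usize)> {
    let mut runs: Vec<(bool, usize)> = Vec::new();
    for &flag in outline_flags {
        match runs.last_mut() {
            Some((last, len)) if *last == flag => *len += 1,
            _ => runs.push((flag, 1)),
        }
    }
    runs
}

// [true, true, false, true] yields [(true, 2), (false, 1), (true, 1)]: three
// sub-runs, written as normal text, color glyphs, and normal text again.
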
/// Encodes a text run (without any color glyph) into the content stream.
|
||||
fn write_normal_text(
|
||||
ctx: &mut Builder,
|
||||
pos: Point,
|
||||
text: TextItemView,
|
||||
) -> SourceResult<()> {
|
||||
let x = pos.x.to_f32();
|
||||
let y = pos.y.to_f32();
|
||||
|
||||
*ctx.resources.languages.entry(text.item.lang).or_insert(0) += text.glyph_range.len();
|
||||
|
||||
let glyph_set = ctx.resources.glyph_sets.entry(text.item.font.clone()).or_default();
|
||||
for g in text.glyphs() {
|
||||
glyph_set.entry(g.id).or_insert_with(|| text.glyph_text(g));
|
||||
}
|
||||
|
||||
let fill_transform = ctx.state.transforms(Size::zero(), pos);
|
||||
ctx.set_fill(&text.item.fill, true, fill_transform)?;
|
||||
|
||||
let stroke = text.item.stroke.as_ref().and_then(|stroke| {
|
||||
if stroke.thickness.to_f32() > 0.0 {
|
||||
Some(stroke)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
|
||||
if let Some(stroke) = stroke {
|
||||
ctx.set_stroke(stroke, true, fill_transform)?;
|
||||
ctx.set_text_rendering_mode(TextRenderingMode::FillStroke);
|
||||
} else {
|
||||
ctx.set_text_rendering_mode(TextRenderingMode::Fill);
|
||||
}
|
||||
|
||||
ctx.set_font(&text.item.font, text.item.size);
|
||||
ctx.set_opacities(text.item.stroke.as_ref(), Some(&text.item.fill));
|
||||
ctx.content.begin_text();
|
||||
|
||||
// Position the text.
|
||||
ctx.content.set_text_matrix([1.0, 0.0, 0.0, -1.0, x, y]);
|
||||
|
||||
let mut positioned = ctx.content.show_positioned();
|
||||
let mut items = positioned.items();
|
||||
let mut adjustment = Em::zero();
|
||||
let mut encoded = vec![];
|
||||
|
||||
let glyph_remapper = ctx
|
||||
.resources
|
||||
.glyph_remappers
|
||||
.entry(text.item.font.clone())
|
||||
.or_default();
|
||||
|
||||
// Write the glyphs with kerning adjustments.
|
||||
for glyph in text.glyphs() {
|
||||
// if ctx.options.standards.pdfa && glyph.id == 0 {
|
||||
// bail!(tofu(&text, glyph));
|
||||
// }
|
||||
|
||||
adjustment += glyph.x_offset;
|
||||
|
||||
if !adjustment.is_zero() {
|
||||
if !encoded.is_empty() {
|
||||
show_text(&mut items, &encoded);
|
||||
encoded.clear();
|
||||
}
|
||||
|
||||
items.adjust(-adjustment.to_font_units());
|
||||
adjustment = Em::zero();
|
||||
}
|
||||
|
||||
// In PDF, we use CIDs to index the glyphs in a font, not GIDs. What a
|
||||
// CID actually refers to depends on the type of font we are embedding:
|
||||
//
|
||||
// - For TrueType fonts, the CIDs are defined by an external mapping.
|
||||
// - For SID-keyed CFF fonts, the CID is the same as the GID in the font.
|
||||
// - For CID-keyed CFF fonts, the CID refers to the CID in the font.
|
||||
//
|
||||
// (See in the PDF-spec for more details on this.)
|
||||
//
|
||||
// However, in our case:
|
||||
// - We use the identity-mapping for TrueType fonts.
|
||||
// - SID-keyed fonts will get converted into CID-keyed fonts by the
|
||||
// subsetter.
|
||||
// - CID-keyed fonts will be rewritten in a way so that the mapping
|
||||
// between CID and GID is always the identity mapping, regardless of
|
||||
// the mapping before.
|
||||
//
|
||||
// Because of this, we can always use the remapped GID as the CID,
|
||||
// regardless of which type of font we are actually embedding.
|
||||
let cid = glyph_remapper.remap(glyph.id);
|
||||
encoded.push((cid >> 8) as u8);
|
||||
encoded.push((cid & 0xff) as u8);
|
||||
|
||||
if let Some(advance) = text.item.font.advance(glyph.id) {
|
||||
adjustment += glyph.x_advance - advance;
|
||||
}
|
||||
|
||||
adjustment -= glyph.x_offset;
|
||||
}
|
||||
|
||||
if !encoded.is_empty() {
|
||||
show_text(&mut items, &encoded);
|
||||
}
|
||||
|
||||
items.finish();
|
||||
positioned.finish();
|
||||
ctx.content.end_text();
|
||||
|
||||
Ok(())
|
||||
}
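
// Illustrative sketch of the two-byte big-endian CID encoding used in
// `write_normal_text` above, together with its inverse. `encode_cid` and
// `decode_cid` are hypothetical helpers, using only the standard library.
fn encode_cid(cid: u16, out: &mut Vec<u8>) {
    // High byte first, low byte second, matching the pushes above.
    out.push((cid >> 8) as u8);
    out.push((cid & 0xff) as u8);
}

fn decode_cid(hi: u8, lo: u8) -> u16 {
    ((hi as u16) << 8) | lo as u16
}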

/// Shows text, ensuring that each individual string doesn't exceed the
/// implementation limits.
fn show_text(items: &mut PositionedItems, encoded: &[u8]) {
    for chunk in encoded.chunks(Str::PDFA_LIMIT) {
        items.show(Str(chunk));
    }
}

/// Encodes a text run made only of color glyphs into the content stream.
fn write_complex_glyphs(
    ctx: &mut Builder,
    pos: Point,
    text: TextItemView,
) -> SourceResult<()> {
    let x = pos.x.to_f32();
    let y = pos.y.to_f32();

    let mut last_font = None;

    ctx.reset_opacities();

    ctx.content.begin_text();
    ctx.content.set_text_matrix([1.0, 0.0, 0.0, -1.0, x, y]);
    // So that the next call to ctx.set_font() will change the font to one that
    // displays regular glyphs and not color glyphs.
    ctx.state.font = None;

    let glyph_set = ctx
        .resources
        .color_glyph_sets
        .entry(text.item.font.clone())
        .or_default();

    for glyph in text.glyphs() {
        // if ctx.options.standards.pdfa && glyph.id == 0 {
        //     bail!(tofu(&text, glyph));
        // }

        // Retrieve the Type3 font reference and the glyph index in the font.
        let color_fonts = ctx
            .resources
            .color_fonts
            .get_or_insert_with(|| Box::new(ColorFontMap::new()));

        let (font, index) = color_fonts.get(ctx.options, &text, glyph)?;

        if last_font != Some(font) {
            ctx.content.set_font(
                Name(eco_format!("Cf{}", font).as_bytes()),
                text.item.size.to_f32(),
            );
            last_font = Some(font);
        }

        ctx.content.show(Str(&[index]));

        glyph_set.entry(glyph.id).or_insert_with(|| text.glyph_text(glyph));
    }
    ctx.content.end_text();

    Ok(())
}

/// Encode a geometrical shape into the content stream.
fn write_shape(ctx: &mut Builder, pos: Point, shape: &Shape) -> SourceResult<()> {
    let x = pos.x.to_f32();
    let y = pos.y.to_f32();

    let stroke = shape.stroke.as_ref().and_then(|stroke| {
        if stroke.thickness.to_f32() > 0.0 {
            Some(stroke)
        } else {
            None
        }
    });

    if shape.fill.is_none() && stroke.is_none() {
        return Ok(());
    }

    if let Some(fill) = &shape.fill {
        ctx.set_fill(fill, false, ctx.state.transforms(shape.geometry.bbox_size(), pos))?;
    }

    if let Some(stroke) = stroke {
        ctx.set_stroke(
            stroke,
            false,
            ctx.state.transforms(shape.geometry.bbox_size(), pos),
        )?;
    }

    ctx.set_opacities(stroke, shape.fill.as_ref());

    match shape.geometry {
        Geometry::Line(target) => {
            let dx = target.x.to_f32();
            let dy = target.y.to_f32();
            ctx.content.move_to(x, y);
            ctx.content.line_to(x + dx, y + dy);
        }
        Geometry::Rect(size) => {
            let w = size.x.to_f32();
            let h = size.y.to_f32();
            if w.abs() > f32::EPSILON && h.abs() > f32::EPSILON {
                ctx.content.rect(x, y, w, h);
            }
        }
        Geometry::Path(ref path) => {
            write_path(ctx, x, y, path);
        }
    }

    match (&shape.fill, &shape.fill_rule, stroke) {
        (None, _, None) => unreachable!(),
        (Some(_), FillRule::NonZero, None) => ctx.content.fill_nonzero(),
        (Some(_), FillRule::EvenOdd, None) => ctx.content.fill_even_odd(),
        (None, _, Some(_)) => ctx.content.stroke(),
        (Some(_), FillRule::NonZero, Some(_)) => ctx.content.fill_nonzero_and_stroke(),
        (Some(_), FillRule::EvenOdd, Some(_)) => ctx.content.fill_even_odd_and_stroke(),
    };

    Ok(())
}

/// Encode a bezier path into the content stream.
fn write_path(ctx: &mut Builder, x: f32, y: f32, path: &Path) {
    for elem in &path.0 {
        match elem {
            PathItem::MoveTo(p) => {
                ctx.content.move_to(x + p.x.to_f32(), y + p.y.to_f32())
            }
            PathItem::LineTo(p) => {
                ctx.content.line_to(x + p.x.to_f32(), y + p.y.to_f32())
            }
            PathItem::CubicTo(p1, p2, p3) => ctx.content.cubic_to(
                x + p1.x.to_f32(),
                y + p1.y.to_f32(),
                x + p2.x.to_f32(),
                y + p2.y.to_f32(),
                x + p3.x.to_f32(),
                y + p3.y.to_f32(),
            ),
            PathItem::ClosePath => ctx.content.close_path(),
        };
    }
}

/// Encode a vector or raster image into the content stream.
fn write_image(
    ctx: &mut Builder,
    x: f32,
    y: f32,
    image: &Image,
    size: Size,
    span: Span,
) -> SourceResult<()> {
    let index = ctx.resources.images.insert(image.clone());
    ctx.resources.deferred_images.entry(index).or_insert_with(|| {
        let (image, color_space) = deferred_image(image.clone(), false);
        if let Some(color_space) = color_space {
            ctx.resources.colors.mark_as_used(color_space);
        }
        (image, span)
    });

    ctx.reset_opacities();

    let name = eco_format!("Im{index}");
    let w = size.x.to_f32();
    let h = size.y.to_f32();
    ctx.content.save_state_checked()?;
    ctx.content.transform([w, 0.0, 0.0, -h, x, y + h]);

    if let Some(alt) = image.alt() {
        // if ctx.options.standards.pdfa && alt.len() > Str::PDFA_LIMIT {
        //     bail!(span, "the image's alt text is too long");
        // }

        let mut image_span =
            ctx.content.begin_marked_content_with_properties(Name(b"Span"));
        let mut image_alt = image_span.properties();
        image_alt.pair(Name(b"Alt"), Str(alt.as_bytes()));
        image_alt.finish();
        image_span.finish();

        ctx.content.x_object(Name(name.as_bytes()));
        ctx.content.end_marked_content();
    } else {
        ctx.content.x_object(Name(name.as_bytes()));
    }

    ctx.content.restore_state();
    Ok(())
}

/// Save a link for later writing in the annotations dictionary.
fn write_link(ctx: &mut Builder, pos: Point, dest: &Destination, size: Size) {
    let mut min_x = Abs::inf();
    let mut min_y = Abs::inf();
    let mut max_x = -Abs::inf();
    let mut max_y = -Abs::inf();

    // Compute the bounding box of the transformed link.
    for point in [
        pos,
        pos + Point::with_x(size.x),
        pos + Point::with_y(size.y),
        pos + size.to_point(),
    ] {
        let t = point.transform(ctx.state.transform);
        min_x.set_min(t.x);
        min_y.set_min(t.y);
        max_x.set_max(t.x);
        max_y.set_max(t.y);
    }

    let x1 = min_x.to_f32();
    let x2 = max_x.to_f32();
    let y1 = max_y.to_f32();
    let y2 = min_y.to_f32();
    let rect = Rect::new(x1, y1, x2, y2);

    ctx.links.push((dest.clone(), rect));
}

fn to_pdf_line_cap(cap: LineCap) -> LineCapStyle {
    match cap {
        LineCap::Butt => LineCapStyle::ButtCap,
        LineCap::Round => LineCapStyle::RoundCap,
        LineCap::Square => LineCapStyle::ProjectingSquareCap,
    }
}

fn to_pdf_line_join(join: LineJoin) -> LineJoinStyle {
    match join {
        LineJoin::Miter => LineJoinStyle::MiterJoin,
        LineJoin::Round => LineJoinStyle::RoundJoin,
        LineJoin::Bevel => LineJoinStyle::BevelJoin,
    }
}

/// The error when there is a tofu glyph.
#[cold]
fn tofu(text: &TextItemView, glyph: &Glyph) -> SourceDiagnostic {
    error!(
        glyph.span.0,
        "the text {} could not be displayed with any font",
        text.glyph_text(glyph).repr(),
    )
}
@ -1,53 +0,0 @@
use std::collections::HashMap;

use pdf_writer::Ref;
use typst_library::diag::SourceResult;

use crate::{PdfChunk, WithGlobalRefs};

/// A PDF external graphics state.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub struct ExtGState {
    // In the range 0-255; needs to be divided before being written into the graphics state!
    pub stroke_opacity: u8,
    // In the range 0-255; needs to be divided before being written into the graphics state!
    pub fill_opacity: u8,
}

impl Default for ExtGState {
    fn default() -> Self {
        Self { stroke_opacity: 255, fill_opacity: 255 }
    }
}

impl ExtGState {
    pub fn uses_opacities(&self) -> bool {
        self.stroke_opacity != 255 || self.fill_opacity != 255
    }
}

/// Embed all used external graphics states into the PDF.
pub fn write_graphic_states(
    context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<ExtGState, Ref>)> {
    let mut chunk = PdfChunk::new();
    let mut out = HashMap::new();
    context.resources.traverse(&mut |resources| {
        for external_gs in resources.ext_gs.items() {
            if out.contains_key(external_gs) {
                continue;
            }

            let id = chunk.alloc();
            out.insert(*external_gs, id);
            chunk
                .ext_graphics(id)
                .non_stroking_alpha(external_gs.fill_opacity as f32 / 255.0)
                .stroking_alpha(external_gs.stroke_opacity as f32 / 255.0);
        }

        Ok(())
    })?;

    Ok((chunk, out))
}
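
// Illustrative sketch of the quantization behind `ExtGState`: an opacity in
// 0.0..=1.0 is stored as a `u8` and divided by 255 again when written, as in
// `write_graphic_states` above. `quantize_opacity` is a hypothetical helper.
fn quantize_opacity(opacity: f32) -> u8 {
    (opacity.clamp(0.0, 1.0) * 255.0).round() as u8
}
// Round trip: quantize_opacity(0.5) == 128 and 128.0 / 255.0 ≈ 0.502, so the
// default of 255 is exactly full opacity and needs no graphics state entry.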
@ -1,278 +0,0 @@
use std::collections::{BTreeMap, HashMap};
use std::hash::Hash;
use std::sync::Arc;

use ecow::{eco_format, EcoString};
use pdf_writer::types::{CidFontType, FontFlags, SystemInfo, UnicodeCmap};
use pdf_writer::writers::{FontDescriptor, WMode};
use pdf_writer::{Chunk, Filter, Finish, Name, Rect, Ref, Str};
use subsetter::GlyphRemapper;
use ttf_parser::{name_id, GlyphId, Tag};
use typst_library::diag::{At, SourceResult};
use typst_library::text::Font;
use typst_syntax::Span;
use typst_utils::SliceExt;

use crate::{deflate, EmExt, NameExt, PdfChunk, WithGlobalRefs};

const CFF: Tag = Tag::from_bytes(b"CFF ");
const CFF2: Tag = Tag::from_bytes(b"CFF2");

const SUBSET_TAG_LEN: usize = 6;
const IDENTITY_H: &str = "Identity-H";

pub(crate) const CMAP_NAME: Name = Name(b"Custom");
pub(crate) const SYSTEM_INFO: SystemInfo = SystemInfo {
    registry: Str(b"Adobe"),
    ordering: Str(b"Identity"),
    supplement: 0,
};

/// Embed all used fonts into the PDF.
#[typst_macros::time(name = "write fonts")]
pub fn write_fonts(
    context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<Font, Ref>)> {
    let mut chunk = PdfChunk::new();
    let mut out = HashMap::new();
    context.resources.traverse(&mut |resources| {
        for font in resources.fonts.items() {
            if out.contains_key(font) {
                continue;
            }

            let type0_ref = chunk.alloc();
            let cid_ref = chunk.alloc();
            let descriptor_ref = chunk.alloc();
            let cmap_ref = chunk.alloc();
            let data_ref = chunk.alloc();
            out.insert(font.clone(), type0_ref);

            let glyph_set = resources.glyph_sets.get(font).unwrap();
            let glyph_remapper = resources.glyph_remappers.get(font).unwrap();
            let ttf = font.ttf();

            // Do we have a TrueType or CFF font?
            //
            // FIXME: CFF2 must be handled differently and requires PDF 2.0
            // (or we have to convert it to CFF).
            let is_cff = ttf
                .raw_face()
                .table(CFF)
                .or_else(|| ttf.raw_face().table(CFF2))
                .is_some();

            let base_font = base_font_name(font, glyph_set);
            let base_font_type0 = if is_cff {
                eco_format!("{base_font}-{IDENTITY_H}")
            } else {
                base_font.clone()
            };

            // Write the base font object referencing the CID font.
            chunk
                .type0_font(type0_ref)
                .base_font(Name(base_font_type0.as_bytes()))
                .encoding_predefined(Name(IDENTITY_H.as_bytes()))
                .descendant_font(cid_ref)
                .to_unicode(cmap_ref);

            // Write the CID font referencing the font descriptor.
            let mut cid = chunk.cid_font(cid_ref);
            cid.subtype(if is_cff { CidFontType::Type0 } else { CidFontType::Type2 });
            cid.base_font(Name(base_font.as_bytes()));
            cid.system_info(SYSTEM_INFO);
            cid.font_descriptor(descriptor_ref);
            cid.default_width(0.0);
            if !is_cff {
                cid.cid_to_gid_map_predefined(Name(b"Identity"));
            }

            // Extract the widths of all glyphs.
            // `remapped_gids` returns an iterator over the old GIDs in their new sorted
            // order, so we can append the widths as is.
            let widths = glyph_remapper
                .remapped_gids()
                .map(|gid| {
                    let width = ttf.glyph_hor_advance(GlyphId(gid)).unwrap_or(0);
                    font.to_em(width).to_font_units()
                })
                .collect::<Vec<_>>();

            // Write all non-zero glyph widths.
            let mut first = 0;
            let mut width_writer = cid.widths();
            for (w, group) in widths.group_by_key(|&w| w) {
                let end = first + group.len();
                if w != 0.0 {
                    let last = end - 1;
                    width_writer.same(first as u16, last as u16, w);
                }
                first = end;
            }

            width_writer.finish();
            cid.finish();

            // Write the /ToUnicode character map, which maps glyph ids back to
            // unicode codepoints to enable copying out of the PDF.
            let cmap = create_cmap(glyph_set, glyph_remapper);
            chunk
                .cmap(cmap_ref, &cmap)
                .writing_mode(WMode::Horizontal)
                .filter(Filter::FlateDecode);

            let subset = subset_font(font, glyph_remapper)
                .map_err(|err| {
                    let postscript_name = font.find_name(name_id::POST_SCRIPT_NAME);
                    let name = postscript_name.as_deref().unwrap_or(&font.info().family);
                    eco_format!("failed to process font {name}: {err}")
                })
                .at(Span::detached())?;

            let mut stream = chunk.stream(data_ref, &subset);
            stream.filter(Filter::FlateDecode);
            if is_cff {
                stream.pair(Name(b"Subtype"), Name(b"CIDFontType0C"));
            }
            stream.finish();

            let mut font_descriptor =
                write_font_descriptor(&mut chunk, descriptor_ref, font, &base_font);
            if is_cff {
                font_descriptor.font_file3(data_ref);
            } else {
                font_descriptor.font_file2(data_ref);
            }
        }

        Ok(())
    })?;

    Ok((chunk, out))
}

/// Writes a FontDescriptor dictionary.
pub fn write_font_descriptor<'a>(
    pdf: &'a mut Chunk,
    descriptor_ref: Ref,
    font: &'a Font,
    base_font: &str,
) -> FontDescriptor<'a> {
    let ttf = font.ttf();
    let metrics = font.metrics();
    let serif = font
        .find_name(name_id::POST_SCRIPT_NAME)
        .is_some_and(|name| name.contains("Serif"));

    let mut flags = FontFlags::empty();
    flags.set(FontFlags::SERIF, serif);
    flags.set(FontFlags::FIXED_PITCH, ttf.is_monospaced());
    flags.set(FontFlags::ITALIC, ttf.is_italic());
    flags.insert(FontFlags::SYMBOLIC);
    flags.insert(FontFlags::SMALL_CAP);

    let global_bbox = ttf.global_bounding_box();
    let bbox = Rect::new(
        font.to_em(global_bbox.x_min).to_font_units(),
        font.to_em(global_bbox.y_min).to_font_units(),
        font.to_em(global_bbox.x_max).to_font_units(),
        font.to_em(global_bbox.y_max).to_font_units(),
    );

    let italic_angle = ttf.italic_angle().unwrap_or(0.0);
    let ascender = metrics.ascender.to_font_units();
    let descender = metrics.descender.to_font_units();
    let cap_height = metrics.cap_height.to_font_units();
    let stem_v = 10.0 + 0.244 * (f32::from(ttf.weight().to_number()) - 50.0);

    // Write the font descriptor (contains metrics about the font).
    let mut font_descriptor = pdf.font_descriptor(descriptor_ref);
    font_descriptor
        .name(Name(base_font.as_bytes()))
        .flags(flags)
        .bbox(bbox)
        .italic_angle(italic_angle)
        .ascent(ascender)
        .descent(descender)
        .cap_height(cap_height)
        .stem_v(stem_v);

    font_descriptor
}

/// Subset a font to the given glyphs.
///
/// - For a font with TrueType outlines, this produces the whole OpenType font.
/// - For a font with CFF outlines, this produces just the CFF font program.
///
/// In both cases, this returns the already compressed data.
#[comemo::memoize]
#[typst_macros::time(name = "subset font")]
fn subset_font(
    font: &Font,
    glyph_remapper: &GlyphRemapper,
) -> Result<Arc<Vec<u8>>, subsetter::Error> {
    let data = font.data();
    let subset = subsetter::subset(data, font.index(), glyph_remapper)?;
    let mut data = subset.as_ref();

    // Extract the standalone CFF font program if applicable.
    let raw = ttf_parser::RawFace::parse(data, 0).unwrap();
    if let Some(cff) = raw.table(CFF) {
        data = cff;
    }

    Ok(Arc::new(deflate(data)))
}

/// Creates the base font name for a font with a specific glyph subset.
/// Consists of a subset tag and the PostScript name of the font.
///
/// Returns a string of length at most 116, so that even with `-Identity-H`
/// appended it does not exceed the maximum PDF/A name length of 127.
pub(crate) fn base_font_name<T: Hash>(font: &Font, glyphs: &T) -> EcoString {
    const MAX_LEN: usize = Name::PDFA_LIMIT - REST_LEN;
    const REST_LEN: usize = SUBSET_TAG_LEN + 1 + 1 + IDENTITY_H.len();

    let postscript_name = font.find_name(name_id::POST_SCRIPT_NAME);
    let name = postscript_name.as_deref().unwrap_or("unknown");
    let trimmed = &name[..name.len().min(MAX_LEN)];

    // Hash the full name (we might have trimmed) and the glyphs to produce
    // a fairly unique subset tag.
    let subset_tag = subset_tag(&(name, glyphs));

    eco_format!("{subset_tag}+{trimmed}")
}
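
// Worked length check for `base_font_name` (derived from the constants above):
// REST_LEN = 6 (subset tag) + 1 ('+') + 1 ('-') + 10 ("Identity-H") = 18, so
// MAX_LEN = 127 - 18 = 109. The longest returned string is therefore
// 6 + 1 + 109 = 116 characters, and appending "-Identity-H" for Type0 fonts
// yields at most 116 + 11 = 127, exactly the PDF/A name limit mentioned above.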

/// Produce a unique 6-letter tag for a glyph set.
pub(crate) fn subset_tag<T: Hash>(glyphs: &T) -> EcoString {
    const BASE: u128 = 26;
    let mut hash = typst_utils::hash128(&glyphs);
    let mut letter = [b'A'; SUBSET_TAG_LEN];
    for l in letter.iter_mut() {
        *l = b'A' + (hash % BASE) as u8;
        hash /= BASE;
    }
    std::str::from_utf8(&letter).unwrap().into()
}
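
// Illustrative, self-contained version of the base-26 digit extraction in
// `subset_tag` (hypothetical helper with a fixed width of six letters): each
// iteration consumes one base-26 digit of the hash, least significant first,
// and maps it to 'A'..='Z'.
fn tag_from_hash(mut hash: u128) -> String {
    let mut letters = [b'A'; 6];
    for l in letters.iter_mut() {
        *l = b'A' + (hash % 26) as u8;
        hash /= 26;
    }
    String::from_utf8(letters.to_vec()).unwrap()
}
// For example, tag_from_hash(27) == "BBAAAA", since 27 = 1 + 1 * 26.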

/// Create a compressed `/ToUnicode` CMap.
#[comemo::memoize]
#[typst_macros::time(name = "create cmap")]
fn create_cmap(
    glyph_set: &BTreeMap<u16, EcoString>,
    glyph_remapper: &GlyphRemapper,
) -> Arc<Vec<u8>> {
    // Produce a reverse mapping from glyphs' CIDs to unicode strings.
    let mut cmap = UnicodeCmap::new(CMAP_NAME, SYSTEM_INFO);
    for (&g, text) in glyph_set.iter() {
        // See the comment in `write_normal_text` for why we can choose the CID this way.
        let cid = glyph_remapper.get(g).unwrap();
        if !text.is_empty() {
            cmap.pair_with_multiple(cid, text.chars());
        }
    }
    Arc::new(deflate(&cmap.finish()))
}
@ -1,512 +0,0 @@
use std::collections::HashMap;
use std::f32::consts::{PI, TAU};
use std::sync::Arc;

use ecow::eco_format;
use pdf_writer::types::{ColorSpaceOperand, FunctionShadingType};
use pdf_writer::writers::StreamShadingType;
use pdf_writer::{Filter, Finish, Name, Ref};
use typst_library::diag::SourceResult;
use typst_library::layout::{Abs, Angle, Point, Quadrant, Ratio, Transform};
use typst_library::visualize::{
    Color, ColorSpace, Gradient, RatioOrAngle, RelativeTo, WeightedColor,
};
use typst_utils::Numeric;

use crate::color_old::{
    self, check_cmyk_allowed, ColorSpaceExt, PaintEncode, QuantizedColor,
};
use crate::{content_old, deflate, transform_to_array, AbsExt, PdfChunk, WithGlobalRefs};

/// A unique-transform-aspect-ratio combination that will be encoded into the
/// PDF.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct PdfGradient {
    /// The transform to apply to the gradient.
    pub transform: Transform,
    /// The aspect ratio of the gradient.
    /// Required for aspect ratio correction.
    pub aspect_ratio: Ratio,
    /// The gradient.
    pub gradient: Gradient,
    /// The corrected angle of the gradient.
    pub angle: Angle,
}

/// Writes the actual gradients (shading patterns) to the PDF.
/// This is performed once after writing all pages.
pub fn write_gradients(
    context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<PdfGradient, Ref>)> {
    let mut chunk = PdfChunk::new();
    let mut out = HashMap::new();
    context.resources.traverse(&mut |resources| {
        for pdf_gradient in resources.gradients.items() {
            if out.contains_key(pdf_gradient) {
                continue;
            }

            let shading = chunk.alloc();
            out.insert(pdf_gradient.clone(), shading);

            let PdfGradient { transform, aspect_ratio, gradient, angle } = pdf_gradient;

            let color_space = if gradient.space().hue_index().is_some() {
                ColorSpace::Oklab
            } else {
                gradient.space()
            };

            if color_space == ColorSpace::Cmyk {
                check_cmyk_allowed(context.options)?;
            }

            let mut shading_pattern = match &gradient {
                Gradient::Linear(_) => {
                    let shading_function =
                        shading_function(gradient, &mut chunk, color_space);
                    let mut shading_pattern = chunk.chunk.shading_pattern(shading);
                    let mut shading = shading_pattern.function_shading();
                    shading.shading_type(FunctionShadingType::Axial);

                    color_old::write(
                        color_space,
                        shading.color_space(),
                        &context.globals.color_functions,
                    );

                    let (mut sin, mut cos) = (angle.sin(), angle.cos());

                    // Scale to edges of unit square.
                    let factor = cos.abs() + sin.abs();
                    sin *= factor;
                    cos *= factor;

                    let (x1, y1, x2, y2): (f64, f64, f64, f64) = match angle.quadrant() {
                        Quadrant::First => (0.0, 0.0, cos, sin),
                        Quadrant::Second => (1.0, 0.0, cos + 1.0, sin),
                        Quadrant::Third => (1.0, 1.0, cos + 1.0, sin + 1.0),
                        Quadrant::Fourth => (0.0, 1.0, cos, sin + 1.0),
                    };

                    shading
                        .anti_alias(gradient.anti_alias())
                        .function(shading_function)
                        .coords([x1 as f32, y1 as f32, x2 as f32, y2 as f32])
                        .extend([true; 2]);

                    shading.finish();

                    shading_pattern
                }
                Gradient::Radial(radial) => {
                    let shading_function =
                        shading_function(gradient, &mut chunk, color_space_of(gradient));
                    let mut shading_pattern = chunk.chunk.shading_pattern(shading);
                    let mut shading = shading_pattern.function_shading();
                    shading.shading_type(FunctionShadingType::Radial);

                    color_old::write(
                        color_space,
                        shading.color_space(),
                        &context.globals.color_functions,
                    );

                    shading
                        .anti_alias(gradient.anti_alias())
                        .function(shading_function)
                        .coords([
                            radial.focal_center.x.get() as f32,
                            radial.focal_center.y.get() as f32,
                            radial.focal_radius.get() as f32,
                            radial.center.x.get() as f32,
                            radial.center.y.get() as f32,
                            radial.radius.get() as f32,
                        ])
                        .extend([true; 2]);

                    shading.finish();

                    shading_pattern
                }
                Gradient::Conic(_) => {
                    let vertices = compute_vertex_stream(gradient, *aspect_ratio);

                    let stream_shading_id = chunk.alloc();
                    let mut stream_shading =
                        chunk.chunk.stream_shading(stream_shading_id, &vertices);

                    color_old::write(
                        color_space,
                        stream_shading.color_space(),
                        &context.globals.color_functions,
                    );

                    let range = color_space.range();
                    stream_shading
                        .bits_per_coordinate(16)
                        .bits_per_component(16)
                        .bits_per_flag(8)
                        .shading_type(StreamShadingType::CoonsPatch)
                        .decode(
                            [0.0, 1.0, 0.0, 1.0].into_iter().chain(range.iter().copied()),
                        )
                        .anti_alias(gradient.anti_alias())
                        .filter(Filter::FlateDecode);

                    stream_shading.finish();

                    let mut shading_pattern = chunk.shading_pattern(shading);
                    shading_pattern.shading_ref(stream_shading_id);
                    shading_pattern
                }
            };

            shading_pattern.matrix(transform_to_array(*transform));
        }

        Ok(())
    })?;

    Ok((chunk, out))
}

/// Writes an exponential or stitched function that expresses the gradient.
fn shading_function(
    gradient: &Gradient,
    chunk: &mut PdfChunk,
    color_space: ColorSpace,
) -> Ref {
    let function = chunk.alloc();
    let mut functions = vec![];
    let mut bounds = vec![];
    let mut encode = vec![];

    // Create the individual gradient functions for each pair of stops.
    for window in gradient.stops_ref().windows(2) {
        let (first, second) = (window[0], window[1]);

        // If we have a hue index or are using Oklab, we will create several
        // stops in-between to make the gradient smoother without interpolation
        // issues with native color spaces.
        let mut last_c = first.0;
        if gradient.space().hue_index().is_some() {
            for i in 0..=32 {
                let t = i as f64 / 32.0;
                let real_t = first.1.get() * (1.0 - t) + second.1.get() * t;

                let c = gradient.sample(RatioOrAngle::Ratio(Ratio::new(real_t)));
                functions.push(single_gradient(chunk, last_c, c, color_space));
                bounds.push(real_t as f32);
                encode.extend([0.0, 1.0]);
                last_c = c;
            }
        }

        bounds.push(second.1.get() as f32);
        functions.push(single_gradient(chunk, first.0, second.0, color_space));
        encode.extend([0.0, 1.0]);
    }

    // Special case for gradients with only two stops.
    if functions.len() == 1 {
        return functions[0];
    }

    // Remove the last bound, since it's not needed for the stitching function.
    bounds.pop();

    // Create the stitching function.
    chunk
        .stitching_function(function)
        .domain([0.0, 1.0])
        .range(color_space.range().iter().copied())
        .functions(functions)
        .bounds(bounds)
        .encode(encode);

    function
}
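
// Worked example for the stitching function above (assumed stop positions,
// chosen for illustration): stops at t = 0.0, 0.4, and 1.0 in a non-hue
// color space produce two exponential sub-functions. After the final bound
// is popped, the stitching function is written with domain [0, 1], bounds
// [0.4] (only interior break points remain), and encode [0, 1, 0, 1], so
// each sub-function is evaluated over its full [0, 1] input range.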

/// Writes an exponential function that expresses a single segment (between two
/// stops) of a gradient.
fn single_gradient(
    chunk: &mut PdfChunk,
    first_color: Color,
    second_color: Color,
    color_space: ColorSpace,
) -> Ref {
    let reference = chunk.alloc();
    chunk
        .exponential_function(reference)
        .range(color_space.range().iter().copied())
        .c0(color_space.convert(first_color))
        .c1(color_space.convert(second_color))
        .domain([0.0, 1.0])
        .n(1.0);

    reference
}
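
// The PDF Type 2 (exponential interpolation) function written above
// evaluates, per output component j, C_j(t) = C0_j + t^N * (C1_j - C0_j).
// With the exponent fixed to n(1.0) here, this reduces to plain linear
// interpolation between the two converted stop colors.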

impl PaintEncode for Gradient {
    fn set_as_fill(
        &self,
        ctx: &mut content_old::Builder,
        on_text: bool,
        transforms: content_old::Transforms,
    ) -> SourceResult<()> {
        ctx.reset_fill_color_space();

        let index = register_gradient(ctx, self, on_text, transforms);
        let id = eco_format!("Gr{index}");
        let name = Name(id.as_bytes());

        ctx.content.set_fill_color_space(ColorSpaceOperand::Pattern);
        ctx.content.set_fill_pattern(None, name);
        Ok(())
    }

    fn set_as_stroke(
        &self,
        ctx: &mut content_old::Builder,
        on_text: bool,
        transforms: content_old::Transforms,
    ) -> SourceResult<()> {
        ctx.reset_stroke_color_space();

        let index = register_gradient(ctx, self, on_text, transforms);
        let id = eco_format!("Gr{index}");
        let name = Name(id.as_bytes());

        ctx.content.set_stroke_color_space(ColorSpaceOperand::Pattern);
        ctx.content.set_stroke_pattern(None, name);
        Ok(())
    }
}

/// Deduplicates a gradient to a named PDF resource.
fn register_gradient(
    ctx: &mut content_old::Builder,
    gradient: &Gradient,
    on_text: bool,
    mut transforms: content_old::Transforms,
) -> usize {
    // Edge cases for strokes.
    if transforms.size.x.is_zero() {
        transforms.size.x = Abs::pt(1.0);
    }

    if transforms.size.y.is_zero() {
        transforms.size.y = Abs::pt(1.0);
    }
    let size = match gradient.unwrap_relative(on_text) {
        RelativeTo::Self_ => transforms.size,
        RelativeTo::Parent => transforms.container_size,
    };

    let (offset_x, offset_y) = match gradient {
        Gradient::Conic(conic) => (
            -size.x * (1.0 - conic.center.x.get() / 2.0) / 2.0,
            -size.y * (1.0 - conic.center.y.get() / 2.0) / 2.0,
        ),
        _ => (Abs::zero(), Abs::zero()),
    };

    let rotation = gradient.angle().unwrap_or_else(Angle::zero);

    let transform = match gradient.unwrap_relative(on_text) {
        RelativeTo::Self_ => transforms.transform,
        RelativeTo::Parent => transforms.container_transform,
    };

    let scale_offset = match gradient {
        Gradient::Conic(_) => 4.0_f64,
        _ => 1.0,
    };

    let pdf_gradient = PdfGradient {
        aspect_ratio: size.aspect_ratio(),
        transform: transform
            .pre_concat(Transform::translate(
                offset_x * scale_offset,
                offset_y * scale_offset,
            ))
            .pre_concat(Transform::scale(
                Ratio::new(size.x.to_pt() * scale_offset),
                Ratio::new(size.y.to_pt() * scale_offset),
            )),
        gradient: gradient.clone(),
        angle: Gradient::correct_aspect_ratio(rotation, size.aspect_ratio()),
    };

    ctx.resources.colors.mark_as_used(color_space_of(gradient));

    ctx.resources.gradients.insert(pdf_gradient)
}

/// Writes a single Coons Patch, as defined in the PDF specification,
/// to a binary vec.
///
/// Structure:
/// - flag: `u8`
/// - points: `[u16; 24]`
/// - colors: `[u16; 4*N]` (N = number of components)
fn write_patch(
    target: &mut Vec<u8>,
    t: f32,
    t1: f32,
    c0: &[u16],
    c1: &[u16],
    angle: Angle,
) {
    let theta = -TAU * t + angle.to_rad() as f32 + PI;
    let theta1 = -TAU * t1 + angle.to_rad() as f32 + PI;

    let (cp1, cp2) =
        control_point(Point::new(Abs::pt(0.5), Abs::pt(0.5)), 0.5, theta, theta1);

    // Push the flag.
    target.push(0);

    let p1 =
        [u16::quantize(0.5, [0.0, 1.0]).to_be(), u16::quantize(0.5, [0.0, 1.0]).to_be()];

    let p2 = [
        u16::quantize(theta.cos(), [-1.0, 1.0]).to_be(),
        u16::quantize(theta.sin(), [-1.0, 1.0]).to_be(),
    ];

    let p3 = [
        u16::quantize(theta1.cos(), [-1.0, 1.0]).to_be(),
        u16::quantize(theta1.sin(), [-1.0, 1.0]).to_be(),
    ];

    let cp1 = [
        u16::quantize(cp1.x.to_f32(), [0.0, 1.0]).to_be(),
        u16::quantize(cp1.y.to_f32(), [0.0, 1.0]).to_be(),
    ];

    let cp2 = [
        u16::quantize(cp2.x.to_f32(), [0.0, 1.0]).to_be(),
        u16::quantize(cp2.y.to_f32(), [0.0, 1.0]).to_be(),
    ];

    // Push the points.
    target.extend_from_slice(bytemuck::cast_slice(&[
        p1, p1, p2, p2, cp1, cp2, p3, p3, p1, p1, p1, p1,
    ]));

    // Push the colors.
    let colors = [c0, c0, c1, c1]
        .into_iter()
        .flat_map(|c| c.iter().copied().map(u16::to_be_bytes))
        .flatten();

    target.extend(colors);
}
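
// Byte-budget check for one patch record (derived from the layout above):
// 1 flag byte + 12 points * 2 coordinates * 2 bytes = 49 bytes of geometry,
// plus 4 corner colors * N components * 2 bytes. For a three-component
// space such as Oklab, that comes to 49 + 24 = 73 bytes per Coons patch.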

fn control_point(c: Point, r: f32, angle_start: f32, angle_end: f32) -> (Point, Point) {
    let n = (TAU / (angle_end - angle_start)).abs();
    let f = ((angle_end - angle_start) / n).tan() * 4.0 / 3.0;

    let p1 = c + Point::new(
        Abs::pt((r * angle_start.cos() - f * r * angle_start.sin()) as f64),
        Abs::pt((r * angle_start.sin() + f * r * angle_start.cos()) as f64),
    );

    let p2 = c + Point::new(
        Abs::pt((r * angle_end.cos() + f * r * angle_end.sin()) as f64),
        Abs::pt((r * angle_end.sin() - f * r * angle_end.cos()) as f64),
    );

    (p1, p2)
}

#[comemo::memoize]
fn compute_vertex_stream(gradient: &Gradient, aspect_ratio: Ratio) -> Arc<Vec<u8>> {
    let Gradient::Conic(conic) = gradient else { unreachable!() };

    // Generated vertices for the Coons patches.
    let mut vertices = Vec::new();

    // Correct the gradient's angle.
    let angle = Gradient::correct_aspect_ratio(conic.angle, aspect_ratio);

    for window in conic.stops.windows(2) {
        let ((c0, t0), (c1, t1)) = (window[0], window[1]);

        // Precision:
        // - On an even color, insert a stop every 90deg.
        // - For a hue-based color space, insert 200 stops minimum.
        // - On any other, insert 20 stops minimum.
        let max_dt = if c0 == c1 {
            0.25
        } else if conic.space.hue_index().is_some() {
            0.005
        } else {
            0.05
        };
        let encode_space = conic
            .space
            .hue_index()
            .map(|_| ColorSpace::Oklab)
            .unwrap_or(conic.space);
        let mut t_x = t0.get();
        let dt = (t1.get() - t0.get()).min(max_dt);

        // Special casing for sharp gradients.
        if t0 == t1 {
            write_patch(
                &mut vertices,
                t0.get() as f32,
                t1.get() as f32,
                &encode_space.convert(c0),
                &encode_space.convert(c1),
                angle,
            );
            continue;
        }

        while t_x < t1.get() {
            let t_next = (t_x + dt).min(t1.get());

            // The current progress in the current window.
            let t = |t| (t - t0.get()) / (t1.get() - t0.get());
            let c = Color::mix_iter(
                [WeightedColor::new(c0, 1.0 - t(t_x)), WeightedColor::new(c1, t(t_x))],
                conic.space,
            )
            .unwrap();

            let c_next = Color::mix_iter(
                [
                    WeightedColor::new(c0, 1.0 - t(t_next)),
                    WeightedColor::new(c1, t(t_next)),
                ],
                conic.space,
            )
            .unwrap();

            write_patch(
                &mut vertices,
                t_x as f32,
                t_next as f32,
                &encode_space.convert(c),
                &encode_space.convert(c_next),
                angle,
            );

            t_x = t_next;
        }
    }

    Arc::new(deflate(&vertices))
}

fn color_space_of(gradient: &Gradient) -> ColorSpace {
    if gradient.space().hue_index().is_some() {
        ColorSpace::Oklab
    } else {
        gradient.space()
    }
}
@ -1,236 +0,0 @@
use ecow::eco_format;
use image::{GenericImageView, Rgba};
use pdf_writer::{Chunk, Filter, Finish, Ref};
use std::collections::HashMap;
use typst_library::diag::{At, SourceResult, StrResult};
use typst_library::visualize::{ColorSpace, Image, ImageKind, RasterImage, SvgImage};
use typst_utils::Deferred;

use crate::{color_old, deflate, PdfChunk, WithGlobalRefs};

/// Embed all used images into the PDF.
#[typst_macros::time(name = "write images")]
pub fn write_images(
    context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<Image, Ref>)> {
    let mut chunk = PdfChunk::new();
    let mut out = HashMap::new();
    context.resources.traverse(&mut |resources| {
        for (i, image) in resources.images.items().enumerate() {
            if out.contains_key(image) {
                continue;
            }

            let (handle, span) = resources.deferred_images.get(&i).unwrap();
            let encoded = handle.wait().as_ref().map_err(Clone::clone).at(*span)?;

            match encoded {
                EncodedImage::Raster {
                    data,
                    filter,
                    has_color,
                    width,
                    height,
                    icc,
                    alpha,
                } => {
                    let image_ref = chunk.alloc();
                    out.insert(image.clone(), image_ref);

                    let mut image = chunk.chunk.image_xobject(image_ref, data);
                    image.filter(*filter);
                    image.width(*width as i32);
                    image.height(*height as i32);
                    image.bits_per_component(8);

                    let mut icc_ref = None;
                    let space = image.color_space();
                    if icc.is_some() {
                        let id = chunk.alloc.bump();
                        space.icc_based(id);
                        icc_ref = Some(id);
                    } else if *has_color {
                        color_old::write(
                            ColorSpace::Srgb,
                            space,
                            &context.globals.color_functions,
                        );
                    } else {
                        color_old::write(
                            ColorSpace::D65Gray,
                            space,
                            &context.globals.color_functions,
                        );
                    }

                    // Add a second gray-scale image containing the alpha values if
                    // this image has an alpha channel.
                    if let Some((alpha_data, alpha_filter)) = alpha {
                        let mask_ref = chunk.alloc.bump();
                        image.s_mask(mask_ref);
                        image.finish();

                        let mut mask = chunk.image_xobject(mask_ref, alpha_data);
                        mask.filter(*alpha_filter);
                        mask.width(*width as i32);
                        mask.height(*height as i32);
                        mask.color_space().device_gray();
                        mask.bits_per_component(8);
                    } else {
                        image.finish();
                    }

                    if let (Some(icc), Some(icc_ref)) = (icc, icc_ref) {
                        let mut stream = chunk.icc_profile(icc_ref, icc);
                        stream.filter(Filter::FlateDecode);
                        if *has_color {
                            stream.n(3);
                            stream.alternate().srgb();
                        } else {
                            stream.n(1);
                            stream.alternate().d65_gray();
                        }
                    }
                }
                EncodedImage::Svg(svg_chunk, id) => {
                    let mut map = HashMap::new();
                    svg_chunk.renumber_into(&mut chunk.chunk, |old| {
                        *map.entry(old).or_insert_with(|| chunk.alloc.bump())
                    });
                    out.insert(image.clone(), map[id]);
                }
            }
        }

        Ok(())
    })?;

    Ok((chunk, out))
}

/// Creates a new PDF image from the given image.
///
/// Also starts the deferred encoding of the image.
#[comemo::memoize]
pub fn deferred_image(
    image: Image,
    pdfa: bool,
) -> (Deferred<StrResult<EncodedImage>>, Option<ColorSpace>) {
    let color_space = match image.kind() {
        ImageKind::Raster(raster) if raster.icc().is_none() => {
            if raster.dynamic().color().channel_count() > 2 {
                Some(ColorSpace::Srgb)
            } else {
                Some(ColorSpace::D65Gray)
            }
        }
        _ => None,
    };

    let deferred = Deferred::new(move || match image.kind() {
        ImageKind::Raster(raster) => {
            let raster = raster.clone();
            let (width, height) = (raster.width(), raster.height());
            let (data, filter, has_color) = encode_raster_image(&raster);
            let icc = raster.icc().map(deflate);

            let alpha =
                raster.dynamic().color().has_alpha().then(|| encode_alpha(&raster));

            Ok(EncodedImage::Raster {
                data,
                filter,
                has_color,
                width,
                height,
                icc,
                alpha,
            })
        }
        ImageKind::Svg(svg) => {
            let (chunk, id) = encode_svg(svg, pdfa)
                .map_err(|err| eco_format!("failed to convert SVG to PDF: {err}"))?;
            Ok(EncodedImage::Svg(chunk, id))
        }
    });

    (deferred, color_space)
}

/// Encode an image with a suitable filter and return the data, filter, and
/// whether the image has color.
///
/// Skips the alpha channel, as that's encoded separately.
#[typst_macros::time(name = "encode raster image")]
fn encode_raster_image(image: &RasterImage) -> (Vec<u8>, Filter, bool) {
    // let dynamic = image.dynamic();
    // let channel_count = dynamic.color().channel_count();
    // let has_color = channel_count > 2;
    //
    // if image.format() == RasterFormat::Jpg {
    //     let mut data = Cursor::new(vec![]);
    //     dynamic.write_to(&mut data, image::ImageFormat::Jpeg).unwrap();
    //     (data.into_inner(), Filter::DctDecode, has_color)
    // } else {
    //     // TODO: Encode flate streams with PNG-predictor?
    //     let data = match (dynamic, channel_count) {
    //         (DynamicImage::ImageLuma8(luma), _) => deflate(luma.as_raw()),
    //         (DynamicImage::ImageRgb8(rgb), _) => deflate(rgb.as_raw()),
    //         // Grayscale image
    //         (_, 1 | 2) => deflate(dynamic.to_luma8().as_raw()),
    //         // Anything else
    //         _ => deflate(dynamic.to_rgb8().as_raw()),
    //     };
    //     (data, Filter::FlateDecode, has_color)
    // }
    unimplemented!()
}

/// Encode an image's alpha channel if present.
#[typst_macros::time(name = "encode alpha")]
fn encode_alpha(raster: &RasterImage) -> (Vec<u8>, Filter) {
    let pixels: Vec<_> = raster
        .dynamic()
        .pixels()
        .map(|(_, _, Rgba([_, _, _, a]))| a)
        .collect();
    (deflate(&pixels), Filter::FlateDecode)
}

/// Encode an SVG into a chunk of PDF objects.
#[typst_macros::time(name = "encode svg")]
fn encode_svg(
    svg: &SvgImage,
    pdfa: bool,
) -> Result<(Chunk, Ref), svg2pdf::ConversionError> {
    unimplemented!();
    // svg2pdf::to_chunk(
    //     svg.tree(),
    //     svg2pdf::ConversionOptions { pdfa, ..Default::default() },
    // )
}

/// A pre-encoded image.
pub enum EncodedImage {
    /// A pre-encoded rasterized image.
    Raster {
        /// The raw, pre-deflated image data.
        data: Vec<u8>,
        /// The filter to use for the image.
        filter: Filter,
        /// Whether the image has color.
        has_color: bool,
        /// The image's width.
        width: u32,
        /// The image's height.
        height: u32,
        /// The image's ICC profile, pre-deflated, if any.
        icc: Option<Vec<u8>>,
        /// The alpha channel of the image, pre-deflated, if any.
        alpha: Option<(Vec<u8>, Filter)>,
    },
    /// A vector graphic.
    ///
    /// The chunk is the SVG converted to PDF objects.
    Svg(Chunk, Ref),
}
@ -1,21 +1,18 @@
use crate::primitive::{PointExt, SizeExt, TransformExt};
use crate::{paint, AbsExt, PdfOptions};
use bytemuck::TransparentWrapper;
use ecow::{eco_format, EcoString};
use ecow::EcoString;
use krilla::action::{Action, LinkAction};
use krilla::annotation::{LinkAnnotation, Target};
use krilla::destination::XyzDestination;
use krilla::font::{GlyphId, GlyphUnits};
use krilla::path::PathBuilder;
use krilla::surface::Surface;
use krilla::validation::Validator;
use krilla::version::PdfVersion;
use krilla::{PageSettings, SerializeSettings, SvgSettings};
use std::collections::{BTreeMap, HashMap};
use std::fs::metadata;
use std::ops::Range;
use std::sync::Arc;
use svg2pdf::usvg::Rect;
use krilla::geom::Rect;
use typst_library::diag::{bail, SourceResult};
use typst_library::foundations::Datetime;
use typst_library::layout::{
@ -1,51 +1,17 @@
//! Exporting of Typst documents into PDFs.

mod catalog_old;
mod color_font;
mod color_old;
mod content_old;
mod extg_old;
mod font_old;
mod gradient_old;
mod image;
mod image_old;
mod krilla;
mod named_destination_old;
mod outline_old;
mod page_old;
mod paint;
mod pattern_old;
mod primitive;
mod resources_old;

use base64::Engine;
use pdf_writer::{Chunk, Name, Pdf, Ref, Str, TextStr};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::fmt::{self, Debug, Formatter};
mod image;
mod krilla;
mod paint;
mod primitive;

use std::fmt::Debug;
use std::hash::Hash;
use std::ops::{Deref, DerefMut};
use typst_library::diag::{bail, SourceResult, StrResult};
use typst_library::foundations::{Datetime, Smart};
use typst_library::layout::{Abs, Em, PageRanges, PagedDocument, Transform};
use typst_library::text::Font;
use typst_library::visualize::Image;
use typst_syntax::Span;
use typst_utils::Deferred;

use crate::catalog_old::write_catalog;
use crate::color_font::{write_color_fonts, ColorFontSlice};
use crate::color_old::{alloc_color_functions_refs, ColorFunctionRefs};
use crate::extg_old::{write_graphic_states, ExtGState};
use crate::font_old::write_fonts;
use crate::gradient_old::{write_gradients, PdfGradient};
use crate::image_old::write_images;
use crate::named_destination_old::{write_named_destinations, NamedDestinations};
use crate::page_old::{alloc_page_refs, traverse_pages, write_page_tree, EncodedPage};
use crate::pattern_old::{write_patterns, PdfPattern};
use crate::resources_old::{
    alloc_resources_refs, write_resource_dictionaries, Resources, ResourcesRefs,
};

/// Export a document into a PDF file.
///
@ -53,29 +19,6 @@ use crate::resources_old::{
#[typst_macros::time(name = "pdf")]
pub fn pdf(document: &PagedDocument, options: &PdfOptions) -> SourceResult<Vec<u8>> {
    krilla::pdf(document, options)
    // PdfBuilder::new(document, options)
    //     .phase(|builder| builder.run(traverse_pages))?
    //     .phase(|builder| {
    //         Ok(GlobalRefs {
    //             color_functions: builder.run(alloc_color_functions_refs)?,
    //             pages: builder.run(alloc_page_refs)?,
    //             resources: builder.run(alloc_resources_refs)?,
    //         })
    //     })?
    //     .phase(|builder| {
    //         Ok(References {
    //             named_destinations: builder.run(write_named_destinations)?,
    //             fonts: builder.run(write_fonts)?,
    //             color_fonts: builder.run(write_color_fonts)?,
    //             images: builder.run(write_images)?,
    //             gradients: builder.run(write_gradients)?,
    //             patterns: builder.run(write_patterns)?,
    //             ext_gs: builder.run(write_graphic_states)?,
    //         })
    //     })?
    //     .phase(|builder| builder.run(write_page_tree))?
    //     .phase(|builder| builder.run(write_resource_dictionaries))?
    //     .export_with(write_catalog)
}
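
// Hypothetical usage sketch for the krilla-backed entry point above, assuming
// a compiled `PagedDocument` named `document` and that `PdfOptions` provides
// a `Default` implementation:
//
//     let bytes = typst_pdf::pdf(&document, &PdfOptions::default())?;
//     std::fs::write("output.pdf", bytes)?;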

pub use ::krilla::validation::Validator;
@ -108,427 +51,6 @@ pub struct PdfOptions<'a> {
    pub validator: Validator,
}

/// Encapsulates a list of compatible PDF standards.
#[derive(Clone)]
pub struct PdfStandards {
    /// For now, we simplify to just PDF/A, since we only support PDF/A-2b. But
    /// it can be more fine-grained in the future.
    pub(crate) pdfa: bool,
}

impl PdfStandards {
    /// Validates a list of PDF standards for compatibility and returns their
    /// encapsulated representation.
    pub fn new(list: &[PdfStandard]) -> StrResult<Self> {
        Ok(Self { pdfa: list.contains(&PdfStandard::A_2b) })
    }
}

impl Debug for PdfStandards {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        f.pad("PdfStandards(..)")
    }
}

#[allow(clippy::derivable_impls)]
impl Default for PdfStandards {
    fn default() -> Self {
        Self { pdfa: false }
    }
}

/// A PDF standard that Typst can enforce conformance with.
///
/// Support for more standards is planned.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Serialize, Deserialize)]
#[allow(non_camel_case_types)]
#[non_exhaustive]
pub enum PdfStandard {
    /// PDF 1.7.
    #[serde(rename = "1.7")]
    V_1_7,
    /// PDF/A-2b.
    #[serde(rename = "a-2b")]
    A_2b,
}

/// A struct to build a PDF following a fixed succession of phases.
///
/// This type uses generics to represent its current state. `S` (for "state") is
/// all data that was produced by the previous phases, which is now read-only.
///
/// Phase after phase, this state will be transformed. Each phase corresponds to
/// a call to the [eponymous function](`PdfBuilder::phase`) and produces a new
/// part of the state, which will be aggregated with all other information for
/// consumption during the next phase.
///
/// In other words: this struct follows the **typestate pattern**. This prevents
/// you from using data that is not yet available, at the type level.
///
/// Each phase consists of processes that can read the state of the previous
/// phases and construct a part of the new state.
///
/// A final step, which has direct access to the global reference allocator and
/// the PDF document, can be run with [`PdfBuilder::export_with`].
struct PdfBuilder<S> {
    /// The context that has been accumulated so far.
    state: S,
    /// A global bump allocator.
    alloc: Ref,
    /// The PDF document that is being written.
    pdf: Pdf,
}
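
// A minimal, self-contained sketch of the typestate idea described above,
// with hypothetical names (`Start`, `WithPages`) standing in for the real
// phases: data from a later phase cannot be touched before that phase ran.
//
//     struct Start;
//     struct WithPages { pages: Vec<String> }
//     struct Builder<S> { state: S }
//
//     impl Builder<Start> {
//         fn collect_pages(self) -> Builder<WithPages> {
//             Builder { state: WithPages { pages: vec!["page 1".into()] } }
//         }
//     }
//
//     impl Builder<WithPages> {
//         // Only available once pages exist; enforced at compile time.
//         fn export(self) -> Vec<String> { self.state.pages }
//     }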

/// The initial state: we are exploring the document, collecting all resources
/// that will be necessary later. The content of the pages is also built during
/// this phase.
struct WithDocument<'a> {
    /// The Typst document that is exported.
    document: &'a PagedDocument,
    /// Settings for PDF export.
    options: &'a PdfOptions<'a>,
}

/// At this point, resources have been listed, but they don't have any
/// references associated with them yet.
///
/// This phase allocates some global references.
struct WithResources<'a> {
    document: &'a PagedDocument,
    options: &'a PdfOptions<'a>,
    /// The content of the pages encoded as PDF content streams.
    ///
    /// The pages are at the index corresponding to their page number, but they
    /// may be `None` if they are not in the range specified by
    /// `exported_pages`.
    pages: Vec<Option<EncodedPage>>,
    /// The PDF resources that are used in the content of the pages.
    resources: Resources<()>,
}

/// Global references.
struct GlobalRefs {
    /// References for color conversion functions.
    color_functions: ColorFunctionRefs,
    /// References for pages.
    ///
    /// Items of this vector are `None` if the corresponding page is not
    /// exported.
    pages: Vec<Option<Ref>>,
    /// References for the resource dictionaries.
    resources: ResourcesRefs,
}

impl<'a> From<(WithDocument<'a>, (Vec<Option<EncodedPage>>, Resources<()>))>
    for WithResources<'a>
{
    fn from(
        (previous, (pages, resources)): (
            WithDocument<'a>,
            (Vec<Option<EncodedPage>>, Resources<()>),
        ),
    ) -> Self {
        Self {
            document: previous.document,
            options: previous.options,
            pages,
            resources,
        }
    }
}

/// At this point, the resources have been collected, and global references
/// have been allocated.
///
/// We are now writing objects corresponding to resources and giving them
/// references that will be collected in [`References`].
struct WithGlobalRefs<'a> {
    document: &'a PagedDocument,
    options: &'a PdfOptions<'a>,
    pages: Vec<Option<EncodedPage>>,
    /// Resources are the same as in previous phases, but each dictionary now
    /// has a reference.
    resources: Resources,
    /// Global references that were just allocated.
    globals: GlobalRefs,
}

impl<'a> From<(WithResources<'a>, GlobalRefs)> for WithGlobalRefs<'a> {
    fn from((previous, globals): (WithResources<'a>, GlobalRefs)) -> Self {
        Self {
            document: previous.document,
            options: previous.options,
            pages: previous.pages,
            resources: previous.resources.with_refs(&globals.resources),
            globals,
        }
    }
}

/// The references that have been assigned to each object.
struct References {
    /// List of named destinations, each with an ID.
    named_destinations: NamedDestinations,
    /// The IDs of written fonts.
    fonts: HashMap<Font, Ref>,
    /// The IDs of written color fonts.
    color_fonts: HashMap<ColorFontSlice, Ref>,
    /// The IDs of written images.
    images: HashMap<Image, Ref>,
    /// The IDs of written gradients.
    gradients: HashMap<PdfGradient, Ref>,
    /// The IDs of written patterns.
    patterns: HashMap<PdfPattern, Ref>,
    /// The IDs of written external graphics states.
    ext_gs: HashMap<ExtGState, Ref>,
}

/// At this point, the references have been assigned to all resources. The page
/// tree is going to be written and given a reference. It is also at this point
/// that the page contents are actually written.
struct WithRefs<'a> {
    document: &'a PagedDocument,
    options: &'a PdfOptions<'a>,
    globals: GlobalRefs,
    pages: Vec<Option<EncodedPage>>,
    resources: Resources,
    /// References that were allocated for resources.
    references: References,
}

impl<'a> From<(WithGlobalRefs<'a>, References)> for WithRefs<'a> {
    fn from((previous, references): (WithGlobalRefs<'a>, References)) -> Self {
        Self {
            document: previous.document,
            options: previous.options,
            globals: previous.globals,
            pages: previous.pages,
            resources: previous.resources,
            references,
        }
    }
}

/// In this phase, we write resource dictionaries.
///
/// Each sub-resource gets its own isolated resource dictionary.
struct WithEverything<'a> {
    document: &'a PagedDocument,
    options: &'a PdfOptions<'a>,
    globals: GlobalRefs,
    pages: Vec<Option<EncodedPage>>,
    resources: Resources,
    references: References,
    /// Reference that was allocated for the page tree.
    page_tree_ref: Ref,
}

impl<'a> From<(WithEverything<'a>, ())> for WithEverything<'a> {
    fn from((this, _): (WithEverything<'a>, ())) -> Self {
        this
    }
}

impl<'a> From<(WithRefs<'a>, Ref)> for WithEverything<'a> {
    fn from((previous, page_tree_ref): (WithRefs<'a>, Ref)) -> Self {
        Self {
            document: previous.document,
            options: previous.options,
            globals: previous.globals,
            resources: previous.resources,
            references: previous.references,
            pages: previous.pages,
            page_tree_ref,
        }
    }
}

impl<'a> PdfBuilder<WithDocument<'a>> {
    /// Start building a PDF for a Typst document.
    fn new(document: &'a PagedDocument, options: &'a PdfOptions<'a>) -> Self {
        Self {
            alloc: Ref::new(1),
            pdf: Pdf::new(),
            state: WithDocument { document, options },
        }
    }
}

impl<S> PdfBuilder<S> {
    /// Start a new phase and save its output in the global state.
    fn phase<NS, B, O>(mut self, builder: B) -> SourceResult<PdfBuilder<NS>>
    where
        // New state
        NS: From<(S, O)>,
        // Builder
        B: Fn(&mut Self) -> SourceResult<O>,
    {
        let output = builder(&mut self)?;
        Ok(PdfBuilder {
            state: NS::from((self.state, output)),
            alloc: self.alloc,
            pdf: self.pdf,
        })
    }

    /// Run a step with the current state, merge its output into the PDF file,
    /// and renumber any references it returned.
    fn run<P, O>(&mut self, process: P) -> SourceResult<O>
    where
        // Process
        P: Fn(&S) -> SourceResult<(PdfChunk, O)>,
        // Output
        O: Renumber,
    {
        let (chunk, mut output) = process(&self.state)?;
        // Allocate a final reference for each temporary one.
        let allocated = chunk.alloc.get() - TEMPORARY_REFS_START;
        let offset = TEMPORARY_REFS_START - self.alloc.get();

        // Merge the chunk into the PDF, using the new references.
        chunk.renumber_into(&mut self.pdf, |mut r| {
            r.renumber(offset);

            r
        });

        // Also update the references in the output.
        output.renumber(offset);

        self.alloc = Ref::new(self.alloc.get() + allocated);

        Ok(output)
    }
|
||||
|
||||
/// Finalize the PDF export and returns the buffer representing the
|
||||
/// document.
|
||||
fn export_with<P>(mut self, process: P) -> SourceResult<Vec<u8>>
|
||||
where
|
||||
P: Fn(S, &mut Pdf, &mut Ref) -> SourceResult<()>,
|
||||
{
|
||||
process(self.state, &mut self.pdf, &mut self.alloc)?;
|
||||
Ok(self.pdf.finish())
|
||||
}
|
||||
}
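
// A hedged sketch (illustrative, not the crate's verbatim export driver) of
// how these combinators compose: each `phase` moves the builder to its next
// state type, while `run` merges one step's chunk into the PDF being built.
//
//     PdfBuilder::new(&document, &options)
//         .phase(|b| b.run(traverse_pages))?             // -> WithResources
//         .phase(|b| { /* allocate GlobalRefs */ })?     // -> WithGlobalRefs
//         .phase(|b| { /* collect References */ })?      // -> WithRefs
//         .phase(|b| b.run(write_page_tree))?            // -> WithEverything
//         .export_with(write_catalog)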

/// A reference or collection of references that can be re-numbered,
/// to become valid in a global scope.
trait Renumber {
    /// Renumber this value by shifting any references it contains by `offset`.
    fn renumber(&mut self, offset: i32);
}

impl Renumber for () {
    fn renumber(&mut self, _offset: i32) {}
}

impl Renumber for Ref {
    fn renumber(&mut self, offset: i32) {
        if self.get() >= TEMPORARY_REFS_START {
            *self = Ref::new(self.get() - offset);
        }
    }
}
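
// Worked example (illustrative numbers): suppose the global allocator stands
// at 100 when a chunk is merged, so `offset = TEMPORARY_REFS_START - 100`.
// A temporary reference 1_000_000_042 then becomes
// 1_000_000_042 - (1_000_000_000 - 100) = 142, while a reference like 57,
// allocated globally and thus below `TEMPORARY_REFS_START`, is left alone.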

impl<R: Renumber> Renumber for Vec<R> {
    fn renumber(&mut self, offset: i32) {
        for item in self {
            item.renumber(offset);
        }
    }
}

impl<T: Eq + Hash, R: Renumber> Renumber for HashMap<T, R> {
    fn renumber(&mut self, offset: i32) {
        for v in self.values_mut() {
            v.renumber(offset);
        }
    }
}

impl<R: Renumber> Renumber for Option<R> {
    fn renumber(&mut self, offset: i32) {
        if let Some(r) = self {
            r.renumber(offset)
        }
    }
}

impl<T, R: Renumber> Renumber for (T, R) {
    fn renumber(&mut self, offset: i32) {
        self.1.renumber(offset)
    }
}

/// A portion of a PDF file.
struct PdfChunk {
    /// The actual chunk.
    chunk: Chunk,
    /// A local allocator.
    alloc: Ref,
}

/// Any reference below that value was already allocated before and
/// should not be rewritten. Anything above was allocated in the current
/// chunk, and should be remapped.
///
/// This is a constant (large enough to avoid collisions) and not
/// dependent on `self.alloc` to allow for better memoization of steps, if
/// needed in the future.
const TEMPORARY_REFS_START: i32 = 1_000_000_000;

impl PdfChunk {
    /// Start writing a new part of the document.
    fn new() -> Self {
        PdfChunk {
            chunk: Chunk::new(),
            alloc: Ref::new(TEMPORARY_REFS_START),
        }
    }

    /// Allocate a reference that is valid in the context of this chunk.
    ///
    /// References allocated with this function should be
    /// [renumbered](`Renumber::renumber`) before being used in other chunks.
    /// This is done automatically if these references are stored in the
    /// global `PdfBuilder` state.
    fn alloc(&mut self) -> Ref {
        self.alloc.bump()
    }
}

impl Deref for PdfChunk {
    type Target = Chunk;

    fn deref(&self) -> &Self::Target {
        &self.chunk
    }
}

impl DerefMut for PdfChunk {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.chunk
    }
}

/// Compress data with the DEFLATE algorithm.
fn deflate(data: &[u8]) -> Vec<u8> {
    const COMPRESSION_LEVEL: u8 = 6;
    miniz_oxide::deflate::compress_to_vec_zlib(data, COMPRESSION_LEVEL)
}

/// Memoized and deferred version of [`deflate`] specialized for a page's
/// content stream.
#[comemo::memoize]
fn deflate_deferred(content: Vec<u8>) -> Deferred<Vec<u8>> {
    Deferred::new(move || deflate(&content))
}

/// Create a base64-encoded hash of the value.
fn hash_base64<T: Hash>(value: &T) -> String {
    base64::engine::general_purpose::STANDARD
        .encode(typst_utils::hash128(value).to_be_bytes())
}
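
// For orientation: `hash128` yields 16 bytes, so the encoded result is
// always 24 base64 characters (the last two being `=` padding), which makes
// for short, deterministic identifiers wherever the exporter needs one.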

/// Additional methods for [`Abs`].
trait AbsExt {
    /// Convert an absolute length to a number of points.
@ -540,84 +62,3 @@ impl AbsExt for Abs {
        self.to_pt() as f32
    }
}

/// Additional methods for [`Em`].
trait EmExt {
    /// Convert an em length to a number of PDF font units.
    fn to_font_units(self) -> f32;
}

impl EmExt for Em {
    fn to_font_units(self) -> f32 {
        1000.0 * self.get() as f32
    }
}
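
// Example: an em value of 0.5 becomes 500.0 font units, matching the PDF
// text-space convention that a font's em square spans 1000 units.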

trait NameExt<'a> {
    /// The maximum length of a name in PDF/A.
    const PDFA_LIMIT: usize = 127;
}

impl<'a> NameExt<'a> for Name<'a> {}

/// Additional methods for [`Str`].
trait StrExt<'a>: Sized {
    /// The maximum length of a string in PDF/A.
    const PDFA_LIMIT: usize = 32767;

    /// Create a string that satisfies the constraints of PDF/A.
    #[allow(unused)]
    fn trimmed(string: &'a [u8]) -> Self;
}

impl<'a> StrExt<'a> for Str<'a> {
    fn trimmed(string: &'a [u8]) -> Self {
        Self(&string[..string.len().min(Self::PDFA_LIMIT)])
    }
}

/// Additional methods for [`TextStr`].
trait TextStrExt<'a>: Sized {
    /// The maximum length of a string in PDF/A.
    const PDFA_LIMIT: usize = Str::PDFA_LIMIT;

    /// Create a text string that satisfies the constraints of PDF/A.
    fn trimmed(string: &'a str) -> Self;
}

impl<'a> TextStrExt<'a> for TextStr<'a> {
    fn trimmed(string: &'a str) -> Self {
        Self(&string[..string.len().min(Self::PDFA_LIMIT)])
    }
}
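
// These limits mirror PDF/A's normative implementation limits (names up to
// 127 bytes, strings up to 32767 bytes). One caveat worth noting: the
// `TextStr` variant slices at a byte index, so a string longer than the
// limit whose cut point falls inside a multi-byte character would panic,
// an edge case realistic titles are unlikely to hit.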

/// Extension trait for [`Content`](pdf_writer::Content).
trait ContentExt {
    fn save_state_checked(&mut self) -> SourceResult<()>;
}

impl ContentExt for pdf_writer::Content {
    fn save_state_checked(&mut self) -> SourceResult<()> {
        self.save_state();
        if self.state_nesting_depth() > 28 {
            bail!(
                Span::detached(),
                "maximum PDF grouping depth exceeded";
                hint: "try to avoid excessive nesting of layout containers",
            );
        }
        Ok(())
    }
}

/// Convert a transform to an array of floats.
fn transform_to_array(ts: Transform) -> [f32; 6] {
    [
        ts.sx.get() as f32,
        ts.ky.get() as f32,
        ts.kx.get() as f32,
        ts.sy.get() as f32,
        ts.tx.to_f32(),
        ts.ty.to_f32(),
    ]
}
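
// The element order matches the PDF `cm` operator's [a b c d e f] matrix:
// the identity transform maps to [1, 0, 0, 1, 0, 0] and a pure translation
// by (tx, ty) to [1, 0, 0, 1, tx, ty].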
@ -1,86 +0,0 @@
use std::collections::{HashMap, HashSet};

use pdf_writer::writers::Destination;
use pdf_writer::{Ref, Str};
use typst_library::diag::SourceResult;
use typst_library::foundations::{Label, NativeElement};
use typst_library::introspection::Location;
use typst_library::layout::Abs;
use typst_library::model::HeadingElem;

use crate::{AbsExt, PdfChunk, Renumber, StrExt, WithGlobalRefs};

/// A list of destinations in the PDF document (a specific point on a specific
/// page) that have a name associated with them.
///
/// Typst creates a named destination for each heading in the document, which
/// is then written in the document catalog. PDF readers can then display
/// them to show a clickable outline of the document.
#[derive(Default)]
pub struct NamedDestinations {
    /// A map between elements and their associated labels.
    pub loc_to_dest: HashMap<Location, Label>,
    /// A sorted list of all named destinations.
    pub dests: Vec<(Label, Ref)>,
}
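
// Conceptually (an illustration, not verbatim PDF output): each entry pairs
// a string key with an indirect `/XYZ` destination on some page, e.g.
//
//     (intro-heading) -> 12 0 R
//
// so GoTo link annotations can refer to the key by string instead of
// duplicating the destination.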

impl Renumber for NamedDestinations {
    fn renumber(&mut self, offset: i32) {
        for (_, reference) in &mut self.dests {
            reference.renumber(offset);
        }
    }
}

/// Fills in the map and vector for named destinations and writes the indirect
/// destination objects.
pub fn write_named_destinations(
    context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, NamedDestinations)> {
    let mut chunk = PdfChunk::new();
    let mut out = NamedDestinations::default();
    let mut seen = HashSet::new();

    // Find all headings that have a label and are the first among other
    // headings with the same label.
    let mut matches: Vec<_> = context
        .document
        .introspector
        .query(&HeadingElem::elem().select())
        .iter()
        .filter_map(|elem| elem.location().zip(elem.label()))
        .filter(|&(_, label)| seen.insert(label))
        .collect();

    // Named destinations must be sorted by key.
    matches.sort_by_key(|&(_, label)| label.resolve());

    for (loc, label) in matches {
        // Don't encode named destinations that would exceed the limit. Those
        // will instead be encoded as normal links.
        if label.resolve().len() > Str::PDFA_LIMIT {
            continue;
        }

        let pos = context.document.introspector.position(loc);
        let index = pos.page.get() - 1;
        let y = (pos.point.y - Abs::pt(10.0)).max(Abs::zero());

        if let Some((Some(page), Some(page_ref))) =
            context.pages.get(index).zip(context.globals.pages.get(index))
        {
            let dest_ref = chunk.alloc();
            let x = pos.point.x.to_f32();
            let y = (page.content.size.y - y).to_f32();
            out.dests.push((label, dest_ref));
            out.loc_to_dest.insert(loc, label);
            chunk
                .indirect(dest_ref)
                .start::<Destination>()
                .page(*page_ref)
                .xyz(x, y, None);
        }
    }

    Ok((chunk, out))
}
@ -1,222 +0,0 @@
use std::num::NonZeroUsize;

use pdf_writer::{Finish, Pdf, Ref, TextStr};
use typst_library::foundations::{NativeElement, Packed, StyleChain};
use typst_library::layout::Abs;
use typst_library::model::HeadingElem;

use crate::{AbsExt, TextStrExt, WithEverything};

/// Construct the outline for the document.
pub(crate) fn write_outline(
    chunk: &mut Pdf,
    alloc: &mut Ref,
    ctx: &WithEverything,
) -> Option<Ref> {
    let mut tree: Vec<HeadingNode> = vec![];

    // Stores the level of the topmost skipped ancestor of the next bookmarked
    // heading. A skipped heading is a heading with 'bookmarked: false', that
    // is, it is not added to the PDF outline, and so is not in the tree.
    // Therefore, its next descendant must be added at its level, which is
    // enforced in the manner shown below.
    let mut last_skipped_level = None;
    let elements = ctx.document.introspector.query(&HeadingElem::elem().select());

    for elem in elements.iter() {
        if let Some(page_ranges) = &ctx.options.page_ranges {
            if !page_ranges
                .includes_page(ctx.document.introspector.page(elem.location().unwrap()))
            {
                // Don't bookmark headings in non-exported pages.
                continue;
            }
        }

        let heading = elem.to_packed::<HeadingElem>().unwrap();
        let leaf = HeadingNode::leaf(heading);

        if leaf.bookmarked {
            let mut children = &mut tree;

            // Descend the tree through the latest bookmarked heading of each
            // level until either:
            // - you reach a node whose children would be brothers of this
            //   heading (=> add the current heading as a child of this node);
            // - you reach a node with no children (=> this heading probably
            //   skipped a few nesting levels in Typst, or one or more
            //   ancestors of this heading weren't bookmarked, so add it as a
            //   child of this node, which is its deepest bookmarked ancestor);
            // - or, if the latest heading(s) were skipped
            //   ('bookmarked: false'), then stop if you reach a node whose
            //   children would be brothers of the latest skipped heading
            //   of lowest level (=> those skipped headings would be ancestors
            //   of the current heading, so add it as a 'brother' of the least
            //   deep skipped ancestor among them, as those ancestors weren't
            //   added to the bookmark tree, and the current heading should not
            //   be mistakenly added as a descendant of a brother of that
            //   ancestor.)
            //
            // That is, if you had a bookmarked heading of level N, a skipped
            // heading of level N, a skipped heading of level N + 1, and then
            // a bookmarked heading of level N + 2, that last one is bookmarked
            // as a level N heading (taking the place of its topmost skipped
            // ancestor), so that it is not mistakenly added as a descendant of
            // the previous level N heading.
            //
            // In other words, a heading can be added to the bookmark tree
            // at most as deep as its topmost skipped direct ancestor (if it
            // exists), or at most as deep as its actual nesting level in Typst
            // (not exceeding whichever is the most restrictive depth limit
            // of those two).
            while children.last().is_some_and(|last| {
                last_skipped_level.map_or(true, |l| last.level < l)
                    && last.level < leaf.level
            }) {
                children = &mut children.last_mut().unwrap().children;
            }

            // Since this heading was bookmarked, the next heading, if it is a
            // child of this one, won't have a skipped direct ancestor (indeed,
            // this heading would be its most direct ancestor, and wasn't
            // skipped). Therefore, it can be added as a child of this one, if
            // needed, following the usual rules listed above.
            last_skipped_level = None;
            children.push(leaf);
        } else if last_skipped_level.map_or(true, |l| leaf.level < l) {
            // Only the topmost / lowest-level skipped heading matters when you
            // have consecutive skipped headings (since none of them are being
            // added to the bookmark tree), hence the condition above.
            // This ensures the next bookmarked heading will be placed
            // at most as deep as its topmost skipped ancestors. Deeper
            // ancestors do not matter as the nesting structure they create
            // won't be visible in the PDF outline.
            last_skipped_level = Some(leaf.level);
        }
    }

    if tree.is_empty() {
        return None;
    }

    let root_id = alloc.bump();
    let start_ref = *alloc;
    let len = tree.len();

    let mut prev_ref = None;
    for (i, node) in tree.iter().enumerate() {
        prev_ref = Some(write_outline_item(
            ctx,
            chunk,
            alloc,
            node,
            root_id,
            prev_ref,
            i + 1 == len,
        ));
    }

    chunk
        .outline(root_id)
        .first(start_ref)
        .last(Ref::new(
            alloc.get() - tree.last().map(|child| child.len() as i32).unwrap_or(1),
        ))
        .count(tree.len() as i32);

    Some(root_id)
}
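
// Reference layout sketch (illustrative): because children are written
// depth-first right after their parent, a node's subtree occupies a
// contiguous run of references. For a tree [A [A1, A2], B] the ids come out
// as root, A, A1, A2, B, which is why an item's sibling can be computed as
// `id + node.len()` and the root's `/Last` as the allocator position minus
// the size of the final top-level subtree.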

/// A heading in the outline panel.
#[derive(Debug)]
struct HeadingNode<'a> {
    element: &'a Packed<HeadingElem>,
    level: NonZeroUsize,
    bookmarked: bool,
    children: Vec<HeadingNode<'a>>,
}

impl<'a> HeadingNode<'a> {
    fn leaf(element: &'a Packed<HeadingElem>) -> Self {
        HeadingNode {
            level: element.resolve_level(StyleChain::default()),
            // 'bookmarked' set to 'auto' falls back to the value of 'outlined'.
            bookmarked: element
                .bookmarked(StyleChain::default())
                .unwrap_or_else(|| element.outlined(StyleChain::default())),
            element,
            children: Vec::new(),
        }
    }

    fn len(&self) -> usize {
        1 + self.children.iter().map(Self::len).sum::<usize>()
    }
}
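
// Example: a node with two leaf children has `len() == 3` (itself plus the
// flattened subtree), which is exactly the number of consecutive references
// its outline entry consumes when written depth-first.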

/// Write an outline item and all its children.
fn write_outline_item(
    ctx: &WithEverything,
    chunk: &mut Pdf,
    alloc: &mut Ref,
    node: &HeadingNode,
    parent_ref: Ref,
    prev_ref: Option<Ref>,
    is_last: bool,
) -> Ref {
    let id = alloc.bump();
    let next_ref = Ref::new(id.get() + node.len() as i32);

    let mut outline = chunk.outline_item(id);
    outline.parent(parent_ref);

    if !is_last {
        outline.next(next_ref);
    }

    if let Some(prev_rev) = prev_ref {
        outline.prev(prev_rev);
    }

    if let Some(last_immediate_child) = node.children.last() {
        outline.first(Ref::new(id.get() + 1));
        outline.last(Ref::new(next_ref.get() - last_immediate_child.len() as i32));
        outline.count(-(node.children.len() as i32));
    }

    let body = node.element.body();
    outline.title(TextStr::trimmed(body.plain_text().trim()));

    let loc = node.element.location().unwrap();
    let pos = ctx.document.introspector.position(loc);
    let index = pos.page.get() - 1;

    // Don't link to non-exported pages.
    if let Some((Some(page), Some(page_ref))) =
        ctx.pages.get(index).zip(ctx.globals.pages.get(index))
    {
        let y = (pos.point.y - Abs::pt(10.0)).max(Abs::zero());
        outline.dest().page(*page_ref).xyz(
            pos.point.x.to_f32(),
            (page.content.size.y - y).to_f32(),
            None,
        );
    }

    outline.finish();

    let mut prev_ref = None;
    for (i, child) in node.children.iter().enumerate() {
        prev_ref = Some(write_outline_item(
            ctx,
            chunk,
            alloc,
            child,
            id,
            prev_ref,
            i + 1 == node.children.len(),
        ));
    }

    id
}
@ -1,310 +0,0 @@
use std::collections::HashMap;
use std::num::NonZeroUsize;

use ecow::EcoString;
use pdf_writer::types::{ActionType, AnnotationFlags, AnnotationType, NumberingStyle};
use pdf_writer::{Filter, Finish, Name, Rect, Ref, Str};
use typst_library::diag::SourceResult;
use typst_library::foundations::Label;
use typst_library::introspection::Location;
use typst_library::layout::{Abs, Page};
use typst_library::model::{Destination, Numbering};

use crate::{
    content_old, AbsExt, PdfChunk, PdfOptions, Resources, WithDocument, WithRefs,
    WithResources,
};

/// Construct page objects.
#[typst_macros::time(name = "construct pages")]
#[allow(clippy::type_complexity)]
pub fn traverse_pages(
    state: &WithDocument,
) -> SourceResult<(PdfChunk, (Vec<Option<EncodedPage>>, Resources<()>))> {
    let mut resources = Resources::default();
    let mut pages = Vec::with_capacity(state.document.pages.len());
    let mut skipped_pages = 0;
    for (i, page) in state.document.pages.iter().enumerate() {
        if state
            .options
            .page_ranges
            .as_ref()
            .is_some_and(|ranges| !ranges.includes_page_index(i))
        {
            // Don't export this page.
            pages.push(None);
            skipped_pages += 1;
        } else {
            let mut encoded = construct_page(state.options, &mut resources, page)?;
            encoded.label = page
                .numbering
                .as_ref()
                .and_then(|num| PdfPageLabel::generate(num, page.number))
                .or_else(|| {
                    // When some pages were excluded from export, we show a
                    // page label with the correct real (not logical) page
                    // number. This is for consistency with normal output when
                    // pages have no numbering and all are exported: the final
                    // PDF page numbers always correspond to the real (not
                    // logical) page numbers. Here, the final PDF page number
                    // will differ, but we can at least use labels to indicate
                    // the corresponding real page number in the Typst
                    // document.
                    (skipped_pages > 0).then(|| PdfPageLabel::arabic(i + 1))
                });
            pages.push(Some(encoded));
        }
    }

    Ok((PdfChunk::new(), (pages, resources)))
}

/// Construct a page object.
#[typst_macros::time(name = "construct page")]
fn construct_page(
    options: &PdfOptions,
    out: &mut Resources<()>,
    page: &Page,
) -> SourceResult<EncodedPage> {
    Ok(EncodedPage {
        content: content_old::build(
            options,
            out,
            &page.frame,
            page.fill_or_transparent(),
            None,
        )?,
        label: None,
    })
}

/// Allocate a reference for each exported page.
pub fn alloc_page_refs(
    context: &WithResources,
) -> SourceResult<(PdfChunk, Vec<Option<Ref>>)> {
    let mut chunk = PdfChunk::new();
    let page_refs = context
        .pages
        .iter()
        .map(|p| p.as_ref().map(|_| chunk.alloc()))
        .collect();
    Ok((chunk, page_refs))
}

/// Write the page tree.
pub fn write_page_tree(ctx: &WithRefs) -> SourceResult<(PdfChunk, Ref)> {
    let mut chunk = PdfChunk::new();
    let page_tree_ref = chunk.alloc.bump();

    for i in 0..ctx.pages.len() {
        let content_id = chunk.alloc.bump();
        write_page(
            &mut chunk,
            ctx,
            content_id,
            page_tree_ref,
            &ctx.references.named_destinations.loc_to_dest,
            i,
        );
    }

    let page_kids = ctx.globals.pages.iter().filter_map(Option::as_ref).copied();

    chunk
        .pages(page_tree_ref)
        .count(page_kids.clone().count() as i32)
        .kids(page_kids);

    Ok((chunk, page_tree_ref))
}

/// Write a page tree node.
fn write_page(
    chunk: &mut PdfChunk,
    ctx: &WithRefs,
    content_id: Ref,
    page_tree_ref: Ref,
    loc_to_dest: &HashMap<Location, Label>,
    i: usize,
) {
    let Some((page, page_ref)) = ctx.pages[i].as_ref().zip(ctx.globals.pages[i]) else {
        // Page excluded from export.
        return;
    };

    let mut annotations = Vec::with_capacity(page.content.links.len());
    for (dest, rect) in &page.content.links {
        let id = chunk.alloc();
        annotations.push(id);

        let mut annotation = chunk.annotation(id);
        annotation.subtype(AnnotationType::Link).rect(*rect);
        annotation.border(0.0, 0.0, 0.0, None).flags(AnnotationFlags::PRINT);

        let pos = match dest {
            Destination::Url(uri) => {
                annotation
                    .action()
                    .action_type(ActionType::Uri)
                    .uri(Str(uri.as_bytes()));
                continue;
            }
            Destination::Position(pos) => *pos,
            Destination::Location(loc) => {
                if let Some(key) = loc_to_dest.get(loc) {
                    annotation
                        .action()
                        .action_type(ActionType::GoTo)
                        // `key` must be a `Str`, not a `Name`.
                        .pair(Name(b"D"), Str(key.resolve().as_bytes()));
                    continue;
                } else {
                    ctx.document.introspector.position(*loc)
                }
            }
        };

        let index = pos.page.get() - 1;
        let y = (pos.point.y - Abs::pt(10.0)).max(Abs::zero());

        // Don't add links to non-exported pages.
        if let Some((Some(page), Some(page_ref))) =
            ctx.pages.get(index).zip(ctx.globals.pages.get(index))
        {
            annotation
                .action()
                .action_type(ActionType::GoTo)
                .destination()
                .page(*page_ref)
                .xyz(pos.point.x.to_f32(), (page.content.size.y - y).to_f32(), None);
        }
    }

    let mut page_writer = chunk.page(page_ref);
    page_writer.parent(page_tree_ref);

    let w = page.content.size.x.to_f32();
    let h = page.content.size.y.to_f32();
    page_writer.media_box(Rect::new(0.0, 0.0, w, h));
    page_writer.contents(content_id);
    page_writer.pair(Name(b"Resources"), ctx.resources.reference);

    if page.content.uses_opacities {
        page_writer
            .group()
            .transparency()
            .isolated(false)
            .knockout(false)
            .color_space()
            .srgb();
    }

    page_writer.annotations(annotations);

    page_writer.finish();

    chunk
        .stream(content_id, page.content.content.wait())
        .filter(Filter::FlateDecode);
}

/// Specification for a PDF page label.
#[derive(Debug, Clone, PartialEq, Hash, Default)]
pub(crate) struct PdfPageLabel {
    /// Can be any string or none. Will always be prepended to the numbering
    /// style.
    pub prefix: Option<EcoString>,
    /// Based on the numbering pattern.
    ///
    /// If `None` or if the numbering is a function, the field will be empty.
    pub style: Option<PdfPageLabelStyle>,
    /// Offset for the page label start.
    ///
    /// Describes where to start counting from when setting a style.
    /// (Has to be greater than or equal to 1.)
    pub offset: Option<NonZeroUsize>,
}

/// A PDF page label number style.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum PdfPageLabelStyle {
    /// Decimal arabic numerals (1, 2, 3).
    Arabic,
    /// Lowercase roman numerals (i, ii, iii).
    LowerRoman,
    /// Uppercase roman numerals (I, II, III).
    UpperRoman,
    /// Lowercase letters (`a` to `z` for the first 26 pages,
    /// `aa` to `zz` and so on for the next).
    LowerAlpha,
    /// Uppercase letters (`A` to `Z` for the first 26 pages,
    /// `AA` to `ZZ` and so on for the next).
    UpperAlpha,
}

impl PdfPageLabel {
    /// Create a new `PdfNumbering` from a `Numbering` applied to a page
    /// number.
    fn generate(numbering: &Numbering, number: usize) -> Option<PdfPageLabel> {
        let Numbering::Pattern(pat) = numbering else {
            return None;
        };

        let (prefix, kind) = pat.pieces.first()?;

        // If there is a suffix, we cannot use the common style optimisation,
        // since PDF does not provide a suffix field.
        let style = if pat.suffix.is_empty() {
            use typst_library::model::NumberingKind as Kind;
            use PdfPageLabelStyle as Style;
            match kind {
                Kind::Arabic => Some(Style::Arabic),
                Kind::LowerRoman => Some(Style::LowerRoman),
                Kind::UpperRoman => Some(Style::UpperRoman),
                Kind::LowerLatin if number <= 26 => Some(Style::LowerAlpha),
                Kind::UpperLatin if number <= 26 => Some(Style::UpperAlpha),
                _ => None,
            }
        } else {
            None
        };

        // Prefix and offset depend on the style: If it is supported by the PDF
        // spec, we use the given prefix and an offset. Otherwise, everything
        // goes into prefix.
        let prefix = if style.is_none() {
            Some(pat.apply(&[number]))
        } else {
            (!prefix.is_empty()).then(|| prefix.clone())
        };

        let offset = style.and(NonZeroUsize::new(number));
        Some(PdfPageLabel { prefix, style, offset })
    }
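
    // Worked examples (assuming the usual pattern semantics): a pattern like
    // "A-1" (prefix "A-", arabic kind, no suffix) on page 5 yields
    // `prefix: Some("A-"), style: Some(Arabic), offset: Some(5)`, letting the
    // viewer count onward by itself. A pattern with a suffix such as "1)"
    // has no PDF equivalent, so the fully rendered label (e.g. "5)") goes
    // into `prefix` and `style`/`offset` stay `None`.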

    /// Creates an arabic page label with the specified page number.
    /// For example, this will display page label `11` when given the page
    /// number 11.
    fn arabic(number: usize) -> PdfPageLabel {
        PdfPageLabel {
            prefix: None,
            style: Some(PdfPageLabelStyle::Arabic),
            offset: NonZeroUsize::new(number),
        }
    }
}

impl PdfPageLabelStyle {
    pub fn to_pdf_numbering_style(self) -> NumberingStyle {
        match self {
            PdfPageLabelStyle::Arabic => NumberingStyle::Arabic,
            PdfPageLabelStyle::LowerRoman => NumberingStyle::LowerRoman,
            PdfPageLabelStyle::UpperRoman => NumberingStyle::UpperRoman,
            PdfPageLabelStyle::LowerAlpha => NumberingStyle::LowerAlpha,
            PdfPageLabelStyle::UpperAlpha => NumberingStyle::UpperAlpha,
        }
    }
}

/// Data for an exported page.
pub struct EncodedPage {
    pub content: content_old::Encoded,
    pub label: Option<PdfPageLabel>,
}
@ -1,184 +0,0 @@
use std::collections::HashMap;

use ecow::eco_format;
use pdf_writer::types::{ColorSpaceOperand, PaintType, TilingType};
use pdf_writer::{Filter, Name, Rect, Ref};
use typst_library::diag::SourceResult;
use typst_library::layout::{Abs, Ratio, Transform};
use typst_library::visualize::{Pattern, RelativeTo};
use typst_utils::Numeric;

use crate::color_old::PaintEncode;
use crate::resources_old::{Remapper, ResourcesRefs};
use crate::{content_old, transform_to_array, PdfChunk, Resources, WithGlobalRefs};

/// Writes the actual patterns (tiling patterns) to the PDF.
/// This is performed once after writing all pages.
pub fn write_patterns(
    context: &WithGlobalRefs,
) -> SourceResult<(PdfChunk, HashMap<PdfPattern, Ref>)> {
    let mut chunk = PdfChunk::new();
    let mut out = HashMap::new();
    context.resources.traverse(&mut |resources| {
        let Some(patterns) = &resources.patterns else {
            return Ok(());
        };

        for pdf_pattern in patterns.remapper.items() {
            let PdfPattern { transform, pattern, content, .. } = pdf_pattern;
            if out.contains_key(pdf_pattern) {
                continue;
            }

            let tiling = chunk.alloc();
            out.insert(pdf_pattern.clone(), tiling);

            let mut tiling_pattern = chunk.tiling_pattern(tiling, content);
            tiling_pattern
                .tiling_type(TilingType::ConstantSpacing)
                .paint_type(PaintType::Colored)
                .bbox(Rect::new(
                    0.0,
                    0.0,
                    pattern.size().x.to_pt() as _,
                    pattern.size().y.to_pt() as _,
                ))
                .x_step((pattern.size().x + pattern.spacing().x).to_pt() as _)
                .y_step((pattern.size().y + pattern.spacing().y).to_pt() as _);

            // The actual resource dict will be written in a later step.
            tiling_pattern.pair(Name(b"Resources"), patterns.resources.reference);

            tiling_pattern
                .matrix(transform_to_array(
                    transform
                        .pre_concat(Transform::scale(Ratio::one(), -Ratio::one()))
                        .post_concat(Transform::translate(
                            Abs::zero(),
                            pattern.spacing().y,
                        )),
                ))
                .filter(Filter::FlateDecode);
        }

        Ok(())
    })?;

    Ok((chunk, out))
}
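
// Geometry note with illustrative numbers: a 10pt x 10pt tile with 2pt
// spacing gets a bbox of [0 0 10 10] but an `x_step`/`y_step` of 12, which
// leaves the requested gap between tiles. The scale by -1 in the matrix
// compensates for PDF's bottom-up y axis, and the follow-up translation by
// the vertical spacing shifts the flipped tile back into its slot.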

/// A pattern and its transform.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct PdfPattern {
    /// The transform to apply to the pattern.
    pub transform: Transform,
    /// The pattern to paint.
    pub pattern: Pattern,
    /// The rendered pattern.
    pub content: Vec<u8>,
}

/// Registers a pattern with the PDF.
fn register_pattern(
    ctx: &mut content_old::Builder,
    pattern: &Pattern,
    on_text: bool,
    mut transforms: content_old::Transforms,
) -> SourceResult<usize> {
    let patterns = ctx
        .resources
        .patterns
        .get_or_insert_with(|| Box::new(PatternRemapper::new()));

    // Edge cases for strokes.
    if transforms.size.x.is_zero() {
        transforms.size.x = Abs::pt(1.0);
    }

    if transforms.size.y.is_zero() {
        transforms.size.y = Abs::pt(1.0);
    }

    let transform = match pattern.unwrap_relative(on_text) {
        RelativeTo::Self_ => transforms.transform,
        RelativeTo::Parent => transforms.container_transform,
    };

    // Render the body.
    let content = content_old::build(
        ctx.options,
        &mut patterns.resources,
        pattern.frame(),
        None,
        None,
    )?;

    let pdf_pattern = PdfPattern {
        transform,
        pattern: pattern.clone(),
        content: content.content.wait().clone(),
    };

    Ok(patterns.remapper.insert(pdf_pattern))
}

impl PaintEncode for Pattern {
    fn set_as_fill(
        &self,
        ctx: &mut content_old::Builder,
        on_text: bool,
        transforms: content_old::Transforms,
    ) -> SourceResult<()> {
        ctx.reset_fill_color_space();

        let index = register_pattern(ctx, self, on_text, transforms)?;
        let id = eco_format!("P{index}");
        let name = Name(id.as_bytes());

        ctx.content.set_fill_color_space(ColorSpaceOperand::Pattern);
        ctx.content.set_fill_pattern(None, name);
        Ok(())
    }

    fn set_as_stroke(
        &self,
        ctx: &mut content_old::Builder,
        on_text: bool,
        transforms: content_old::Transforms,
    ) -> SourceResult<()> {
        ctx.reset_stroke_color_space();

        let index = register_pattern(ctx, self, on_text, transforms)?;
        let id = eco_format!("P{index}");
        let name = Name(id.as_bytes());

        ctx.content.set_stroke_color_space(ColorSpaceOperand::Pattern);
        ctx.content.set_stroke_pattern(None, name);
        Ok(())
    }
}

/// De-duplicate patterns and the resources they require to be drawn.
pub struct PatternRemapper<R> {
    /// Pattern de-duplicator.
    pub remapper: Remapper<PdfPattern>,
    /// PDF resources that are used by these patterns.
    pub resources: Resources<R>,
}

impl PatternRemapper<()> {
    pub fn new() -> Self {
        Self {
            remapper: Remapper::new("P"),
            resources: Resources::default(),
        }
    }

    /// Allocate a reference to the resource dictionary of these patterns.
    pub fn with_refs(self, refs: &ResourcesRefs) -> PatternRemapper<Ref> {
        PatternRemapper {
            remapper: self.remapper,
            resources: self.resources.with_refs(refs),
        }
    }
}
@ -1,349 +0,0 @@
//! PDF resources.
//!
//! Resources are defined in dictionaries. They map identifiers such as `Im0`
//! to a PDF reference. Each [content stream] is associated with a resource
//! dictionary. The identifiers defined in the resources can then be used in
//! content streams.
//!
//! [content stream]: `crate::content_old`

use std::collections::{BTreeMap, HashMap};
use std::hash::Hash;

use ecow::{eco_format, EcoString};
use pdf_writer::{Dict, Finish, Name, Ref};
use subsetter::GlyphRemapper;
use typst_library::diag::{SourceResult, StrResult};
use typst_library::text::{Font, Lang};
use typst_library::visualize::Image;
use typst_syntax::Span;
use typst_utils::Deferred;

use crate::color_font::ColorFontMap;
use crate::color_old::ColorSpaces;
use crate::extg_old::ExtGState;
use crate::gradient_old::PdfGradient;
use crate::image_old::EncodedImage;
use crate::pattern_old::PatternRemapper;
use crate::{PdfChunk, Renumber, WithEverything, WithResources};

/// All the resources that have been collected when traversing the document.
///
/// This does not allocate references to resources; it only tracks what was
/// used and deduplicates what can be deduplicated.
///
/// You may notice that this structure is a tree: [`PatternRemapper`] and
/// [`ColorFontMap`] (which are present in the fields of [`Resources`])
/// themselves contain [`Resources`] (which will be called "sub-resources"
/// from now on). Because color glyphs and patterns are defined using content
/// streams, just like pages, they can refer to resources too, which are
/// tracked by the respective sub-resources.
///
/// Each instance of this structure will become a `/Resources` dictionary in
/// the final PDF. It is not possible to use a single shared dictionary for
/// all pages, patterns and color fonts, because if a resource is listed in
/// its own `/Resources` dictionary, some PDF readers will fail to open the
/// document.
///
/// Because we need to lazily initialize sub-resources (we don't know how
/// deep the tree will be before reading the document), and because this is
/// done in a context where no PDF reference allocator is available,
/// `Resources` are originally created with the type parameter `R = ()`. The
/// reference for each dictionary will only be allocated in the next phase,
/// once we know the shape of the tree, at which point `R` becomes `Ref`. No
/// other value of `R` should ever exist.
pub struct Resources<R = Ref> {
    /// The global reference to this resource dictionary, or `()` if it has
    /// not been allocated yet.
    pub reference: R,

    /// Handles color space writing.
    pub colors: ColorSpaces,

    /// Deduplicates fonts used across the document.
    pub fonts: Remapper<Font>,
    /// Deduplicates images used across the document.
    pub images: Remapper<Image>,
    /// Handles to deferred image conversions.
    pub deferred_images: HashMap<usize, (Deferred<StrResult<EncodedImage>>, Span)>,
    /// Deduplicates gradients used across the document.
    pub gradients: Remapper<PdfGradient>,
    /// Deduplicates patterns used across the document.
    pub patterns: Option<Box<PatternRemapper<R>>>,
    /// Deduplicates external graphics states used across the document.
    pub ext_gs: Remapper<ExtGState>,
    /// Deduplicates color glyphs.
    pub color_fonts: Option<Box<ColorFontMap<R>>>,

    // The fields below do not correspond to actual resources that will be
    // written in a dictionary, but are more meta-data about resources that
    // can't really live somewhere else.
    /// The number of glyphs for all referenced languages in the content
    /// stream. We keep track of this to determine the main document
    /// language. A `BTreeMap` is used so that a sorted list of languages can
    /// be written to the metadata.
    pub languages: BTreeMap<Lang, usize>,

    /// For each font, a mapping from used glyphs to their text
    /// representation. This is used for the PDF's /ToUnicode map, and is
    /// important for copy-paste and searching.
    ///
    /// Note that the text representation may contain multiple chars in case
    /// of ligatures or similar things, and it may have no entry in the
    /// font's cmap (or only a private-use codepoint), like the “Th” in Linux
    /// Libertine.
    ///
    /// A glyph may have multiple entries in the font's cmap, and even the
    /// same glyph can have a different text representation within one
    /// document. But /ToUnicode does not support that, so we just save the
    /// first occurrence.
    pub glyph_sets: HashMap<Font, BTreeMap<u16, EcoString>>,
    /// Same as `glyph_sets`, but for color fonts.
    pub color_glyph_sets: HashMap<Font, BTreeMap<u16, EcoString>>,
    /// Stores the glyph remapper for each font for the subsetter.
    pub glyph_remappers: HashMap<Font, GlyphRemapper>,
}

impl<R: Renumber> Renumber for Resources<R> {
    fn renumber(&mut self, offset: i32) {
        self.reference.renumber(offset);

        if let Some(color_fonts) = &mut self.color_fonts {
            color_fonts.resources.renumber(offset);
        }

        if let Some(patterns) = &mut self.patterns {
            patterns.resources.renumber(offset);
        }
    }
}

impl Default for Resources<()> {
    fn default() -> Self {
        Resources {
            reference: (),
            colors: ColorSpaces::default(),
            fonts: Remapper::new("F"),
            images: Remapper::new("Im"),
            deferred_images: HashMap::new(),
            gradients: Remapper::new("Gr"),
            patterns: None,
            ext_gs: Remapper::new("Gs"),
            color_fonts: None,
            languages: BTreeMap::new(),
            glyph_sets: HashMap::new(),
            color_glyph_sets: HashMap::new(),
            glyph_remappers: HashMap::new(),
        }
    }
}

impl Resources<()> {
    /// Associate a reference with this resource dictionary (and do so
    /// recursively for sub-resources).
    pub fn with_refs(self, refs: &ResourcesRefs) -> Resources<Ref> {
        Resources {
            reference: refs.reference,
            colors: self.colors,
            fonts: self.fonts,
            images: self.images,
            deferred_images: self.deferred_images,
            gradients: self.gradients,
            patterns: self
                .patterns
                .zip(refs.patterns.as_ref())
                .map(|(p, r)| Box::new(p.with_refs(r))),
            ext_gs: self.ext_gs,
            color_fonts: self
                .color_fonts
                .zip(refs.color_fonts.as_ref())
                .map(|(c, r)| Box::new(c.with_refs(r))),
            languages: self.languages,
            glyph_sets: self.glyph_sets,
            color_glyph_sets: self.color_glyph_sets,
            glyph_remappers: self.glyph_remappers,
        }
    }
}

impl<R> Resources<R> {
    /// Run a function on this resource dictionary and all
    /// of its sub-resources.
    pub fn traverse<P>(&self, process: &mut P) -> SourceResult<()>
    where
        P: FnMut(&Self) -> SourceResult<()>,
    {
        process(self)?;
        if let Some(color_fonts) = &self.color_fonts {
            color_fonts.resources.traverse(process)?;
        }
        if let Some(patterns) = &self.patterns {
            patterns.resources.traverse(process)?;
        }
        Ok(())
    }
}
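
// Traversal is pre-order: for instance, a document that uses one color font
// and one pattern visits the page-level resources first, then the color
// font's sub-resources, then the pattern's. `write_resource_dictionaries`
// below relies on this to emit one `/Resources` dictionary per tree node.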

/// References for a resource tree.
///
/// This structure is a tree too; it should have the same shape as the
/// corresponding `Resources`.
pub struct ResourcesRefs {
    pub reference: Ref,
    pub color_fonts: Option<Box<ResourcesRefs>>,
    pub patterns: Option<Box<ResourcesRefs>>,
}

impl Renumber for ResourcesRefs {
    fn renumber(&mut self, offset: i32) {
        self.reference.renumber(offset);
        if let Some(color_fonts) = &mut self.color_fonts {
            color_fonts.renumber(offset);
        }
        if let Some(patterns) = &mut self.patterns {
            patterns.renumber(offset);
        }
    }
}

/// Allocate references for all resource dictionaries.
pub fn alloc_resources_refs(
    context: &WithResources,
) -> SourceResult<(PdfChunk, ResourcesRefs)> {
    let mut chunk = PdfChunk::new();
    /// Recursively explore resource dictionaries and assign them references.
    fn refs_for(resources: &Resources<()>, chunk: &mut PdfChunk) -> ResourcesRefs {
        ResourcesRefs {
            reference: chunk.alloc(),
            color_fonts: resources
                .color_fonts
                .as_ref()
                .map(|c| Box::new(refs_for(&c.resources, chunk))),
            patterns: resources
                .patterns
                .as_ref()
                .map(|p| Box::new(refs_for(&p.resources, chunk))),
        }
    }

    let refs = refs_for(&context.resources, &mut chunk);
    Ok((chunk, refs))
}

/// Write the resource dictionaries that will be referenced by all pages.
///
/// We add a reference to this dictionary to each page individually instead of
/// to the root node of the page tree because using the resource inheritance
/// feature breaks PDF merging with Apple Preview.
///
/// Also write resource dictionaries for Type3 fonts and patterns.
pub fn write_resource_dictionaries(ctx: &WithEverything) -> SourceResult<(PdfChunk, ())> {
    let mut chunk = PdfChunk::new();
    let mut used_color_spaces = ColorSpaces::default();

    ctx.resources.traverse(&mut |resources| {
        used_color_spaces.merge(&resources.colors);

        let images_ref = chunk.alloc.bump();
        let patterns_ref = chunk.alloc.bump();
        let ext_gs_states_ref = chunk.alloc.bump();
        let color_spaces_ref = chunk.alloc.bump();

        let mut color_font_slices = Vec::new();
        let mut color_font_numbers = HashMap::new();
        if let Some(color_fonts) = &resources.color_fonts {
            for (_, font_slice) in color_fonts.iter() {
                color_font_numbers.insert(font_slice.clone(), color_font_slices.len());
                color_font_slices.push(font_slice);
            }
        }
        let color_font_remapper = Remapper {
            prefix: "Cf",
            to_pdf: color_font_numbers,
            to_items: color_font_slices,
        };

        resources
            .images
            .write(&ctx.references.images, &mut chunk.indirect(images_ref).dict());

        let mut patterns_dict = chunk.indirect(patterns_ref).dict();
        resources
            .gradients
            .write(&ctx.references.gradients, &mut patterns_dict);
        if let Some(p) = &resources.patterns {
            p.remapper.write(&ctx.references.patterns, &mut patterns_dict);
        }
        patterns_dict.finish();

        resources
            .ext_gs
            .write(&ctx.references.ext_gs, &mut chunk.indirect(ext_gs_states_ref).dict());

        let mut res_dict = chunk
            .indirect(resources.reference)
            .start::<pdf_writer::writers::Resources>();
        res_dict.pair(Name(b"XObject"), images_ref);
        res_dict.pair(Name(b"Pattern"), patterns_ref);
        res_dict.pair(Name(b"ExtGState"), ext_gs_states_ref);
        res_dict.pair(Name(b"ColorSpace"), color_spaces_ref);

        // TODO: can't this be an indirect reference too?
        let mut fonts_dict = res_dict.fonts();
        resources.fonts.write(&ctx.references.fonts, &mut fonts_dict);
        color_font_remapper.write(&ctx.references.color_fonts, &mut fonts_dict);
        fonts_dict.finish();

        res_dict.finish();

        let color_spaces = chunk.indirect(color_spaces_ref).dict();
        resources
            .colors
            .write_color_spaces(color_spaces, &ctx.globals.color_functions);

        Ok(())
    })?;

    used_color_spaces.write_functions(&mut chunk, &ctx.globals.color_functions);

    Ok((chunk, ()))
}

/// Assigns new, consecutive PDF-internal indices to items.
pub struct Remapper<T> {
    /// The prefix to use when naming these resources.
    prefix: &'static str,
    /// Forwards from the items to the PDF indices.
    to_pdf: HashMap<T, usize>,
    /// Backwards from the PDF indices to the items.
    to_items: Vec<T>,
}

impl<T> Remapper<T>
where
    T: Eq + Hash + Clone,
{
    /// Create an empty mapping.
    pub fn new(prefix: &'static str) -> Self {
        Self { prefix, to_pdf: HashMap::new(), to_items: vec![] }
    }

    /// Insert an item in the mapping if it was not already present.
    pub fn insert(&mut self, item: T) -> usize {
        let to_layout = &mut self.to_items;
        *self.to_pdf.entry(item.clone()).or_insert_with(|| {
            let pdf_index = to_layout.len();
            to_layout.push(item);
            pdf_index
        })
    }
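
    // Usage sketch (hypothetical values): inserting an equal item twice
    // yields the same index, so a content stream can refer to `/Im0` for
    // both uses.
    //
    //     let mut images = Remapper::new("Im");
    //     let a = images.insert(image.clone()); // 0, named "Im0"
    //     let b = images.insert(image);         // still 0, deduplicated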

    /// All items in this mapping.
    pub fn items(&self) -> impl Iterator<Item = &T> + '_ {
        self.to_items.iter()
    }

    /// Write this list of items into a resource dictionary.
    fn write(&self, mapping: &HashMap<T, Ref>, dict: &mut Dict) {
        for (number, item) in self.items().enumerate() {
            let name = eco_format!("{}{}", self.prefix, number);
            let reference = mapping[item];
            dict.pair(Name(name.as_bytes()), reference);
        }
    }
}