Mirror of https://github.com/typst/typst (synced 2025-05-14 04:56:26 +08:00)

PDF export diagnostics (#5073)

This commit is contained in:
parent d5b1bf314e
commit 788ae10a07
@@ -8,12 +8,15 @@ use codespan_reporting::term;
 use ecow::{eco_format, EcoString};
 use parking_lot::RwLock;
 use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
-use typst::diag::{bail, Severity, SourceDiagnostic, StrResult, Warned};
+use typst::diag::{
+    bail, At, Severity, SourceDiagnostic, SourceResult, StrResult, Warned,
+};
 use typst::foundations::{Datetime, Smart};
 use typst::layout::{Frame, Page, PageRanges};
 use typst::model::Document;
 use typst::syntax::{FileId, Source, Span};
 use typst::WorldExt;
+use typst_pdf::PdfOptions;

 use crate::args::{
     CompileCommand, DiagnosticFormat, Input, Output, OutputFormat, PageRangeArgument,
@@ -54,7 +57,11 @@ impl CompileCommand {
                 Some(ext) if ext.eq_ignore_ascii_case("pdf") => OutputFormat::Pdf,
                 Some(ext) if ext.eq_ignore_ascii_case("png") => OutputFormat::Png,
                 Some(ext) if ext.eq_ignore_ascii_case("svg") => OutputFormat::Svg,
-                _ => bail!("could not infer output format for path {}.\nconsider providing the format manually with `--format/-f`", output.display()),
+                _ => bail!(
+                    "could not infer output format for path {}.\n\
+                     consider providing the format manually with `--format/-f`",
+                    output.display()
+                ),
             }
         } else {
             OutputFormat::Pdf
@@ -96,11 +103,11 @@ pub fn compile_once(
    }

     let Warned { output, warnings } = typst::compile(world);
+    let result = output.and_then(|document| export(world, &document, command, watching));

-    match output {
+    match result {
         // Export the PDF / PNG.
-        Ok(document) => {
-            export(world, &document, command, watching)?;
+        Ok(()) => {
             let duration = start.elapsed();

             if watching {
@@ -150,29 +157,35 @@ fn export(
     document: &Document,
     command: &CompileCommand,
     watching: bool,
-) -> StrResult<()> {
-    match command.output_format()? {
+) -> SourceResult<()> {
+    match command.output_format().at(Span::detached())? {
         OutputFormat::Png => {
             export_image(world, document, command, watching, ImageExportFormat::Png)
+                .at(Span::detached())
         }
         OutputFormat::Svg => {
             export_image(world, document, command, watching, ImageExportFormat::Svg)
+                .at(Span::detached())
         }
         OutputFormat::Pdf => export_pdf(document, command),
     }
 }

 /// Export to a PDF.
-fn export_pdf(document: &Document, command: &CompileCommand) -> StrResult<()> {
-    let timestamp = convert_datetime(
-        command.common.creation_timestamp.unwrap_or_else(chrono::Utc::now),
-    );
-    let exported_page_ranges = command.exported_page_ranges();
-    let buffer = typst_pdf::pdf(document, Smart::Auto, timestamp, exported_page_ranges);
+fn export_pdf(document: &Document, command: &CompileCommand) -> SourceResult<()> {
+    let options = PdfOptions {
+        ident: Smart::Auto,
+        timestamp: convert_datetime(
+            command.common.creation_timestamp.unwrap_or_else(chrono::Utc::now),
+        ),
+        page_ranges: command.exported_page_ranges(),
+    };
+    let buffer = typst_pdf::pdf(document, &options)?;
     command
         .output()
         .write(&buffer)
-        .map_err(|err| eco_format!("failed to write PDF file ({err})"))?;
+        .map_err(|err| eco_format!("failed to write PDF file ({err})"))
+        .at(Span::detached())?;
     Ok(())
 }

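A pattern worth noting in the CLI hunks above, and repeated throughout the backend changes below, is bridging span-less errors into `SourceResult` with the `At` trait and a detached span. A minimal illustration under the same `typst::diag` API; the helper names here are made up for the example and are not part of the commit.

```rust
use typst::diag::{bail, At, SourceResult, StrResult};
use typst::syntax::Span;

// Hypothetical helper: fails with a plain string error that carries no span.
fn risky_step() -> StrResult<u32> {
    bail!("something outside the source tree went wrong");
}

// Attaching a detached span lifts the error into SourceResult, so it can flow
// through the same channels as regular compilation diagnostics.
fn spanned_step() -> SourceResult<u32> {
    risky_step().at(Span::detached())
}
```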
@@ -1,11 +1,10 @@
-use typst::diag::StrResult;
 use typst::text::FontVariant;
 use typst_kit::fonts::Fonts;

 use crate::args::FontsCommand;

 /// Execute a font listing command.
-pub fn fonts(command: &FontsCommand) -> StrResult<()> {
+pub fn fonts(command: &FontsCommand) {
     let fonts = Fonts::searcher()
         .include_system_fonts(!command.font_args.ignore_system_fonts)
         .search_with(&command.font_args.font_paths);
@@ -19,6 +18,4 @@ pub fn fonts(command: &FontsCommand) -> StrResult<()> {
             }
         }
     }
-
-    Ok(())
 }
@@ -54,7 +54,7 @@ fn dispatch() -> HintedStrResult<()> {
         Command::Watch(command) => crate::watch::watch(timer, command.clone())?,
         Command::Init(command) => crate::init::init(command)?,
         Command::Query(command) => crate::query::query(command)?,
-        Command::Fonts(command) => crate::fonts::fonts(command)?,
+        Command::Fonts(command) => crate::fonts::fonts(command),
         Command::Update(command) => crate::update::update(command)?,
     }

@@ -28,7 +28,7 @@ pub fn update(command: &UpdateCommand) -> StrResult<()> {

     if version < &Version::new(0, 8, 0) {
         eprintln!(
-            "Note: Versions older than 0.8.0 will not have \
+            "note: versions older than 0.8.0 will not have \
             the update command available."
         );
     }
@@ -1,26 +1,24 @@
 use std::num::NonZeroUsize;

 use ecow::eco_format;
-use pdf_writer::{
-    types::Direction, writers::PageLabel, Finish, Name, Pdf, Ref, Str, TextStr,
-};
-use xmp_writer::{DateTime, LangId, RenditionClass, Timezone, XmpWriter};
-
+use pdf_writer::types::Direction;
+use pdf_writer::writers::PageLabel;
+use pdf_writer::{Finish, Name, Pdf, Ref, Str, TextStr};
+use typst::diag::SourceResult;
 use typst::foundations::{Datetime, Smart};
 use typst::layout::Dir;
 use typst::text::Lang;
+use xmp_writer::{DateTime, LangId, RenditionClass, Timezone, XmpWriter};

-use crate::WithEverything;
-use crate::{hash_base64, outline, page::PdfPageLabel};
+use crate::page::PdfPageLabel;
+use crate::{hash_base64, outline, WithEverything};

 /// Write the document catalog.
 pub fn write_catalog(
     ctx: WithEverything,
-    ident: Smart<&str>,
-    timestamp: Option<Datetime>,
     pdf: &mut Pdf,
     alloc: &mut Ref,
-) {
+) -> SourceResult<()> {
     let lang = ctx
         .resources
         .languages
@@ -83,7 +81,7 @@ pub fn write_catalog(
         xmp.pdf_keywords(&joined);
     }

-    if let Some(date) = ctx.document.info.date.unwrap_or(timestamp) {
+    if let Some(date) = ctx.document.info.date.unwrap_or(ctx.options.timestamp) {
         let tz = ctx.document.info.date.is_auto();
         if let Some(pdf_date) = pdf_date(date, tz) {
             info.creation_date(pdf_date);
@@ -106,7 +104,7 @@ pub fn write_catalog(

     // Determine the document's ID. It should be as stable as possible.
     const PDF_VERSION: &str = "PDF-1.7";
-    let doc_id = if let Smart::Custom(ident) = ident {
+    let doc_id = if let Smart::Custom(ident) = ctx.options.ident {
         // We were provided with a stable ID. Yay!
         hash_base64(&(PDF_VERSION, ident))
     } else if ctx.document.info.title.is_some() && !ctx.document.info.author.is_empty() {
@@ -167,6 +165,8 @@ pub fn write_catalog(
     }

     catalog.finish();
+
+    Ok(())
 }

 /// Write the page labels.
@@ -184,8 +184,8 @@ pub(crate) fn write_page_labels(
         return Vec::new();
     }

-    let mut result = vec![];
     let empty_label = PdfPageLabel::default();
+    let mut result = vec![];
     let mut prev: Option<&PdfPageLabel> = None;

     // Skip non-exported pages for numbering.
@@ -1,6 +1,7 @@
 use arrayvec::ArrayVec;
 use once_cell::sync::Lazy;
 use pdf_writer::{writers, Chunk, Dict, Filter, Name, Ref};
+use typst::diag::SourceResult;
 use typst::visualize::{Color, ColorSpace, Paint};

 use crate::{content, deflate, PdfChunk, Renumber, WithResources};
@@ -142,20 +143,21 @@ impl Renumber for ColorFunctionRefs {
 /// Allocate all necessary [`ColorFunctionRefs`].
 pub fn alloc_color_functions_refs(
     context: &WithResources,
-) -> (PdfChunk, ColorFunctionRefs) {
+) -> SourceResult<(PdfChunk, ColorFunctionRefs)> {
     let mut chunk = PdfChunk::new();
     let mut used_color_spaces = ColorSpaces::default();

     context.resources.traverse(&mut |r| {
         used_color_spaces.merge(&r.colors);
-    });
+        Ok(())
+    })?;

     let refs = ColorFunctionRefs {
         srgb: if used_color_spaces.use_srgb { Some(chunk.alloc()) } else { None },
         d65_gray: if used_color_spaces.use_d65_gray { Some(chunk.alloc()) } else { None },
     };

-    (chunk, refs)
+    Ok((chunk, refs))
 }

 /// Encodes the color into four f32s, which can be used in a PDF file.
@@ -193,7 +195,7 @@ pub(super) trait PaintEncode {
         ctx: &mut content::Builder,
         on_text: bool,
         transforms: content::Transforms,
-    );
+    ) -> SourceResult<()>;

     /// Set the paint as the stroke color.
     fn set_as_stroke(
@@ -201,7 +203,7 @@ pub(super) trait PaintEncode {
         ctx: &mut content::Builder,
         on_text: bool,
         transforms: content::Transforms,
-    );
+    ) -> SourceResult<()>;
 }

 impl PaintEncode for Paint {
@@ -210,7 +212,7 @@ impl PaintEncode for Paint {
         ctx: &mut content::Builder,
         on_text: bool,
         transforms: content::Transforms,
-    ) {
+    ) -> SourceResult<()> {
         match self {
             Self::Solid(c) => c.set_as_fill(ctx, on_text, transforms),
             Self::Gradient(gradient) => gradient.set_as_fill(ctx, on_text, transforms),
@@ -223,7 +225,7 @@ impl PaintEncode for Paint {
         ctx: &mut content::Builder,
         on_text: bool,
         transforms: content::Transforms,
-    ) {
+    ) -> SourceResult<()> {
         match self {
             Self::Solid(c) => c.set_as_stroke(ctx, on_text, transforms),
             Self::Gradient(gradient) => gradient.set_as_stroke(ctx, on_text, transforms),
@@ -233,7 +235,12 @@ impl PaintEncode for Paint {
 }

 impl PaintEncode for Color {
-    fn set_as_fill(&self, ctx: &mut content::Builder, _: bool, _: content::Transforms) {
+    fn set_as_fill(
+        &self,
+        ctx: &mut content::Builder,
+        _: bool,
+        _: content::Transforms,
+    ) -> SourceResult<()> {
         match self {
             Color::Luma(_) => {
                 ctx.resources.colors.mark_as_used(ColorSpace::D65Gray);
@@ -268,9 +275,15 @@ impl PaintEncode for Color {
                 ctx.content.set_fill_cmyk(c, m, y, k);
             }
         }
+        Ok(())
     }

-    fn set_as_stroke(&self, ctx: &mut content::Builder, _: bool, _: content::Transforms) {
+    fn set_as_stroke(
+        &self,
+        ctx: &mut content::Builder,
+        _: bool,
+        _: content::Transforms,
+    ) -> SourceResult<()> {
         match self {
             Color::Luma(_) => {
                 ctx.resources.colors.mark_as_used(ColorSpace::D65Gray);
@@ -305,6 +318,7 @@ impl PaintEncode for Color {
                 ctx.content.set_stroke_cmyk(c, m, y, k);
             }
         }
+        Ok(())
     }
 }

@@ -9,20 +9,18 @@ use std::collections::HashMap;

 use ecow::eco_format;
 use indexmap::IndexMap;
-use pdf_writer::Filter;
-use pdf_writer::{types::UnicodeCmap, Finish, Name, Rect, Ref};
+use pdf_writer::types::UnicodeCmap;
+use pdf_writer::{Filter, Finish, Name, Rect, Ref};
 use ttf_parser::name_id;
+use typst::diag::SourceResult;
 use typst::layout::Em;
-use typst::text::{color::frame_for_glyph, Font};
+use typst::text::color::frame_for_glyph;
+use typst::text::Font;

+use crate::content;
+use crate::font::{subset_tag, write_font_descriptor, CMAP_NAME, SYSTEM_INFO};
 use crate::resources::{Resources, ResourcesRefs};
-use crate::WithGlobalRefs;
-use crate::{
-    content,
-    font::{subset_tag, write_font_descriptor, CMAP_NAME, SYSTEM_INFO},
-    EmExt, PdfChunk,
-};
+use crate::{EmExt, PdfChunk, PdfOptions, WithGlobalRefs};

 /// Write color fonts in the PDF document.
 ///
@@ -30,12 +28,12 @@ use crate::{
 /// instructions.
 pub fn write_color_fonts(
     context: &WithGlobalRefs,
-) -> (PdfChunk, HashMap<ColorFontSlice, Ref>) {
+) -> SourceResult<(PdfChunk, HashMap<ColorFontSlice, Ref>)> {
     let mut out = HashMap::new();
     let mut chunk = PdfChunk::new();
     context.resources.traverse(&mut |resources: &Resources| {
         let Some(color_fonts) = &resources.color_fonts else {
-            return;
+            return Ok(());
         };

         for (color_font, font_slice) in color_fonts.iter() {
@@ -151,9 +149,11 @@ pub fn write_color_fonts(

             out.insert(font_slice, subfont_id);
         }
-    });

-    (chunk, out)
+        Ok(())
+    })?;
+
+    Ok((chunk, out))
 }

 /// A mapping between `Font`s and all the corresponding `ColorFont`s.
@@ -213,7 +213,12 @@ impl ColorFontMap<()> {
     ///
     /// If this is the first occurrence of this glyph in this font, it will
     /// start its encoding and add it to the list of known glyphs.
-    pub fn get(&mut self, font: &Font, gid: u16) -> (usize, u8) {
+    pub fn get(
+        &mut self,
+        options: &PdfOptions,
+        font: &Font,
+        gid: u16,
+    ) -> SourceResult<(usize, u8)> {
         let color_font = self.map.entry(font.clone()).or_insert_with(|| {
             let global_bbox = font.ttf().global_bounding_box();
             let bbox = Rect::new(
@@ -230,7 +235,7 @@ impl ColorFontMap<()> {
             }
         });

-        if let Some(index_of_glyph) = color_font.glyph_indices.get(&gid) {
+        Ok(if let Some(index_of_glyph) = color_font.glyph_indices.get(&gid) {
             // If we already know this glyph, return it.
             (color_font.slice_ids[index_of_glyph / 256], *index_of_glyph as u8)
         } else {
@@ -245,13 +250,18 @@ impl ColorFontMap<()> {
             let frame = frame_for_glyph(font, gid);
             let width =
                 font.advance(gid).unwrap_or(Em::new(0.0)).get() * font.units_per_em();
-            let instructions =
-                content::build(&mut self.resources, &frame, None, Some(width as f32));
+            let instructions = content::build(
+                options,
+                &mut self.resources,
+                &frame,
+                None,
+                Some(width as f32),
+            )?;
             color_font.glyphs.push(ColorGlyph { gid, instructions });
             color_font.glyph_indices.insert(gid, index);

             (color_font.slice_ids[index / 256], index as u8)
-        }
+        })
     }

     /// Assign references to the resource dictionary used by this set of color
@@ -5,26 +5,30 @@
 //! See also [`pdf_writer::Content`].

 use ecow::eco_format;
-use pdf_writer::{
-    types::{ColorSpaceOperand, LineCapStyle, LineJoinStyle, TextRenderingMode},
-    Content, Finish, Name, Rect, Str,
+use pdf_writer::types::{
+    ColorSpaceOperand, LineCapStyle, LineJoinStyle, TextRenderingMode,
 };
+use pdf_writer::{Content, Finish, Name, Rect, Str};
+use typst::diag::SourceResult;
 use typst::layout::{
     Abs, Em, Frame, FrameItem, GroupItem, Point, Ratio, Size, Transform,
 };
 use typst::model::Destination;
-use typst::text::{color::is_color_glyph, Font, TextItem, TextItemView};
+use typst::syntax::Span;
+use typst::text::color::is_color_glyph;
+use typst::text::{Font, TextItem, TextItemView};
 use typst::utils::{Deferred, Numeric, SliceExt};
 use typst::visualize::{
     FillRule, FixedStroke, Geometry, Image, LineCap, LineJoin, Paint, Path, PathItem,
     Shape,
 };

+use crate::color::PaintEncode;
 use crate::color_font::ColorFontMap;
 use crate::extg::ExtGState;
 use crate::image::deferred_image;
-use crate::{color::PaintEncode, resources::Resources};
-use crate::{deflate_deferred, AbsExt, EmExt};
+use crate::resources::Resources;
+use crate::{deflate_deferred, AbsExt, EmExt, PdfOptions};

 /// Encode a [`Frame`] into a content stream.
 ///
@@ -35,13 +39,14 @@ use crate::{deflate_deferred, AbsExt, EmExt};
 ///
 /// [color glyph]: `crate::color_font`
 pub fn build(
+    options: &PdfOptions,
     resources: &mut Resources<()>,
     frame: &Frame,
     fill: Option<Paint>,
     color_glyph_width: Option<f32>,
-) -> Encoded {
+) -> SourceResult<Encoded> {
     let size = frame.size();
-    let mut ctx = Builder::new(resources, size);
+    let mut ctx = Builder::new(options, resources, size);

     if let Some(width) = color_glyph_width {
         ctx.content.start_color_glyph(width);
@@ -57,18 +62,18 @@ pub fn build(

     if let Some(fill) = fill {
         let shape = Geometry::Rect(frame.size()).filled(fill);
-        write_shape(&mut ctx, Point::zero(), &shape);
+        write_shape(&mut ctx, Point::zero(), &shape)?;
     }

     // Encode the frame into the content stream.
-    write_frame(&mut ctx, frame);
+    write_frame(&mut ctx, frame)?;

-    Encoded {
+    Ok(Encoded {
         size,
         content: deflate_deferred(ctx.content.finish()),
         uses_opacities: ctx.uses_opacities,
         links: ctx.links,
-    }
+    })
 }

 /// An encoded content stream.
@@ -91,6 +96,8 @@ pub struct Encoded {
 /// Content streams can be used for page contents, but also to describe color
 /// glyphs and patterns.
 pub struct Builder<'a, R = ()> {
+    /// Settings for PDF export.
+    pub(crate) options: &'a PdfOptions<'a>,
     /// A list of all resources that are used in the content stream.
     pub(crate) resources: &'a mut Resources<R>,
     /// The PDF content stream that is being built.
@@ -107,8 +114,13 @@ pub struct Builder<'a, R = ()> {

 impl<'a, R> Builder<'a, R> {
     /// Create a new content builder.
-    pub fn new(resources: &'a mut Resources<R>, size: Size) -> Self {
+    pub fn new(
+        options: &'a PdfOptions<'a>,
+        resources: &'a mut Resources<R>,
+        size: Size,
+    ) -> Self {
         Builder {
+            options,
             resources,
             uses_opacities: false,
             content: Content::new(),
@@ -187,9 +199,10 @@ pub(super) struct Transforms {
 }

 impl Builder<'_, ()> {
-    fn save_state(&mut self) {
+    fn save_state(&mut self) -> SourceResult<()> {
         self.saves.push(self.state.clone());
         self.content.save_state();
+        Ok(())
     }

     fn restore_state(&mut self) {
@@ -267,13 +280,19 @@ impl Builder<'_, ()> {
         self.state.size = size;
     }

-    fn set_fill(&mut self, fill: &Paint, on_text: bool, transforms: Transforms) {
+    fn set_fill(
+        &mut self,
+        fill: &Paint,
+        on_text: bool,
+        transforms: Transforms,
+    ) -> SourceResult<()> {
         if self.state.fill.as_ref() != Some(fill)
             || matches!(self.state.fill, Some(Paint::Gradient(_)))
         {
-            fill.set_as_fill(self, on_text, transforms);
+            fill.set_as_fill(self, on_text, transforms)?;
             self.state.fill = Some(fill.clone());
         }
+        Ok(())
     }

     pub fn set_fill_color_space(&mut self, space: Name<'static>) {
@@ -292,7 +311,7 @@ impl Builder<'_, ()> {
         stroke: &FixedStroke,
         on_text: bool,
         transforms: Transforms,
-    ) {
+    ) -> SourceResult<()> {
         if self.state.stroke.as_ref() != Some(stroke)
             || matches!(
                 self.state.stroke.as_ref().map(|s| &s.paint),
@@ -300,7 +319,7 @@ impl Builder<'_, ()> {
             )
         {
             let FixedStroke { paint, thickness, cap, join, dash, miter_limit } = stroke;
-            paint.set_as_stroke(self, on_text, transforms);
+            paint.set_as_stroke(self, on_text, transforms)?;

             self.content.set_line_width(thickness.to_f32());
             if self.state.stroke.as_ref().map(|s| &s.cap) != Some(cap) {
@@ -324,6 +343,8 @@ impl Builder<'_, ()> {
             }
             self.state.stroke = Some(stroke.clone());
         }
+
+        Ok(())
     }

     pub fn set_stroke_color_space(&mut self, space: Name<'static>) {
@@ -346,26 +367,29 @@ impl Builder<'_, ()> {
 }

 /// Encode a frame into the content stream.
-pub(crate) fn write_frame(ctx: &mut Builder, frame: &Frame) {
+pub(crate) fn write_frame(ctx: &mut Builder, frame: &Frame) -> SourceResult<()> {
     for &(pos, ref item) in frame.items() {
         let x = pos.x.to_f32();
         let y = pos.y.to_f32();
         match item {
-            FrameItem::Group(group) => write_group(ctx, pos, group),
-            FrameItem::Text(text) => write_text(ctx, pos, text),
-            FrameItem::Shape(shape, _) => write_shape(ctx, pos, shape),
-            FrameItem::Image(image, size, _) => write_image(ctx, x, y, image, *size),
+            FrameItem::Group(group) => write_group(ctx, pos, group)?,
+            FrameItem::Text(text) => write_text(ctx, pos, text)?,
+            FrameItem::Shape(shape, _) => write_shape(ctx, pos, shape)?,
+            FrameItem::Image(image, size, span) => {
+                write_image(ctx, x, y, image, *size, *span)?
+            }
             FrameItem::Link(dest, size) => write_link(ctx, pos, dest, *size),
            FrameItem::Tag(_) => {}
         }
     }
+    Ok(())
 }

 /// Encode a group into the content stream.
-fn write_group(ctx: &mut Builder, pos: Point, group: &GroupItem) {
+fn write_group(ctx: &mut Builder, pos: Point, group: &GroupItem) -> SourceResult<()> {
     let translation = Transform::translate(pos.x, pos.y);

-    ctx.save_state();
+    ctx.save_state()?;

     if group.frame.kind().is_hard() {
         ctx.group_transform(
@@ -385,12 +409,14 @@ fn write_group(ctx: &mut Builder, pos: Point, group: &GroupItem) {
         ctx.content.end_path();
     }

-    write_frame(ctx, &group.frame);
+    write_frame(ctx, &group.frame)?;
     ctx.restore_state();
+
+    Ok(())
 }

 /// Encode a text run into the content stream.
-fn write_text(ctx: &mut Builder, pos: Point, text: &TextItem) {
+fn write_text(ctx: &mut Builder, pos: Point, text: &TextItem) -> SourceResult<()> {
     let ttf = text.font.ttf();
     let tables = ttf.tables();

@@ -401,17 +427,17 @@ fn write_text(ctx: &mut Builder, pos: Point, text: &TextItem) {
         || tables.svg.is_some()
         || tables.colr.is_some();
     if !has_color_glyphs {
-        write_normal_text(ctx, pos, TextItemView::all_of(text));
-        return;
+        write_normal_text(ctx, pos, TextItemView::all_of(text))?;
+        return Ok(());
     }

     let color_glyph_count =
         text.glyphs.iter().filter(|g| is_color_glyph(&text.font, g)).count();

     if color_glyph_count == text.glyphs.len() {
-        write_color_glyphs(ctx, pos, TextItemView::all_of(text));
+        write_color_glyphs(ctx, pos, TextItemView::all_of(text))?;
     } else if color_glyph_count == 0 {
-        write_normal_text(ctx, pos, TextItemView::all_of(text));
+        write_normal_text(ctx, pos, TextItemView::all_of(text))?;
     } else {
         // Otherwise we need to split it in smaller text runs
         let mut offset = 0;
@@ -430,16 +456,22 @@ fn write_text(ctx: &mut Builder, pos: Point, text: &TextItem) {
             offset = end;
             // Actually write the sub text-run
             if color {
-                write_color_glyphs(ctx, pos, text_item_view);
+                write_color_glyphs(ctx, pos, text_item_view)?;
             } else {
-                write_normal_text(ctx, pos, text_item_view);
+                write_normal_text(ctx, pos, text_item_view)?;
             }
         }
     }
+
+    Ok(())
 }

 /// Encodes a text run (without any color glyph) into the content stream.
-fn write_normal_text(ctx: &mut Builder, pos: Point, text: TextItemView) {
+fn write_normal_text(
+    ctx: &mut Builder,
+    pos: Point,
+    text: TextItemView,
+) -> SourceResult<()> {
     let x = pos.x.to_f32();
     let y = pos.y.to_f32();

@@ -453,7 +485,7 @@ fn write_normal_text(ctx: &mut Builder, pos: Point, text: TextItemView) {
     }

     let fill_transform = ctx.state.transforms(Size::zero(), pos);
-    ctx.set_fill(&text.item.fill, true, fill_transform);
+    ctx.set_fill(&text.item.fill, true, fill_transform)?;

     let stroke = text.item.stroke.as_ref().and_then(|stroke| {
         if stroke.thickness.to_f32() > 0.0 {
@@ -464,7 +496,7 @@ fn write_normal_text(ctx: &mut Builder, pos: Point, text: TextItemView) {
     });

     if let Some(stroke) = stroke {
-        ctx.set_stroke(stroke, true, fill_transform);
+        ctx.set_stroke(stroke, true, fill_transform)?;
         ctx.set_text_rendering_mode(TextRenderingMode::FillStroke);
     } else {
         ctx.set_text_rendering_mode(TextRenderingMode::Fill);
@@ -539,10 +571,16 @@ fn write_normal_text(ctx: &mut Builder, pos: Point, text: TextItemView) {
     items.finish();
     positioned.finish();
     ctx.content.end_text();
+
+    Ok(())
 }

 /// Encodes a text run made only of color glyphs into the content stream
-fn write_color_glyphs(ctx: &mut Builder, pos: Point, text: TextItemView) {
+fn write_color_glyphs(
+    ctx: &mut Builder,
+    pos: Point,
+    text: TextItemView,
+) -> SourceResult<()> {
     let x = pos.x.to_f32();
     let y = pos.y.to_f32();

@@ -568,7 +606,7 @@ fn write_color_glyphs(ctx: &mut Builder, pos: Point, text: TextItemView) {
             .resources
             .color_fonts
             .get_or_insert_with(|| Box::new(ColorFontMap::new()));
-        let (font, index) = color_fonts.get(&text.item.font, glyph.id);
+        let (font, index) = color_fonts.get(ctx.options, &text.item.font, glyph.id)?;

         if last_font != Some(font) {
             ctx.content.set_font(
@@ -585,10 +623,12 @@ fn write_color_glyphs(ctx: &mut Builder, pos: Point, text: TextItemView) {
             .or_insert_with(|| text.text()[glyph.range()].into());
     }
     ctx.content.end_text();
+
+    Ok(())
 }

 /// Encode a geometrical shape into the content stream.
-fn write_shape(ctx: &mut Builder, pos: Point, shape: &Shape) {
+fn write_shape(ctx: &mut Builder, pos: Point, shape: &Shape) -> SourceResult<()> {
     let x = pos.x.to_f32();
     let y = pos.y.to_f32();

@@ -601,11 +641,11 @@ fn write_shape(ctx: &mut Builder, pos: Point, shape: &Shape) {
     });

     if shape.fill.is_none() && stroke.is_none() {
-        return;
+        return Ok(());
     }

     if let Some(fill) = &shape.fill {
-        ctx.set_fill(fill, false, ctx.state.transforms(shape.geometry.bbox_size(), pos));
+        ctx.set_fill(fill, false, ctx.state.transforms(shape.geometry.bbox_size(), pos))?;
     }

     if let Some(stroke) = stroke {
@@ -613,7 +653,7 @@ fn write_shape(ctx: &mut Builder, pos: Point, shape: &Shape) {
             stroke,
             false,
             ctx.state.transforms(shape.geometry.bbox_size(), pos),
-        );
+        )?;
     }

     ctx.set_opacities(stroke, shape.fill.as_ref());
@@ -645,6 +685,8 @@ fn write_shape(ctx: &mut Builder, pos: Point, shape: &Shape) {
         (Some(_), FillRule::NonZero, Some(_)) => ctx.content.fill_nonzero_and_stroke(),
         (Some(_), FillRule::EvenOdd, Some(_)) => ctx.content.fill_even_odd_and_stroke(),
     };
+
+    Ok(())
 }

 /// Encode a bezier path into the content stream.
@@ -671,14 +713,21 @@ fn write_path(ctx: &mut Builder, x: f32, y: f32, path: &Path) {
 }

 /// Encode a vector or raster image into the content stream.
-fn write_image(ctx: &mut Builder, x: f32, y: f32, image: &Image, size: Size) {
+fn write_image(
+    ctx: &mut Builder,
+    x: f32,
+    y: f32,
+    image: &Image,
+    size: Size,
+    span: Span,
+) -> SourceResult<()> {
     let index = ctx.resources.images.insert(image.clone());
     ctx.resources.deferred_images.entry(index).or_insert_with(|| {
         let (image, color_space) = deferred_image(image.clone());
         if let Some(color_space) = color_space {
             ctx.resources.colors.mark_as_used(color_space);
         }
-        image
+        (image, span)
     });

     ctx.reset_opacities();
@@ -693,7 +742,7 @@ fn write_image(ctx: &mut Builder, x: f32, y: f32, image: &Image, size: Size) {
         let mut image_span =
             ctx.content.begin_marked_content_with_properties(Name(b"Span"));
         let mut image_alt = image_span.properties();
-        image_alt.pair(Name(b"Alt"), pdf_writer::Str(alt.as_bytes()));
+        image_alt.pair(Name(b"Alt"), Str(alt.as_bytes()));
         image_alt.finish();
         image_span.finish();

@@ -704,6 +753,7 @@ fn write_image(ctx: &mut Builder, x: f32, y: f32, image: &Image, size: Size) {
     }

     ctx.content.restore_state();
+    Ok(())
 }

 /// Save a link for later writing in the annotations dictionary.
@@ -1,6 +1,7 @@
 use std::collections::HashMap;

 use pdf_writer::Ref;
+use typst::diag::SourceResult;

 use crate::{PdfChunk, WithGlobalRefs};

@@ -28,7 +29,7 @@ impl ExtGState {
 /// Embed all used external graphics states into the PDF.
 pub fn write_graphic_states(
     context: &WithGlobalRefs,
-) -> (PdfChunk, HashMap<ExtGState, Ref>) {
+) -> SourceResult<(PdfChunk, HashMap<ExtGState, Ref>)> {
     let mut chunk = PdfChunk::new();
     let mut out = HashMap::new();
     context.resources.traverse(&mut |resources| {
@@ -44,7 +45,9 @@ pub fn write_graphic_states(
                 .non_stroking_alpha(external_gs.fill_opacity as f32 / 255.0)
                 .stroking_alpha(external_gs.stroke_opacity as f32 / 255.0);
         }
-    });

-    (chunk, out)
+        Ok(())
+    })?;
+
+    Ok((chunk, out))
 }
@@ -3,13 +3,13 @@ use std::hash::Hash;
 use std::sync::Arc;

 use ecow::{eco_format, EcoString};
-use pdf_writer::{
-    types::{CidFontType, FontFlags, SystemInfo, UnicodeCmap},
-    writers::FontDescriptor,
-    Chunk, Filter, Finish, Name, Rect, Ref, Str,
-};
+use pdf_writer::types::{CidFontType, FontFlags, SystemInfo, UnicodeCmap};
+use pdf_writer::writers::FontDescriptor;
+use pdf_writer::{Chunk, Filter, Finish, Name, Rect, Ref, Str};
 use subsetter::GlyphRemapper;
 use ttf_parser::{name_id, GlyphId, Tag};
+use typst::diag::{At, SourceResult};
+use typst::syntax::Span;
 use typst::text::Font;
 use typst::utils::SliceExt;

@@ -26,7 +26,9 @@ pub(crate) const SYSTEM_INFO: SystemInfo = SystemInfo {

 /// Embed all used fonts into the PDF.
 #[typst_macros::time(name = "write fonts")]
-pub fn write_fonts(context: &WithGlobalRefs) -> (PdfChunk, HashMap<Font, Ref>) {
+pub fn write_fonts(
+    context: &WithGlobalRefs,
+) -> SourceResult<(PdfChunk, HashMap<Font, Ref>)> {
     let mut chunk = PdfChunk::new();
     let mut out = HashMap::new();
     context.resources.traverse(&mut |resources| {
@@ -118,7 +120,14 @@ pub fn write_fonts(context: &WithGlobalRefs) -> (PdfChunk, HashMap<Font, Ref>) {
             let cmap = create_cmap(glyph_set, glyph_remapper);
             chunk.cmap(cmap_ref, &cmap).filter(Filter::FlateDecode);

-            let subset = subset_font(font, glyph_remapper);
+            let subset = subset_font(font, glyph_remapper)
+                .map_err(|err| {
+                    let postscript_name = font.find_name(name_id::POST_SCRIPT_NAME);
+                    let name = postscript_name.as_deref().unwrap_or(&font.info().family);
+                    eco_format!("failed to process font {name}: {err}")
+                })
+                .at(Span::detached())?;

             let mut stream = chunk.stream(data_ref, &subset);
             stream.filter(Filter::FlateDecode);
             if is_cff {
@@ -134,9 +143,11 @@ pub fn write_fonts(context: &WithGlobalRefs) -> (PdfChunk, HashMap<Font, Ref>) {
                 font_descriptor.font_file2(data_ref);
             }
         }
-    });

-    (chunk, out)
+        Ok(())
+    })?;
+
+    Ok((chunk, out))
 }

 /// Writes a FontDescriptor dictionary.
@@ -144,16 +155,16 @@ pub fn write_font_descriptor<'a>(
     pdf: &'a mut Chunk,
     descriptor_ref: Ref,
     font: &'a Font,
-    base_font: &EcoString,
+    base_font: &str,
 ) -> FontDescriptor<'a> {
     let ttf = font.ttf();
     let metrics = font.metrics();
-    let postscript_name = font
+    let serif = font
         .find_name(name_id::POST_SCRIPT_NAME)
-        .unwrap_or_else(|| "unknown".to_string());
+        .is_some_and(|name| name.contains("Serif"));

     let mut flags = FontFlags::empty();
-    flags.set(FontFlags::SERIF, postscript_name.contains("Serif"));
+    flags.set(FontFlags::SERIF, serif);
     flags.set(FontFlags::FIXED_PITCH, ttf.is_monospaced());
     flags.set(FontFlags::ITALIC, ttf.is_italic());
     flags.insert(FontFlags::SYMBOLIC);
@@ -196,12 +207,13 @@ pub fn write_font_descriptor<'a>(
 /// In both cases, this returns the already compressed data.
 #[comemo::memoize]
 #[typst_macros::time(name = "subset font")]
-fn subset_font(font: &Font, glyph_remapper: &GlyphRemapper) -> Arc<Vec<u8>> {
+fn subset_font(
+    font: &Font,
+    glyph_remapper: &GlyphRemapper,
+) -> Result<Arc<Vec<u8>>, subsetter::Error> {
     let data = font.data();
-    // TODO: Fail export instead of unwrapping once export diagnostics exist.
-    let subsetted = subsetter::subset(data, font.index(), glyph_remapper).unwrap();
-
-    let mut data = subsetted.as_ref();
+    let subset = subsetter::subset(data, font.index(), glyph_remapper)?;
+    let mut data = subset.as_ref();

     // Extract the standalone CFF font program if applicable.
     let raw = ttf_parser::RawFace::parse(data, 0).unwrap();
@@ -209,7 +221,7 @@ fn subset_font(font: &Font, glyph_remapper: &GlyphRemapper) -> Arc<Vec<u8>> {
         data = cff;
     }

-    Arc::new(deflate(data))
+    Ok(Arc::new(deflate(data)))
 }

 /// Produce a unique 6 letter tag for a glyph set.
@@ -3,12 +3,10 @@ use std::f32::consts::{PI, TAU};
 use std::sync::Arc;

 use ecow::eco_format;
-use pdf_writer::{
-    types::{ColorSpaceOperand, FunctionShadingType},
-    writers::StreamShadingType,
-    Filter, Finish, Name, Ref,
-};
-
+use pdf_writer::types::{ColorSpaceOperand, FunctionShadingType};
+use pdf_writer::writers::StreamShadingType;
+use pdf_writer::{Filter, Finish, Name, Ref};
+use typst::diag::SourceResult;
 use typst::layout::{Abs, Angle, Point, Quadrant, Ratio, Transform};
 use typst::utils::Numeric;
 use typst::visualize::{
@@ -38,7 +36,7 @@ pub struct PdfGradient {
 /// This is performed once after writing all pages.
 pub fn write_gradients(
     context: &WithGlobalRefs,
-) -> (PdfChunk, HashMap<PdfGradient, Ref>) {
+) -> SourceResult<(PdfChunk, HashMap<PdfGradient, Ref>)> {
     let mut chunk = PdfChunk::new();
     let mut out = HashMap::new();
     context.resources.traverse(&mut |resources| {
@@ -161,9 +159,11 @@ pub fn write_gradients(

             shading_pattern.matrix(transform_to_array(*transform));
         }
-    });

-    (chunk, out)
+        Ok(())
+    })?;
+
+    Ok((chunk, out))
 }

 /// Writes an exponential or stitched function that expresses the gradient.
@@ -249,7 +249,7 @@ impl PaintEncode for Gradient {
         ctx: &mut content::Builder,
         on_text: bool,
         transforms: content::Transforms,
-    ) {
+    ) -> SourceResult<()> {
         ctx.reset_fill_color_space();

         let index = register_gradient(ctx, self, on_text, transforms);
@@ -258,6 +258,7 @@ impl PaintEncode for Gradient {

         ctx.content.set_fill_color_space(ColorSpaceOperand::Pattern);
         ctx.content.set_fill_pattern(None, name);
+        Ok(())
     }

     fn set_as_stroke(
@@ -265,7 +266,7 @@ impl PaintEncode for Gradient {
         ctx: &mut content::Builder,
         on_text: bool,
         transforms: content::Transforms,
-    ) {
+    ) -> SourceResult<()> {
         ctx.reset_stroke_color_space();

         let index = register_gradient(ctx, self, on_text, transforms);
@@ -274,6 +275,7 @@ impl PaintEncode for Gradient {

         ctx.content.set_stroke_color_space(ColorSpaceOperand::Pattern);
         ctx.content.set_stroke_pattern(None, name);
+        Ok(())
     }
 }

@ -1,8 +1,10 @@
 use std::collections::HashMap;
 use std::io::Cursor;
 
+use ecow::eco_format;
 use image::{DynamicImage, GenericImageView, Rgba};
 use pdf_writer::{Chunk, Filter, Finish, Ref};
+use typst::diag::{At, SourceResult, StrResult};
 use typst::utils::Deferred;
 use typst::visualize::{
     ColorSpace, Image, ImageKind, RasterFormat, RasterImage, SvgImage,
@ -12,7 +14,9 @@ use crate::{color, deflate, PdfChunk, WithGlobalRefs};
 
 /// Embed all used images into the PDF.
 #[typst_macros::time(name = "write images")]
-pub fn write_images(context: &WithGlobalRefs) -> (PdfChunk, HashMap<Image, Ref>) {
+pub fn write_images(
+    context: &WithGlobalRefs,
+) -> SourceResult<(PdfChunk, HashMap<Image, Ref>)> {
     let mut chunk = PdfChunk::new();
     let mut out = HashMap::new();
     context.resources.traverse(&mut |resources| {
@ -21,8 +25,10 @@ pub fn write_images(context: &WithGlobalRefs) -> (PdfChunk, HashMap<Image, Ref>)
                 continue;
             }
 
-            let handle = resources.deferred_images.get(&i).unwrap();
-            match handle.wait() {
+            let (handle, span) = resources.deferred_images.get(&i).unwrap();
+            let encoded = handle.wait().as_ref().map_err(Clone::clone).at(*span)?;
+
+            match encoded {
                 EncodedImage::Raster {
                     data,
                     filter,
@ -99,16 +105,20 @@ pub fn write_images(context: &WithGlobalRefs) -> (PdfChunk, HashMap<Image, Ref>)
                 }
             }
         }
-    });
-
-    (chunk, out)
+
+        Ok(())
+    })?;
+
+    Ok((chunk, out))
 }
 
 /// Creates a new PDF image from the given image.
 ///
 /// Also starts the deferred encoding of the image.
 #[comemo::memoize]
-pub fn deferred_image(image: Image) -> (Deferred<EncodedImage>, Option<ColorSpace>) {
+pub fn deferred_image(
+    image: Image,
+) -> (Deferred<StrResult<EncodedImage>>, Option<ColorSpace>) {
     let color_space = match image.kind() {
         ImageKind::Raster(raster) if raster.icc().is_none() => {
             if raster.dynamic().color().channel_count() > 2 {
@ -130,11 +140,20 @@ pub fn deferred_image(image: Image) -> (Deferred<EncodedImage>, Option<ColorSpac
             let alpha =
                 raster.dynamic().color().has_alpha().then(|| encode_alpha(&raster));
 
-            EncodedImage::Raster { data, filter, has_color, width, height, icc, alpha }
+            Ok(EncodedImage::Raster {
+                data,
+                filter,
+                has_color,
+                width,
+                height,
+                icc,
+                alpha,
+            })
         }
         ImageKind::Svg(svg) => {
-            let (chunk, id) = encode_svg(svg);
-            EncodedImage::Svg(chunk, id)
+            let (chunk, id) = encode_svg(svg)
+                .map_err(|err| eco_format!("failed to convert SVG to PDF: {err}"))?;
+            Ok(EncodedImage::Svg(chunk, id))
         }
     });
 
@ -182,9 +201,8 @@ fn encode_alpha(raster: &RasterImage) -> (Vec<u8>, Filter) {
 
 /// Encode an SVG into a chunk of PDF objects.
 #[typst_macros::time(name = "encode svg")]
-fn encode_svg(svg: &SvgImage) -> (Chunk, Ref) {
-    // TODO: Don't unwrap once we have export diagnostics.
-    svg2pdf::to_chunk(svg.tree(), svg2pdf::ConversionOptions::default()).unwrap()
+fn encode_svg(svg: &SvgImage) -> Result<(Chunk, Ref), svg2pdf::ConversionError> {
+    svg2pdf::to_chunk(svg.tree(), svg2pdf::ConversionOptions::default())
 }
 
 /// A pre-encoded image.
 
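The image step above keeps encoding deferred but stores a fallible result, so the error only surfaces when the PDF is assembled and can be attached to the image's source span. A minimal sketch of that pattern, using the same `Deferred`, `StrResult`, and `At` items the hunk imports (the helper name `wait_spanned` is made up for illustration, not part of the commit):

    use typst::diag::{At, SourceResult, StrResult};
    use typst::syntax::Span;
    use typst::utils::Deferred;

    /// Hypothetical helper: wait for a deferred, fallible result and turn its
    /// string error into a diagnostic attached to the span of the offending image.
    fn wait_spanned<T>(handle: &Deferred<StrResult<T>>, span: Span) -> SourceResult<&T> {
        // `wait()` blocks until the background encoding has finished; the error
        // is cloned out of the shared result and annotated with the span.
        handle.wait().as_ref().map_err(Clone::clone).at(span)
    }
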
@ -20,6 +20,7 @@ use std::ops::{Deref, DerefMut};
 
 use base64::Engine;
 use pdf_writer::{Chunk, Pdf, Ref};
+use typst::diag::SourceResult;
 use typst::foundations::{Datetime, Smart};
 use typst::layout::{Abs, Em, PageRanges, Transform};
 use typst::model::Document;
@ -64,31 +65,53 @@ use crate::resources::{
 /// The `page_ranges` option specifies which ranges of pages should be exported
 /// in the PDF. When `None`, all pages should be exported.
 #[typst_macros::time(name = "pdf")]
-pub fn pdf(
-    document: &Document,
-    ident: Smart<&str>,
-    timestamp: Option<Datetime>,
-    page_ranges: Option<PageRanges>,
-) -> Vec<u8> {
-    PdfBuilder::new(document, page_ranges)
-        .phase(|builder| builder.run(traverse_pages))
-        .phase(|builder| GlobalRefs {
-            color_functions: builder.run(alloc_color_functions_refs),
-            pages: builder.run(alloc_page_refs),
-            resources: builder.run(alloc_resources_refs),
-        })
-        .phase(|builder| References {
-            named_destinations: builder.run(write_named_destinations),
-            fonts: builder.run(write_fonts),
-            color_fonts: builder.run(write_color_fonts),
-            images: builder.run(write_images),
-            gradients: builder.run(write_gradients),
-            patterns: builder.run(write_patterns),
-            ext_gs: builder.run(write_graphic_states),
-        })
-        .phase(|builder| builder.run(write_page_tree))
-        .phase(|builder| builder.run(write_resource_dictionaries))
-        .export_with(ident, timestamp, write_catalog)
+pub fn pdf(document: &Document, options: &PdfOptions) -> SourceResult<Vec<u8>> {
+    PdfBuilder::new(document, options)
+        .phase(|builder| builder.run(traverse_pages))?
+        .phase(|builder| {
+            Ok(GlobalRefs {
+                color_functions: builder.run(alloc_color_functions_refs)?,
+                pages: builder.run(alloc_page_refs)?,
+                resources: builder.run(alloc_resources_refs)?,
+            })
+        })?
+        .phase(|builder| {
+            Ok(References {
+                named_destinations: builder.run(write_named_destinations)?,
+                fonts: builder.run(write_fonts)?,
+                color_fonts: builder.run(write_color_fonts)?,
+                images: builder.run(write_images)?,
+                gradients: builder.run(write_gradients)?,
+                patterns: builder.run(write_patterns)?,
+                ext_gs: builder.run(write_graphic_states)?,
+            })
+        })?
+        .phase(|builder| builder.run(write_page_tree))?
+        .phase(|builder| builder.run(write_resource_dictionaries))?
+        .export_with(write_catalog)
+}
+
+/// Settings for PDF export.
+#[derive(Default)]
+pub struct PdfOptions<'a> {
+    /// If given, shall be a string that uniquely and stably identifies the
+    /// document. It should not change between compilations of the same
+    /// document. **If you cannot provide such a stable identifier, just pass
+    /// `Smart::Auto` rather than trying to come up with one.** The CLI, for
+    /// example, does not have a well-defined notion of a long-lived project and
+    /// as such just passes `Smart::Auto`.
+    ///
+    /// If an `ident` is given, the hash of it will be used to create a PDF
+    /// document identifier (the identifier itself is not leaked). If `ident` is
+    /// `Auto`, a hash of the document's title and author is used instead (which
+    /// is reasonably unique and stable).
+    pub ident: Smart<&'a str>,
+    /// If given, is expected to be the creation date of the document as a UTC
+    /// datetime. It will only be used if `set document(date: ..)` is `auto`.
+    pub timestamp: Option<Datetime>,
+    /// Specifies which ranges of pages should be exported in the PDF. When
+    /// `None`, all pages should be exported.
+    pub page_ranges: Option<PageRanges>,
 }
 
 /// A struct to build a PDF following a fixed succession of phases.
@ -124,9 +147,8 @@ struct PdfBuilder<S> {
 struct WithDocument<'a> {
     /// The Typst document that is exported.
     document: &'a Document,
-    /// Page ranges to export.
-    /// When `None`, all pages are exported.
-    exported_pages: Option<PageRanges>,
+    /// Settings for PDF export.
+    options: &'a PdfOptions<'a>,
 }
 
 /// At this point, resources were listed, but they don't have any reference
@ -135,7 +157,7 @@ struct WithDocument<'a> {
 /// This phase allocates some global references.
 struct WithResources<'a> {
     document: &'a Document,
-    exported_pages: Option<PageRanges>,
+    options: &'a PdfOptions<'a>,
     /// The content of the pages encoded as PDF content streams.
     ///
     /// The pages are at the index corresponding to their page number, but they
@ -170,7 +192,7 @@ impl<'a> From<(WithDocument<'a>, (Vec<Option<EncodedPage>>, Resources<()>))>
     ) -> Self {
         Self {
             document: previous.document,
-            exported_pages: previous.exported_pages,
+            options: previous.options,
             pages,
             resources,
         }
@ -184,7 +206,7 @@ impl<'a> From<(WithDocument<'a>, (Vec<Option<EncodedPage>>, Resources<()>))>
 /// that will be collected in [`References`].
 struct WithGlobalRefs<'a> {
     document: &'a Document,
-    exported_pages: Option<PageRanges>,
+    options: &'a PdfOptions<'a>,
     pages: Vec<Option<EncodedPage>>,
     /// Resources are the same as in previous phases, but each dictionary now has a reference.
     resources: Resources,
@ -196,7 +218,7 @@ impl<'a> From<(WithResources<'a>, GlobalRefs)> for WithGlobalRefs<'a> {
     fn from((previous, globals): (WithResources<'a>, GlobalRefs)) -> Self {
         Self {
             document: previous.document,
-            exported_pages: previous.exported_pages,
+            options: previous.options,
             pages: previous.pages,
             resources: previous.resources.with_refs(&globals.resources),
             globals,
@ -226,10 +248,10 @@ struct References {
 /// tree is going to be written, and given a reference. It is also at this point that
 /// the page contents is actually written.
 struct WithRefs<'a> {
-    globals: GlobalRefs,
     document: &'a Document,
+    options: &'a PdfOptions<'a>,
+    globals: GlobalRefs,
     pages: Vec<Option<EncodedPage>>,
-    exported_pages: Option<PageRanges>,
     resources: Resources,
     /// References that were allocated for resources.
     references: References,
@ -238,9 +260,9 @@ struct WithRefs<'a> {
 impl<'a> From<(WithGlobalRefs<'a>, References)> for WithRefs<'a> {
     fn from((previous, references): (WithGlobalRefs<'a>, References)) -> Self {
         Self {
-            globals: previous.globals,
-            exported_pages: previous.exported_pages,
             document: previous.document,
+            options: previous.options,
+            globals: previous.globals,
             pages: previous.pages,
             resources: previous.resources,
             references,
@ -252,10 +274,10 @@ impl<'a> From<(WithGlobalRefs<'a>, References)> for WithRefs<'a> {
 ///
 /// Each sub-resource gets its own isolated resource dictionary.
 struct WithEverything<'a> {
-    globals: GlobalRefs,
     document: &'a Document,
+    options: &'a PdfOptions<'a>,
+    globals: GlobalRefs,
     pages: Vec<Option<EncodedPage>>,
-    exported_pages: Option<PageRanges>,
     resources: Resources,
     references: References,
     /// Reference that was allocated for the page tree.
@ -271,9 +293,9 @@ impl<'a> From<(WithEverything<'a>, ())> for WithEverything<'a> {
 impl<'a> From<(WithRefs<'a>, Ref)> for WithEverything<'a> {
     fn from((previous, page_tree_ref): (WithRefs<'a>, Ref)) -> Self {
         Self {
-            exported_pages: previous.exported_pages,
-            globals: previous.globals,
             document: previous.document,
+            options: previous.options,
+            globals: previous.globals,
             resources: previous.resources,
             references: previous.references,
             pages: previous.pages,
@ -284,42 +306,42 @@ impl<'a> From<(WithRefs<'a>, Ref)> for WithEverything<'a> {
 
 impl<'a> PdfBuilder<WithDocument<'a>> {
     /// Start building a PDF for a Typst document.
-    fn new(document: &'a Document, exported_pages: Option<PageRanges>) -> Self {
+    fn new(document: &'a Document, options: &'a PdfOptions<'a>) -> Self {
         Self {
             alloc: Ref::new(1),
             pdf: Pdf::new(),
-            state: WithDocument { document, exported_pages },
+            state: WithDocument { document, options },
         }
     }
 }
 
 impl<S> PdfBuilder<S> {
     /// Start a new phase, and save its output in the global state.
-    fn phase<NS, B, O>(mut self, builder: B) -> PdfBuilder<NS>
+    fn phase<NS, B, O>(mut self, builder: B) -> SourceResult<PdfBuilder<NS>>
     where
         // New state
         NS: From<(S, O)>,
         // Builder
-        B: Fn(&mut Self) -> O,
+        B: Fn(&mut Self) -> SourceResult<O>,
     {
-        let output = builder(&mut self);
-        PdfBuilder {
+        let output = builder(&mut self)?;
+        Ok(PdfBuilder {
             state: NS::from((self.state, output)),
             alloc: self.alloc,
             pdf: self.pdf,
-        }
+        })
     }
 
-    /// Runs a step with the current state, merge its output in the PDF file,
-    /// and renumber any references it returned.
-    fn run<P, O>(&mut self, process: P) -> O
+    /// Run a step with the current state, merges its output into the PDF file,
+    /// and renumbers any references it returned.
+    fn run<P, O>(&mut self, process: P) -> SourceResult<O>
     where
         // Process
-        P: Fn(&S) -> (PdfChunk, O),
+        P: Fn(&S) -> SourceResult<(PdfChunk, O)>,
         // Output
         O: Renumber,
     {
-        let (chunk, mut output) = process(&self.state);
+        let (chunk, mut output) = process(&self.state)?;
         // Allocate a final reference for each temporary one
         let allocated = chunk.alloc.get() - TEMPORARY_REFS_START;
         let offset = TEMPORARY_REFS_START - self.alloc.get();
@ -336,22 +358,17 @@ impl<S> PdfBuilder<S> {
 
         self.alloc = Ref::new(self.alloc.get() + allocated);
 
-        output
+        Ok(output)
     }
 
     /// Finalize the PDF export and returns the buffer representing the
     /// document.
-    fn export_with<P>(
-        mut self,
-        ident: Smart<&str>,
-        timestamp: Option<Datetime>,
-        process: P,
-    ) -> Vec<u8>
+    fn export_with<P>(mut self, process: P) -> SourceResult<Vec<u8>>
     where
-        P: Fn(S, Smart<&str>, Option<Datetime>, &mut Pdf, &mut Ref),
+        P: Fn(S, &mut Pdf, &mut Ref) -> SourceResult<()>,
     {
-        process(self.state, ident, timestamp, &mut self.pdf, &mut self.alloc);
-        self.pdf.finish()
+        process(self.state, &mut self.pdf, &mut self.alloc)?;
+        Ok(self.pdf.finish())
     }
 }
 
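For callers, the change above replaces the separate `ident`, `timestamp`, and `page_ranges` parameters with a single `PdfOptions` struct and makes export fallible. A rough usage sketch, assuming `document` is an already compiled `Document` and with error handling simplified:

    use typst::foundations::Smart;
    use typst_pdf::{pdf, PdfOptions};

    let options = PdfOptions {
        ident: Smart::Auto,
        timestamp: None,
        page_ranges: None,
    };

    match pdf(&document, &options) {
        // On success, the finished PDF bytes can be written to disk.
        Ok(buffer) => std::fs::write("output.pdf", buffer).expect("failed to write PDF"),
        // On failure, export now reports source diagnostics instead of panicking.
        Err(errors) => eprintln!("PDF export failed with {} error(s)", errors.len()),
    }
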
@ -1,6 +1,8 @@
 use std::collections::{HashMap, HashSet};
 
-use pdf_writer::{writers::Destination, Ref};
+use pdf_writer::writers::Destination;
+use pdf_writer::Ref;
+use typst::diag::SourceResult;
 use typst::foundations::{Label, NativeElement};
 use typst::introspection::Location;
 use typst::layout::Abs;
@ -34,7 +36,7 @@ impl Renumber for NamedDestinations {
 /// destination objects.
 pub fn write_named_destinations(
     context: &WithGlobalRefs,
-) -> (PdfChunk, NamedDestinations) {
+) -> SourceResult<(PdfChunk, NamedDestinations)> {
     let mut chunk = PdfChunk::new();
     let mut out = NamedDestinations::default();
     let mut seen = HashSet::new();
@ -74,5 +76,5 @@ pub fn write_named_destinations(
         }
     }
 
-    (chunk, out)
+    Ok((chunk, out))
 }
 
@ -1,7 +1,6 @@
 use std::num::NonZeroUsize;
 
 use pdf_writer::{Finish, Pdf, Ref, TextStr};
-
 use typst::foundations::{NativeElement, Packed, StyleChain};
 use typst::layout::Abs;
 use typst::model::HeadingElem;
@ -25,7 +24,7 @@ pub(crate) fn write_outline(
     let elements = ctx.document.introspector.query(&HeadingElem::elem().select());
 
     for elem in elements.iter() {
-        if let Some(page_ranges) = &ctx.exported_pages {
+        if let Some(page_ranges) = &ctx.options.page_ranges {
             if !page_ranges
                 .includes_page(ctx.document.introspector.page(elem.location().unwrap()))
             {
 
@ -2,30 +2,33 @@ use std::collections::HashMap;
 use std::num::NonZeroUsize;
 
 use ecow::EcoString;
-use pdf_writer::{
-    types::{ActionType, AnnotationFlags, AnnotationType, NumberingStyle},
-    Filter, Finish, Name, Rect, Ref, Str,
-};
+use pdf_writer::types::{ActionType, AnnotationFlags, AnnotationType, NumberingStyle};
+use pdf_writer::{Filter, Finish, Name, Rect, Ref, Str};
+use typst::diag::SourceResult;
 use typst::foundations::Label;
 use typst::introspection::Location;
 use typst::layout::{Abs, Page};
 use typst::model::{Destination, Numbering};
 use typst::text::Case;
 
-use crate::Resources;
-use crate::{content, AbsExt, PdfChunk, WithDocument, WithRefs, WithResources};
+use crate::content;
+use crate::{
+    AbsExt, PdfChunk, PdfOptions, Resources, WithDocument, WithRefs, WithResources,
+};
 
 /// Construct page objects.
 #[typst_macros::time(name = "construct pages")]
+#[allow(clippy::type_complexity)]
 pub fn traverse_pages(
     state: &WithDocument,
-) -> (PdfChunk, (Vec<Option<EncodedPage>>, Resources<()>)) {
+) -> SourceResult<(PdfChunk, (Vec<Option<EncodedPage>>, Resources<()>))> {
     let mut resources = Resources::default();
     let mut pages = Vec::with_capacity(state.document.pages.len());
     let mut skipped_pages = 0;
     for (i, page) in state.document.pages.iter().enumerate() {
         if state
-            .exported_pages
+            .options
+            .page_ranges
             .as_ref()
             .is_some_and(|ranges| !ranges.includes_page_index(i))
         {
@ -33,7 +36,7 @@ pub fn traverse_pages(
             pages.push(None);
             skipped_pages += 1;
         } else {
-            let mut encoded = construct_page(&mut resources, page);
+            let mut encoded = construct_page(state.options, &mut resources, page)?;
             encoded.label = page
                 .numbering
                 .as_ref()
@ -52,29 +55,43 @@ pub fn traverse_pages(
         }
     }
 
-    (PdfChunk::new(), (pages, resources))
+    Ok((PdfChunk::new(), (pages, resources)))
 }
 
 /// Construct a page object.
 #[typst_macros::time(name = "construct page")]
-fn construct_page(out: &mut Resources<()>, page: &Page) -> EncodedPage {
-    let content = content::build(out, &page.frame, page.fill_or_transparent(), None);
-    EncodedPage { content, label: None }
+fn construct_page(
+    options: &PdfOptions,
+    out: &mut Resources<()>,
+    page: &Page,
+) -> SourceResult<EncodedPage> {
+    Ok(EncodedPage {
+        content: content::build(
+            options,
+            out,
+            &page.frame,
+            page.fill_or_transparent(),
+            None,
+        )?,
+        label: None,
+    })
 }
 
 /// Allocate a reference for each exported page.
-pub fn alloc_page_refs(context: &WithResources) -> (PdfChunk, Vec<Option<Ref>>) {
+pub fn alloc_page_refs(
+    context: &WithResources,
+) -> SourceResult<(PdfChunk, Vec<Option<Ref>>)> {
     let mut chunk = PdfChunk::new();
     let page_refs = context
         .pages
         .iter()
         .map(|p| p.as_ref().map(|_| chunk.alloc()))
        .collect();
-    (chunk, page_refs)
+    Ok((chunk, page_refs))
 }
 
 /// Write the page tree.
-pub fn write_page_tree(ctx: &WithRefs) -> (PdfChunk, Ref) {
+pub fn write_page_tree(ctx: &WithRefs) -> SourceResult<(PdfChunk, Ref)> {
     let mut chunk = PdfChunk::new();
     let page_tree_ref = chunk.alloc.bump();
 
@ -95,7 +112,7 @@ pub fn write_page_tree(ctx: &WithRefs) -> (PdfChunk, Ref) {
         .count(ctx.pages.len() as i32)
         .kids(ctx.globals.pages.iter().filter_map(Option::as_ref).copied());
 
-    (chunk, page_tree_ref)
+    Ok((chunk, page_tree_ref))
 }
 
 /// Write a page tree node.
 
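The page traversal above now reads the export filter from `PdfOptions::page_ranges`; pages outside the requested ranges are pushed as `None` and skipped. A small sketch of the same check in isolation (the helper name is made up for illustration):

    use typst::layout::PageRanges;

    /// Hypothetical helper mirroring the filter in `traverse_pages`: a page is
    /// exported when no ranges are given or when its index is included.
    fn is_exported(page_ranges: Option<&PageRanges>, index: usize) -> bool {
        !page_ranges.is_some_and(|ranges| !ranges.includes_page_index(index))
    }
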
@ -1,27 +1,28 @@
 use std::collections::HashMap;
 
 use ecow::eco_format;
-use pdf_writer::{
-    types::{ColorSpaceOperand, PaintType, TilingType},
-    Filter, Name, Rect, Ref,
-};
-
+use pdf_writer::types::{ColorSpaceOperand, PaintType, TilingType};
+use pdf_writer::{Filter, Name, Rect, Ref};
+use typst::diag::SourceResult;
 use typst::layout::{Abs, Ratio, Transform};
 use typst::utils::Numeric;
 use typst::visualize::{Pattern, RelativeTo};
 
-use crate::{color::PaintEncode, resources::Remapper, Resources, WithGlobalRefs};
-use crate::{content, resources::ResourcesRefs};
-use crate::{transform_to_array, PdfChunk};
+use crate::color::PaintEncode;
+use crate::content;
+use crate::resources::{Remapper, ResourcesRefs};
+use crate::{transform_to_array, PdfChunk, Resources, WithGlobalRefs};
 
 /// Writes the actual patterns (tiling patterns) to the PDF.
 /// This is performed once after writing all pages.
-pub fn write_patterns(context: &WithGlobalRefs) -> (PdfChunk, HashMap<PdfPattern, Ref>) {
+pub fn write_patterns(
+    context: &WithGlobalRefs,
+) -> SourceResult<(PdfChunk, HashMap<PdfPattern, Ref>)> {
     let mut chunk = PdfChunk::new();
     let mut out = HashMap::new();
     context.resources.traverse(&mut |resources| {
         let Some(patterns) = &resources.patterns else {
-            return;
+            return Ok(());
         };
 
         for pdf_pattern in patterns.remapper.items() {
@ -60,9 +61,11 @@ pub fn write_patterns(context: &WithGlobalRefs) -> (PdfChunk, HashMap<PdfPattern
                 ))
                 .filter(Filter::FlateDecode);
         }
-    });
-
-    (chunk, out)
+
+        Ok(())
+    })?;
+
+    Ok((chunk, out))
 }
 
 /// A pattern and its transform.
@ -82,7 +85,7 @@ fn register_pattern(
     pattern: &Pattern,
     on_text: bool,
     mut transforms: content::Transforms,
-) -> usize {
+) -> SourceResult<usize> {
     let patterns = ctx
         .resources
         .patterns
@ -103,7 +106,13 @@ fn register_pattern(
     };
 
     // Render the body.
-    let content = content::build(&mut patterns.resources, pattern.frame(), None, None);
+    let content = content::build(
+        ctx.options,
+        &mut patterns.resources,
+        pattern.frame(),
+        None,
+        None,
+    )?;
 
     let pdf_pattern = PdfPattern {
         transform,
@ -111,7 +120,7 @@ fn register_pattern(
         content: content.content.wait().clone(),
     };
 
-    patterns.remapper.insert(pdf_pattern)
+    Ok(patterns.remapper.insert(pdf_pattern))
 }
 
 impl PaintEncode for Pattern {
@ -120,15 +129,16 @@ impl PaintEncode for Pattern {
         ctx: &mut content::Builder,
         on_text: bool,
         transforms: content::Transforms,
-    ) {
+    ) -> SourceResult<()> {
         ctx.reset_fill_color_space();
 
-        let index = register_pattern(ctx, self, on_text, transforms);
+        let index = register_pattern(ctx, self, on_text, transforms)?;
         let id = eco_format!("P{index}");
         let name = Name(id.as_bytes());
 
         ctx.content.set_fill_color_space(ColorSpaceOperand::Pattern);
         ctx.content.set_fill_pattern(None, name);
+        Ok(())
     }
 
     fn set_as_stroke(
@ -136,15 +146,16 @@ impl PaintEncode for Pattern {
         ctx: &mut content::Builder,
         on_text: bool,
         transforms: content::Transforms,
-    ) {
+    ) -> SourceResult<()> {
         ctx.reset_stroke_color_space();
 
-        let index = register_pattern(ctx, self, on_text, transforms);
+        let index = register_pattern(ctx, self, on_text, transforms)?;
         let id = eco_format!("P{index}");
         let name = Name(id.as_bytes());
 
         ctx.content.set_stroke_color_space(ColorSpaceOperand::Pattern);
         ctx.content.set_stroke_pattern(None, name);
+        Ok(())
     }
 }
 
@ -12,14 +12,19 @@ use std::hash::Hash;
 use ecow::{eco_format, EcoString};
 use pdf_writer::{Dict, Finish, Name, Ref};
 use subsetter::GlyphRemapper;
-use typst::text::Lang;
-use typst::{text::Font, utils::Deferred, visualize::Image};
+use typst::diag::{SourceResult, StrResult};
+use typst::syntax::Span;
+use typst::text::{Font, Lang};
+use typst::utils::Deferred;
+use typst::visualize::Image;
 
-use crate::{
-    color::ColorSpaces, color_font::ColorFontMap, extg::ExtGState, gradient::PdfGradient,
-    image::EncodedImage, pattern::PatternRemapper, PdfChunk, Renumber, WithEverything,
-    WithResources,
-};
+use crate::color::ColorSpaces;
+use crate::color_font::ColorFontMap;
+use crate::extg::ExtGState;
+use crate::gradient::PdfGradient;
+use crate::image::EncodedImage;
+use crate::pattern::PatternRemapper;
+use crate::{PdfChunk, Renumber, WithEverything, WithResources};
 
 /// All the resources that have been collected when traversing the document.
 ///
@ -58,7 +63,7 @@ pub struct Resources<R = Ref> {
     /// Deduplicates images used across the document.
     pub images: Remapper<Image>,
     /// Handles to deferred image conversions.
-    pub deferred_images: HashMap<usize, Deferred<EncodedImage>>,
+    pub deferred_images: HashMap<usize, (Deferred<StrResult<EncodedImage>>, Span)>,
     /// Deduplicates gradients used across the document.
     pub gradients: Remapper<PdfGradient>,
     /// Deduplicates patterns used across the document.
@ -159,17 +164,18 @@ impl Resources<()> {
 impl<R> Resources<R> {
     /// Run a function on this resource dictionary and all
     /// of its sub-resources.
-    pub fn traverse<P>(&self, process: &mut P)
+    pub fn traverse<P>(&self, process: &mut P) -> SourceResult<()>
     where
-        P: FnMut(&Self),
+        P: FnMut(&Self) -> SourceResult<()>,
     {
-        process(self);
+        process(self)?;
         if let Some(color_fonts) = &self.color_fonts {
-            color_fonts.resources.traverse(process)
+            color_fonts.resources.traverse(process)?;
         }
         if let Some(patterns) = &self.patterns {
-            patterns.resources.traverse(process)
+            patterns.resources.traverse(process)?;
        }
+        Ok(())
     }
 }
 
@ -196,7 +202,9 @@ impl Renumber for ResourcesRefs {
 }
 
 /// Allocate references for all resource dictionaries.
-pub fn alloc_resources_refs(context: &WithResources) -> (PdfChunk, ResourcesRefs) {
+pub fn alloc_resources_refs(
+    context: &WithResources,
+) -> SourceResult<(PdfChunk, ResourcesRefs)> {
     let mut chunk = PdfChunk::new();
     /// Recursively explore resource dictionaries and assign them references.
     fn refs_for(resources: &Resources<()>, chunk: &mut PdfChunk) -> ResourcesRefs {
@ -214,7 +222,7 @@ pub fn alloc_resources_refs(context: &WithResources) -> (PdfChunk, ResourcesRefs
     }
 
     let refs = refs_for(&context.resources, &mut chunk);
-    (chunk, refs)
+    Ok((chunk, refs))
 }
 
 /// Write the resource dictionaries that will be referenced by all pages.
@ -224,7 +232,7 @@ pub fn alloc_resources_refs(context: &WithResources) -> (PdfChunk, ResourcesRefs
 /// feature breaks PDF merging with Apple Preview.
 ///
 /// Also write resource dictionaries for Type3 fonts and patterns.
-pub fn write_resource_dictionaries(ctx: &WithEverything) -> (PdfChunk, ()) {
+pub fn write_resource_dictionaries(ctx: &WithEverything) -> SourceResult<(PdfChunk, ())> {
     let mut chunk = PdfChunk::new();
     let mut used_color_spaces = ColorSpaces::default();
 
@ -287,11 +295,13 @@ pub fn write_resource_dictionaries(ctx: &WithEverything) -> (PdfChunk, ()) {
         resources
             .colors
             .write_color_spaces(color_spaces, &ctx.globals.color_functions);
-    });
+
+        Ok(())
+    })?;
 
     used_color_spaces.write_functions(&mut chunk, &ctx.globals.color_functions);
 
-    (chunk, ())
+    Ok((chunk, ()))
 }
 
 /// Assigns new, consecutive PDF-internal indices to items.
 
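`Resources::traverse` above becomes fallible: the visitor closure returns a `SourceResult<()>` and the recursion propagates the first error with `?`, which is what lets `write_images`, `write_patterns`, and the resource-dictionary writer abort cleanly. A generic sketch of that pattern (the tree type and names are illustrative, not from the crate):

    /// Illustrative tree type, not part of typst-pdf.
    struct Node {
        children: Vec<Node>,
    }

    /// Visit a node and all of its descendants, stopping at the first error.
    fn traverse<E>(node: &Node, visit: &mut impl FnMut(&Node) -> Result<(), E>) -> Result<(), E> {
        visit(node)?;
        for child in &node.children {
            traverse(child, visit)?;
        }
        Ok(())
    }
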
@ -636,7 +636,7 @@ impl Repr for EcoString {
     }
 }
 
-impl Repr for &str {
+impl Repr for str {
     fn repr(&self) -> EcoString {
         let mut r = EcoString::with_capacity(self.len() + 2);
         r.push('"');
 
@ -5,11 +5,11 @@ use std::path::Path;
 use ecow::eco_vec;
 use tiny_skia as sk;
 use typst::diag::{SourceDiagnostic, Warned};
-use typst::foundations::Smart;
 use typst::layout::{Abs, Frame, FrameItem, Page, Transform};
 use typst::model::Document;
 use typst::visualize::Color;
 use typst::WorldExt;
+use typst_pdf::PdfOptions;
 
 use crate::collect::{FileSize, NoteKind, Test};
 use crate::world::TestWorld;
@ -190,7 +190,7 @@ impl<'a> Runner<'a> {
         // Write PDF if requested.
         if crate::ARGS.pdf() {
             let pdf_path = format!("{}/pdf/{}.pdf", crate::STORE_PATH, self.test.name);
-            let pdf = typst_pdf::pdf(document, Smart::Auto, None, None);
+            let pdf = typst_pdf::pdf(document, &PdfOptions::default()).unwrap();
             std::fs::write(pdf_path, pdf).unwrap();
         }
 