mirror of https://github.com/typst/typst
synced 2025-08-06 19:27:55 +08:00

Compare commits: 1cbc4680d6...031e826433 (11 commits)
Commits (SHA1):
031e826433
b90ad470d6
c22c47b9c9
63c4720ed2
a4ac4e6562
6b9b78596a
9473aface1
be6629c7cb
e2b37fef33
dacd6acd5e
0a374d2380
Cargo.lock (generated, 11 changed lines)
@@ -3093,6 +3093,7 @@ dependencies = [
 "parking_lot",
 "serde",
 "serde_json",
+"web-sys",
 ]

 [[package]]

@@ -3418,6 +3419,16 @@ dependencies = [
 "indexmap-nostd",
 ]

+[[package]]
+name = "web-sys"
+version = "0.3.70"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "26fdeaafd9bd129f65e7c031593c24d62186301e0c72c8978fa1678be7d532c0"
+dependencies = [
+"js-sys",
+"wasm-bindgen",
+]
+
 [[package]]
 name = "weezl"
 version = "0.1.8"

@@ -134,6 +134,7 @@ ureq = { version = "2", default-features = false, features = ["native-tls", "gzi
 usvg = { version = "0.43", default-features = false, features = ["text"] }
 walkdir = "2"
 wasmi = "0.39.0"
+web-sys = "0.3"
 xmlparser = "0.13.5"
 xmlwriter = "0.1.0"
 xmp-writer = "0.3"

@@ -473,6 +473,9 @@ pub enum PdfStandard {
 /// PDF/A-2b.
 #[value(name = "a-2b")]
 A_2b,
+/// PDF/A-3b.
+#[value(name = "a-3b")]
+A_3b,
 }

 display_possible_values!(PdfStandard);

@@ -136,6 +136,7 @@ impl CompileConfig {
 .map(|standard| match standard {
 PdfStandard::V_1_7 => typst_pdf::PdfStandard::V_1_7,
 PdfStandard::A_2b => typst_pdf::PdfStandard::A_2b,
+PdfStandard::A_3b => typst_pdf::PdfStandard::A_3b,
 })
 .collect::<Vec<_>>();
 PdfStandards::new(&list)?

@@ -305,7 +305,7 @@ impl FileSlot {
 ) -> FileResult<Bytes> {
 self.file.get_or_init(
 || read(self.id, project_root, package_storage),
-|data, _| Ok(data.into()),
+|data, _| Ok(Bytes::new(data)),
 )
 }
 }
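The `|data, _| Ok(Bytes::new(data))` change in the hunk above is the first of many call-site migrations in this comparison: the `From<&[u8]>`/`From<Vec<u8>>` impls and `Bytes::from_static` are replaced by a single generic `Bytes::new` constructor (see the bytes.rs hunks near the end). A minimal standalone sketch of that constructor pattern, assuming nothing from Typst itself (the `Buffer` type and `main` below are illustrative only):

use std::sync::Arc;

// Minimal standalone sketch of a trait-object-backed buffer with one generic
// constructor. `Buffer` is illustrative only; Typst's real type is `Bytes`.
struct Buffer(Arc<dyn AsRef<[u8]> + Send + Sync>);

impl Buffer {
    // Accepts anything `AsRef<[u8]> + Send + Sync + 'static`, which is why
    // call sites change from `data.into()` / `Bytes::from_static(data)` to
    // `Bytes::new(data)`.
    fn new<T>(data: T) -> Self
    where
        T: AsRef<[u8]> + Send + Sync + 'static,
    {
        Self(Arc::new(data))
    }

    fn as_slice(&self) -> &[u8] {
        (*self.0).as_ref()
    }
}

fn main() {
    let from_static = Buffer::new(&b"static bytes"[..]);
    let from_vec = Buffer::new(vec![1u8, 2, 3]);
    assert_eq!(from_static.as_slice().len(), 12);
    assert_eq!(from_vec.as_slice(), &[1u8, 2, 3][..]);
}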
@@ -685,8 +685,7 @@ mod tests {

 // Named-params.
 test(s, "$ foo(bar: y) $", &["foo"]);
-// This should be updated when we improve named-param parsing:
-test(s, "$ foo(x-y: 1, bar-z: 2) $", &["bar", "foo"]);
+test(s, "$ foo(x-y: 1, bar-z: 2) $", &["foo"]);

 // Field access in math.
 test(s, "$ foo.bar $", &["foo"]);

@@ -211,7 +211,7 @@ fn resolve_package(
 // Evaluate the manifest.
 let manifest_id = FileId::new(Some(spec.clone()), VirtualPath::new("typst.toml"));
 let bytes = engine.world.file(manifest_id).at(span)?;
-let string = std::str::from_utf8(&bytes).map_err(FileError::from).at(span)?;
+let string = bytes.as_str().map_err(FileError::from).at(span)?;
 let manifest: PackageManifest = toml::from_str(string)
 .map_err(|err| eco_format!("package manifest is malformed ({})", err.message()))
 .at(span)?;

@@ -817,19 +817,8 @@ fn param_value_completions<'a>(
 ) {
 if param.name == "font" {
 ctx.font_completions();
-} else if param.name == "path" {
-ctx.file_completions_with_extensions(match func.name() {
-Some("image") => &["png", "jpg", "jpeg", "gif", "svg", "svgz"],
-Some("csv") => &["csv"],
-Some("plugin") => &["wasm"],
-Some("cbor") => &["cbor"],
-Some("json") => &["json"],
-Some("toml") => &["toml"],
-Some("xml") => &["xml"],
-Some("yaml") => &["yml", "yaml"],
-Some("bibliography") => &["bib", "yml", "yaml"],
-_ => &[],
-});
+} else if let Some(extensions) = path_completion(func, param) {
+ctx.file_completions_with_extensions(extensions);
 } else if func.name() == Some("figure") && param.name == "body" {
 ctx.snippet_completion("image", "image(\"${}\"),", "An image in a figure.");
 ctx.snippet_completion("table", "table(\n ${}\n),", "A table in a figure.");

@@ -838,6 +827,28 @@ fn param_value_completions<'a>(
 ctx.cast_completions(&param.input);
 }

+/// Returns which file extensions to complete for the given parameter if any.
+fn path_completion(func: &Func, param: &ParamInfo) -> Option<&'static [&'static str]> {
+Some(match (func.name(), param.name) {
+(Some("image"), "source") => &["png", "jpg", "jpeg", "gif", "svg", "svgz"],
+(Some("csv"), "source") => &["csv"],
+(Some("plugin"), "source") => &["wasm"],
+(Some("cbor"), "source") => &["cbor"],
+(Some("json"), "source") => &["json"],
+(Some("toml"), "source") => &["toml"],
+(Some("xml"), "source") => &["xml"],
+(Some("yaml"), "source") => &["yml", "yaml"],
+(Some("bibliography"), "sources") => &["bib", "yml", "yaml"],
+(Some("bibliography"), "style") => &["csl"],
+(Some("cite"), "style") => &["csl"],
+(Some("raw"), "syntaxes") => &["sublime-syntax"],
+(Some("raw"), "theme") => &["tmtheme"],
+(Some("embed"), "path") => &[],
+(None, "path") => &[],
+_ => return None,
+})
+}
+
 /// Resolve a callee expression to a global function.
 fn resolve_global_callee<'a>(
 ctx: &CompletionContext<'a>,
@@ -55,7 +55,7 @@ impl TestWorld {
 pub fn with_asset_at(mut self, path: &str, filename: &str) -> Self {
 let id = FileId::new(None, VirtualPath::new(path));
 let data = typst_dev_assets::get_by_name(filename).unwrap();
-let bytes = Bytes::from_static(data);
+let bytes = Bytes::new(data);
 Arc::make_mut(&mut self.files).assets.insert(id, bytes);
 self
 }

@@ -152,7 +152,7 @@ impl Default for TestBase {
 fn default() -> Self {
 let fonts: Vec<_> = typst_assets::fonts()
 .chain(typst_dev_assets::fonts())
-.flat_map(|data| Font::iter(Bytes::from_static(data)))
+.flat_map(|data| Font::iter(Bytes::new(data)))
 .collect();

 Self {

@@ -13,6 +13,7 @@ use std::path::{Path, PathBuf};
 use std::sync::OnceLock;

 use fontdb::{Database, Source};
+use typst_library::foundations::Bytes;
 use typst_library::text::{Font, FontBook, FontInfo};
 use typst_timing::TimingScope;

@@ -52,9 +53,8 @@ impl FontSlot {
 .as_ref()
 .expect("`path` is not `None` if `font` is uninitialized"),
 )
-.ok()?
-.into();
-Font::new(data, self.index)
+.ok()?;
+Font::new(Bytes::new(data), self.index)
 })
 .clone()
 }

@@ -196,7 +196,7 @@ impl FontSearcher {
 #[cfg(feature = "embed-fonts")]
 fn add_embedded(&mut self) {
 for data in typst_assets::fonts() {
-let buffer = typst_library::foundations::Bytes::from_static(data);
+let buffer = Bytes::new(data);
 for (i, font) in Font::iter(buffer).enumerate() {
 self.book.push(font.info().clone());
 self.fonts.push(FontSlot {
@@ -1,13 +1,13 @@
 use std::ffi::OsStr;

-use typst_library::diag::{bail, warning, At, SourceResult, StrResult};
+use typst_library::diag::{warning, At, SourceResult, StrResult};
 use typst_library::engine::Engine;
-use typst_library::foundations::{Packed, Smart, StyleChain};
+use typst_library::foundations::{Bytes, Derived, Packed, Smart, StyleChain};
 use typst_library::introspection::Locator;
 use typst_library::layout::{
 Abs, Axes, FixedAlignment, Frame, FrameItem, Point, Region, Size,
 };
-use typst_library::loading::Readable;
+use typst_library::loading::DataSource;
 use typst_library::text::families;
 use typst_library::visualize::{
 Curve, Image, ImageElem, ImageFit, ImageFormat, RasterFormat, VectorFormat,

@@ -26,17 +26,17 @@ pub fn layout_image(

 // Take the format that was explicitly defined, or parse the extension,
 // or try to detect the format.
-let data = elem.data();
+let Derived { source, derived: data } = &elem.source;
 let format = match elem.format(styles) {
 Smart::Custom(v) => v,
-Smart::Auto => determine_format(elem.path().as_str(), data).at(span)?,
+Smart::Auto => determine_format(source, data).at(span)?,
 };

 // Warn the user if the image contains a foreign object. Not perfect
 // because the svg could also be encoded, but that's an edge case.
 if format == ImageFormat::Vector(VectorFormat::Svg) {
 let has_foreign_object =
-data.as_str().is_some_and(|s| s.contains("<foreignObject"));
+data.as_str().is_ok_and(|s| s.contains("<foreignObject"));

 if has_foreign_object {
 engine.sink.warn(warning!(

@@ -50,7 +50,7 @@ pub fn layout_image(

 // Construct the image itself.
 let image = Image::with_fonts(
-data.clone().into(),
+data.clone(),
 format,
 elem.alt(styles),
 engine.world,

@@ -119,25 +119,23 @@ pub fn layout_image(
 Ok(frame)
 }

-/// Determine the image format based on path and data.
-fn determine_format(path: &str, data: &Readable) -> StrResult<ImageFormat> {
-let ext = std::path::Path::new(path)
-.extension()
-.and_then(OsStr::to_str)
-.unwrap_or_default()
-.to_lowercase();
-
-Ok(match ext.as_str() {
-"png" => ImageFormat::Raster(RasterFormat::Png),
-"jpg" | "jpeg" => ImageFormat::Raster(RasterFormat::Jpg),
-"gif" => ImageFormat::Raster(RasterFormat::Gif),
-"svg" | "svgz" => ImageFormat::Vector(VectorFormat::Svg),
-_ => match &data {
-Readable::Str(_) => ImageFormat::Vector(VectorFormat::Svg),
-Readable::Bytes(bytes) => match RasterFormat::detect(bytes) {
-Some(f) => ImageFormat::Raster(f),
-None => bail!("unknown image format"),
-},
-},
-})
+/// Try to determine the image format based on the data.
+fn determine_format(source: &DataSource, data: &Bytes) -> StrResult<ImageFormat> {
+if let DataSource::Path(path) = source {
+let ext = std::path::Path::new(path.as_str())
+.extension()
+.and_then(OsStr::to_str)
+.unwrap_or_default()
+.to_lowercase();
+
+match ext.as_str() {
+"png" => return Ok(ImageFormat::Raster(RasterFormat::Png)),
+"jpg" | "jpeg" => return Ok(ImageFormat::Raster(RasterFormat::Jpg)),
+"gif" => return Ok(ImageFormat::Raster(RasterFormat::Gif)),
+"svg" | "svgz" => return Ok(ImageFormat::Vector(VectorFormat::Svg)),
+_ => {}
+}
+}
+
+Ok(ImageFormat::detect(data).ok_or("unknown image format")?)
 }
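The rewritten `determine_format` above consults the file extension only when the image came from a path and otherwise falls back to detecting the format from the raw bytes via `ImageFormat::detect`. A small self-contained sketch of that two-stage flow, with a toy `Format` enum and hand-rolled magic-number checks that merely stand in for Typst's real detection:

use std::path::Path;

// Toy format enum for the sketch; Typst's real `ImageFormat` is richer.
#[derive(Debug, PartialEq)]
enum Format {
    Png,
    Jpg,
    Svg,
}

// Hand-rolled content sniffing, standing in for `ImageFormat::detect`.
fn sniff(data: &[u8]) -> Option<Format> {
    if data.starts_with(&[0x89, b'P', b'N', b'G']) {
        Some(Format::Png)
    } else if data.starts_with(&[0xFF, 0xD8]) {
        Some(Format::Jpg)
    } else if data.starts_with(b"<") {
        Some(Format::Svg)
    } else {
        None
    }
}

// Extension first (only when a path is known), then fall back to sniffing.
fn determine(path: Option<&str>, data: &[u8]) -> Result<Format, &'static str> {
    if let Some(path) = path {
        let ext = Path::new(path)
            .extension()
            .and_then(|e| e.to_str())
            .unwrap_or_default()
            .to_lowercase();
        match ext.as_str() {
            "png" => return Ok(Format::Png),
            "jpg" | "jpeg" => return Ok(Format::Jpg),
            "svg" | "svgz" => return Ok(Format::Svg),
            _ => {}
        }
    }
    sniff(data).ok_or("unknown image format")
}

fn main() {
    assert_eq!(determine(Some("plot.png"), &[]), Ok(Format::Png));
    assert_eq!(determine(None, &[0xFF, 0xD8, 0xFF]), Ok(Format::Jpg));
    assert!(determine(None, &[0x00]).is_err());
}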
@@ -161,9 +161,9 @@ pub fn collect<'a>(
 }

 if let Some(case) = TextElem::case_in(styles) {
-full.push_str(&case.apply(elem.text()));
+full.push_str(&case.apply(&elem.text));
 } else {
-full.push_str(elem.text());
+full.push_str(&elem.text);
 }

 if dir != outer_dir {

@@ -172,13 +172,12 @@ pub fn collect<'a>(
 }
 });
 } else if let Some(elem) = child.to_packed::<HElem>() {
-let amount = elem.amount();
-if amount.is_zero() {
+if elem.amount.is_zero() {
 continue;
 }

-collector.push_item(match amount {
-Spacing::Fr(fr) => Item::Fractional(*fr, None),
+collector.push_item(match elem.amount {
+Spacing::Fr(fr) => Item::Fractional(fr, None),
 Spacing::Rel(rel) => Item::Absolute(
 rel.resolve(styles).relative_to(region.x),
 elem.weak(styles),

@@ -40,7 +40,7 @@ pub fn layout_list(
 let mut cells = vec![];
 let mut locator = locator.split();

-for item in elem.children() {
+for item in &elem.children {
 cells.push(Cell::new(Content::empty(), locator.next(&())));
 cells.push(Cell::new(marker.clone(), locator.next(&marker.span())));
 cells.push(Cell::new(Content::empty(), locator.next(&())));

@@ -101,7 +101,7 @@ pub fn layout_enum(
 // relation to the item it refers to.
 let number_align = elem.number_align(styles);

-for item in elem.children() {
+for item in &elem.children {
 number = item.number(styles).unwrap_or(number);

 let context = Context::new(None, Some(styles));

@@ -16,7 +16,7 @@ pub fn layout_accent(
 styles: StyleChain,
 ) -> SourceResult<()> {
 let cramped = style_cramped();
-let mut base = ctx.layout_into_fragment(elem.base(), styles.chain(&cramped))?;
+let mut base = ctx.layout_into_fragment(&elem.base, styles.chain(&cramped))?;

 // Try to replace a glyph with its dotless variant.
 if let MathFragment::Glyph(glyph) = &mut base {

@@ -29,8 +29,8 @@ pub fn layout_accent(

 let width = elem.size(styles).relative_to(base.width());

-let Accent(c) = elem.accent();
-let mut glyph = GlyphFragment::new(ctx, styles, *c, elem.span());
+let Accent(c) = elem.accent;
+let mut glyph = GlyphFragment::new(ctx, styles, c, elem.span());

 // Try to replace accent glyph with flattened variant.
 let flattened_base_height = scaled!(ctx, styles, flattened_accent_base_height);

@@ -29,7 +29,7 @@ pub fn layout_attach(
 let elem = merged.as_ref().unwrap_or(elem);
 let stretch = stretch_size(styles, elem);

-let mut base = ctx.layout_into_fragment(elem.base(), styles)?;
+let mut base = ctx.layout_into_fragment(&elem.base, styles)?;
 let sup_style = style_for_superscript(styles);
 let sup_style_chain = styles.chain(&sup_style);
 let tl = elem.tl(sup_style_chain);

@@ -95,7 +95,7 @@ pub fn layout_primes(
 ctx: &mut MathContext,
 styles: StyleChain,
 ) -> SourceResult<()> {
-match *elem.count() {
+match elem.count {
 count @ 1..=4 => {
 let c = match count {
 1 => '′',

@@ -134,7 +134,7 @@ pub fn layout_scripts(
 ctx: &mut MathContext,
 styles: StyleChain,
 ) -> SourceResult<()> {
-let mut fragment = ctx.layout_into_fragment(elem.body(), styles)?;
+let mut fragment = ctx.layout_into_fragment(&elem.body, styles)?;
 fragment.set_limits(Limits::Never);
 ctx.push(fragment);
 Ok(())

@@ -148,7 +148,7 @@ pub fn layout_limits(
 styles: StyleChain,
 ) -> SourceResult<()> {
 let limits = if elem.inline(styles) { Limits::Always } else { Limits::Display };
-let mut fragment = ctx.layout_into_fragment(elem.body(), styles)?;
+let mut fragment = ctx.layout_into_fragment(&elem.body, styles)?;
 fragment.set_limits(limits);
 ctx.push(fragment);
 Ok(())

@@ -157,9 +157,9 @@ pub fn layout_limits(
 /// Get the size to stretch the base to.
 fn stretch_size(styles: StyleChain, elem: &Packed<AttachElem>) -> Option<Rel<Abs>> {
 // Extract from an EquationElem.
-let mut base = elem.base();
+let mut base = &elem.base;
 while let Some(equation) = base.to_packed::<EquationElem>() {
-base = equation.body();
+base = &equation.body;
 }

 base.to_packed::<StretchElem>().map(|stretch| stretch.size(styles))

@@ -16,7 +16,7 @@ pub fn layout_cancel(
 ctx: &mut MathContext,
 styles: StyleChain,
 ) -> SourceResult<()> {
-let body = ctx.layout_into_fragment(elem.body(), styles)?;
+let body = ctx.layout_into_fragment(&elem.body, styles)?;

 // Preserve properties of body.
 let body_class = body.class();

@@ -23,8 +23,8 @@ pub fn layout_frac(
 layout_frac_like(
 ctx,
 styles,
-elem.num(),
-std::slice::from_ref(elem.denom()),
+&elem.num,
+std::slice::from_ref(&elem.denom),
 false,
 elem.span(),
 )

@@ -37,7 +37,7 @@ pub fn layout_binom(
 ctx: &mut MathContext,
 styles: StyleChain,
 ) -> SourceResult<()> {
-layout_frac_like(ctx, styles, elem.upper(), elem.lower(), true, elem.span())
+layout_frac_like(ctx, styles, &elem.upper, &elem.lower, true, elem.span())
 }

 /// Layout a fraction or binomial.

@@ -13,17 +13,16 @@ pub fn layout_lr(
 ctx: &mut MathContext,
 styles: StyleChain,
 ) -> SourceResult<()> {
-let mut body = elem.body();

 // Extract from an EquationElem.
+let mut body = &elem.body;
 if let Some(equation) = body.to_packed::<EquationElem>() {
-body = equation.body();
+body = &equation.body;
 }

 // Extract implicit LrElem.
 if let Some(lr) = body.to_packed::<LrElem>() {
 if lr.size(styles).is_one() {
-body = lr.body();
+body = &lr.body;
 }
 }

@@ -100,7 +99,7 @@ pub fn layout_mid(
 ctx: &mut MathContext,
 styles: StyleChain,
 ) -> SourceResult<()> {
-let mut fragments = ctx.layout_into_fragments(elem.body(), styles)?;
+let mut fragments = ctx.layout_into_fragments(&elem.body, styles)?;

 for fragment in &mut fragments {
 match fragment {

@@ -27,7 +27,7 @@ pub fn layout_vec(
 let frame = layout_vec_body(
 ctx,
 styles,
-elem.children(),
+&elem.children,
 elem.align(styles),
 elem.gap(styles),
 LeftRightAlternator::Right,

@@ -44,7 +44,7 @@ pub fn layout_mat(
 styles: StyleChain,
 ) -> SourceResult<()> {
 let augment = elem.augment(styles);
-let rows = elem.rows();
+let rows = &elem.rows;

 if let Some(aug) = &augment {
 for &offset in &aug.hline.0 {

@@ -58,7 +58,7 @@ pub fn layout_mat(
 }
 }

-let ncols = elem.rows().first().map_or(0, |row| row.len());
+let ncols = rows.first().map_or(0, |row| row.len());

 for &offset in &aug.vline.0 {
 if offset == 0 || offset.unsigned_abs() >= ncols {

@@ -97,7 +97,7 @@ pub fn layout_cases(
 let frame = layout_vec_body(
 ctx,
 styles,
-elem.children(),
+&elem.children,
 FixedAlignment::Start,
 elem.gap(styles),
 LeftRightAlternator::None,

@@ -632,7 +632,7 @@ fn layout_h(
 ctx: &mut MathContext,
 styles: StyleChain,
 ) -> SourceResult<()> {
-if let Spacing::Rel(rel) = elem.amount() {
+if let Spacing::Rel(rel) = elem.amount {
 if rel.rel.is_zero() {
 ctx.push(MathFragment::Spacing(rel.abs.resolve(styles), elem.weak(styles)));
 }

@@ -647,11 +647,10 @@ fn layout_class(
 ctx: &mut MathContext,
 styles: StyleChain,
 ) -> SourceResult<()> {
-let class = *elem.class();
-let style = EquationElem::set_class(Some(class)).wrap();
-let mut fragment = ctx.layout_into_fragment(elem.body(), styles.chain(&style))?;
-fragment.set_class(class);
-fragment.set_limits(Limits::for_class(class));
+let style = EquationElem::set_class(Some(elem.class)).wrap();
+let mut fragment = ctx.layout_into_fragment(&elem.body, styles.chain(&style))?;
+fragment.set_class(elem.class);
+fragment.set_limits(Limits::for_class(elem.class));
 ctx.push(fragment);
 Ok(())
 }

@@ -663,7 +662,7 @@ fn layout_op(
 ctx: &mut MathContext,
 styles: StyleChain,
 ) -> SourceResult<()> {
-let fragment = ctx.layout_into_fragment(elem.text(), styles)?;
+let fragment = ctx.layout_into_fragment(&elem.text, styles)?;
 let italics = fragment.italics_correction();
 let accent_attach = fragment.accent_attach();
 let text_like = fragment.is_text_like();
@@ -18,7 +18,6 @@ pub fn layout_root(
 styles: StyleChain,
 ) -> SourceResult<()> {
 let index = elem.index(styles);
-let radicand = elem.radicand();
 let span = elem.span();

 let gap = scaled!(

@@ -36,7 +35,7 @@ pub fn layout_root(
 let radicand = {
 let cramped = style_cramped();
 let styles = styles.chain(&cramped);
-let run = ctx.layout_into_run(radicand, styles)?;
+let run = ctx.layout_into_run(&elem.radicand, styles)?;
 let multiline = run.is_multiline();
 let mut radicand = run.into_fragment(styles).into_frame();
 if multiline {

@@ -21,7 +21,7 @@ pub fn layout_stretch(
 ctx: &mut MathContext,
 styles: StyleChain,
 ) -> SourceResult<()> {
-let mut fragment = ctx.layout_into_fragment(elem.body(), styles)?;
+let mut fragment = ctx.layout_into_fragment(&elem.body, styles)?;
 stretch_fragment(
 ctx,
 styles,

@@ -20,7 +20,7 @@ pub fn layout_text(
 ctx: &mut MathContext,
 styles: StyleChain,
 ) -> SourceResult<()> {
-let text = elem.text();
+let text = &elem.text;
 let span = elem.span();
 let mut chars = text.chars();
 let math_size = EquationElem::size_in(styles);

@@ -32,7 +32,7 @@ pub fn layout_underline(
 ctx: &mut MathContext,
 styles: StyleChain,
 ) -> SourceResult<()> {
-layout_underoverline(ctx, styles, elem.body(), elem.span(), Position::Under)
+layout_underoverline(ctx, styles, &elem.body, elem.span(), Position::Under)
 }

 /// Lays out an [`OverlineElem`].

@@ -42,7 +42,7 @@ pub fn layout_overline(
 ctx: &mut MathContext,
 styles: StyleChain,
 ) -> SourceResult<()> {
-layout_underoverline(ctx, styles, elem.body(), elem.span(), Position::Over)
+layout_underoverline(ctx, styles, &elem.body, elem.span(), Position::Over)
 }

 /// Lays out an [`UnderbraceElem`].

@@ -55,7 +55,7 @@ pub fn layout_underbrace(
 layout_underoverspreader(
 ctx,
 styles,
-elem.body(),
+&elem.body,
 &elem.annotation(styles),
 '⏟',
 BRACE_GAP,

@@ -74,7 +74,7 @@ pub fn layout_overbrace(
 layout_underoverspreader(
 ctx,
 styles,
-elem.body(),
+&elem.body,
 &elem.annotation(styles),
 '⏞',
 BRACE_GAP,

@@ -93,7 +93,7 @@ pub fn layout_underbracket(
 layout_underoverspreader(
 ctx,
 styles,
-elem.body(),
+&elem.body,
 &elem.annotation(styles),
 '⎵',
 BRACKET_GAP,

@@ -112,7 +112,7 @@ pub fn layout_overbracket(
 layout_underoverspreader(
 ctx,
 styles,
-elem.body(),
+&elem.body,
 &elem.annotation(styles),
 '⎴',
 BRACKET_GAP,

@@ -131,7 +131,7 @@ pub fn layout_underparen(
 layout_underoverspreader(
 ctx,
 styles,
-elem.body(),
+&elem.body,
 &elem.annotation(styles),
 '⏝',
 PAREN_GAP,

@@ -150,7 +150,7 @@ pub fn layout_overparen(
 layout_underoverspreader(
 ctx,
 styles,
-elem.body(),
+&elem.body,
 &elem.annotation(styles),
 '⏜',
 PAREN_GAP,

@@ -169,7 +169,7 @@ pub fn layout_undershell(
 layout_underoverspreader(
 ctx,
 styles,
-elem.body(),
+&elem.body,
 &elem.annotation(styles),
 '⏡',
 SHELL_GAP,

@@ -188,7 +188,7 @@ pub fn layout_overshell(
 layout_underoverspreader(
 ctx,
 styles,
-elem.body(),
+&elem.body,
 &elem.annotation(styles),
 '⏠',
 SHELL_GAP,
@@ -62,7 +62,7 @@ pub fn layout_path(
 axes.resolve(styles).zip_map(region.size, Rel::relative_to).to_point()
 };

-let vertices = elem.vertices();
+let vertices = &elem.vertices;
 let points: Vec<Point> = vertices.iter().map(|c| resolve(c.vertex())).collect();

 let mut size = Size::zero();

@@ -150,7 +150,7 @@ pub fn layout_curve(
 ) -> SourceResult<Frame> {
 let mut builder = CurveBuilder::new(region, styles);

-for item in elem.components() {
+for item in &elem.components {
 match item {
 CurveComponent::Move(element) => {
 let relative = element.relative(styles);

@@ -399,7 +399,7 @@ pub fn layout_polygon(
 region: Region,
 ) -> SourceResult<Frame> {
 let points: Vec<Point> = elem
-.vertices()
+.vertices
 .iter()
 .map(|c| c.resolve(styles).zip_map(region.size, Rel::relative_to).to_point())
 .collect();

@@ -27,7 +27,7 @@ pub fn layout_stack(
 let spacing = elem.spacing(styles);
 let mut deferred = None;

-for child in elem.children() {
+for child in &elem.children {
 match child {
 StackChild::Spacing(kind) => {
 layouter.layout_spacing(*kind);

@@ -36,14 +36,14 @@ pub fn layout_stack(
 StackChild::Block(block) => {
 // Transparently handle `h`.
 if let (Axis::X, Some(h)) = (axis, block.to_packed::<HElem>()) {
-layouter.layout_spacing(*h.amount());
+layouter.layout_spacing(h.amount);
 deferred = None;
 continue;
 }

 // Transparently handle `v`.
 if let (Axis::Y, Some(v)) = (axis, block.to_packed::<VElem>()) {
-layouter.layout_spacing(*v.amount());
+layouter.layout_spacing(v.amount);
 deferred = None;
 continue;
 }

@@ -52,7 +52,7 @@ pub fn layout_rotate(
 region,
 size,
 styles,
-elem.body(),
+&elem.body,
 Transform::rotate(angle),
 align,
 elem.reflow(styles),

@@ -81,7 +81,7 @@ pub fn layout_scale(
 region,
 size,
 styles,
-elem.body(),
+&elem.body,
 Transform::scale(scale.x, scale.y),
 elem.origin(styles).resolve(styles),
 elem.reflow(styles),

@@ -169,7 +169,7 @@ pub fn layout_skew(
 region,
 size,
 styles,
-elem.body(),
+&elem.body,
 Transform::skew(ax, ay),
 align,
 elem.reflow(styles),
@@ -305,8 +305,6 @@ impl Args {
 /// ```
 #[func(constructor)]
 pub fn construct(
-/// The real arguments (the other argument is just for the docs).
-/// The docs argument cannot be called `args`.
 args: &mut Args,
 /// The arguments to construct.
 #[external]

@@ -301,9 +301,7 @@ impl Array {
 #[func]
 pub fn find(
 &self,
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
 /// The function to apply to each item. Must return a boolean.
 searcher: Func,

@@ -325,9 +323,7 @@ impl Array {
 #[func]
 pub fn position(
 &self,
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
 /// The function to apply to each item. Must return a boolean.
 searcher: Func,

@@ -363,8 +359,6 @@ impl Array {
 /// ```
 #[func]
 pub fn range(
-/// The real arguments (the other arguments are just for the docs, this
-/// function is a bit involved, so we parse the arguments manually).
 args: &mut Args,
 /// The start of the range (inclusive).
 #[external]

@@ -402,9 +396,7 @@ impl Array {
 #[func]
 pub fn filter(
 &self,
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
 /// The function to apply to each item. Must return a boolean.
 test: Func,

@@ -427,9 +419,7 @@ impl Array {
 #[func]
 pub fn map(
 self,
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
 /// The function to apply to each item.
 mapper: Func,

@@ -481,8 +471,6 @@ impl Array {
 #[func]
 pub fn zip(
 self,
-/// The real arguments (the `others` arguments are just for the docs, this
-/// function is a bit involved, so we parse the positional arguments manually).
 args: &mut Args,
 /// Whether all arrays have to have the same length.
 /// For example, `{(1, 2).zip((1, 2, 3), exact: true)}` produces an

@@ -569,9 +557,7 @@ impl Array {
 #[func]
 pub fn fold(
 self,
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
 /// The initial value to start with.
 init: Value,

@@ -631,9 +617,7 @@ impl Array {
 #[func]
 pub fn any(
 self,
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
 /// The function to apply to each item. Must return a boolean.
 test: Func,

@@ -651,9 +635,7 @@ impl Array {
 #[func]
 pub fn all(
 self,
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
 /// The function to apply to each item. Must return a boolean.
 test: Func,

@@ -831,11 +813,8 @@ impl Array {
 #[func]
 pub fn sorted(
 self,
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
-/// The callsite span.
 span: Span,
 /// If given, applies this function to the elements in the array to
 /// determine the keys to sort by.

@@ -881,9 +860,7 @@ impl Array {
 #[func(title = "Deduplicate")]
 pub fn dedup(
 self,
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
 /// If given, applies this function to the elements in the array to
 /// determine the keys to deduplicate by.

@@ -967,9 +944,7 @@ impl Array {
 #[func]
 pub fn reduce(
 self,
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
 /// The reducing function. Must have two parameters: One for the
 /// accumulated value and one for an item.
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// One element, or multiple provided as an array.
|
||||||
|
#[derive(Debug, Clone, PartialEq, Hash)]
|
||||||
|
pub struct OneOrMultiple<T>(pub Vec<T>);
|
||||||
|
|
||||||
|
impl<T: Reflect> Reflect for OneOrMultiple<T> {
|
||||||
|
fn input() -> CastInfo {
|
||||||
|
T::input() + Array::input()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn output() -> CastInfo {
|
||||||
|
T::output() + Array::output()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn castable(value: &Value) -> bool {
|
||||||
|
Array::castable(value) || T::castable(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: IntoValue + Clone> IntoValue for OneOrMultiple<T> {
|
||||||
|
fn into_value(self) -> Value {
|
||||||
|
self.0.into_value()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: FromValue> FromValue for OneOrMultiple<T> {
|
||||||
|
fn from_value(value: Value) -> HintedStrResult<Self> {
|
||||||
|
if T::castable(&value) {
|
||||||
|
return Ok(Self(vec![T::from_value(value)?]));
|
||||||
|
}
|
||||||
|
if Array::castable(&value) {
|
||||||
|
return Ok(Self(
|
||||||
|
Array::from_value(value)?
|
||||||
|
.into_iter()
|
||||||
|
.map(|value| T::from_value(value))
|
||||||
|
.collect::<HintedStrResult<_>>()?,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
Err(Self::error(&value))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> Default for OneOrMultiple<T> {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self(vec![])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// The error message when the array is empty.
|
/// The error message when the array is empty.
|
||||||
#[cold]
|
#[cold]
|
||||||
fn array_is_empty() -> EcoString {
|
fn array_is_empty() -> EcoString {
|
||||||
|
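The new `OneOrMultiple<T>` wrapper above lets a parameter accept either a single value or an array of values and normalizes both into a `Vec<T>`. A tiny standalone sketch of that cast behaviour with a toy `Value` enum (Typst's real `Value`, `FromValue`, and `HintedStrResult` machinery is more involved; the names below are illustrative only):

// Toy value type for the sketch.
#[derive(Debug, Clone)]
enum Value {
    Int(i64),
    Array(Vec<Value>),
}

// Mirrors the `FromValue for OneOrMultiple<T>` logic: a single castable
// element becomes a one-element collection, an array is cast element-wise.
fn one_or_multiple_ints(value: Value) -> Result<Vec<i64>, String> {
    match value {
        Value::Int(n) => Ok(vec![n]),
        Value::Array(items) => items
            .into_iter()
            .map(|item| match item {
                Value::Int(n) => Ok(n),
                other => Err(format!("expected integer, found {other:?}")),
            })
            .collect(),
    }
}

fn main() {
    assert_eq!(one_or_multiple_ints(Value::Int(7)), Ok(vec![7]));
    assert_eq!(
        one_or_multiple_ints(Value::Array(vec![Value::Int(1), Value::Int(2)])),
        Ok(vec![1, 2])
    );
}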
@ -1,6 +1,8 @@
|
|||||||
use std::borrow::Cow;
|
use std::any::Any;
|
||||||
use std::fmt::{self, Debug, Formatter};
|
use std::fmt::{self, Debug, Formatter};
|
||||||
|
use std::hash::{Hash, Hasher};
|
||||||
use std::ops::{Add, AddAssign, Deref};
|
use std::ops::{Add, AddAssign, Deref};
|
||||||
|
use std::str::Utf8Error;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use ecow::{eco_format, EcoString};
|
use ecow::{eco_format, EcoString};
|
||||||
@ -39,28 +41,75 @@ use crate::foundations::{cast, func, scope, ty, Array, Reflect, Repr, Str, Value
|
|||||||
/// #str(data.slice(1, 4))
|
/// #str(data.slice(1, 4))
|
||||||
/// ```
|
/// ```
|
||||||
#[ty(scope, cast)]
|
#[ty(scope, cast)]
|
||||||
#[derive(Clone, Hash, Eq, PartialEq)]
|
#[derive(Clone, Hash)]
|
||||||
pub struct Bytes(Arc<LazyHash<Cow<'static, [u8]>>>);
|
#[allow(clippy::derived_hash_with_manual_eq)]
|
||||||
|
pub struct Bytes(Arc<LazyHash<dyn Bytelike>>);
|
||||||
|
|
||||||
impl Bytes {
|
impl Bytes {
|
||||||
/// Create a buffer from a static byte slice.
|
/// Create `Bytes` from anything byte-like.
|
||||||
pub fn from_static(slice: &'static [u8]) -> Self {
|
///
|
||||||
Self(Arc::new(LazyHash::new(Cow::Borrowed(slice))))
|
/// The `data` type will directly back this bytes object. This means you can
|
||||||
|
/// e.g. pass `&'static [u8]` or `[u8; 8]` and no extra vector will be
|
||||||
|
/// allocated.
|
||||||
|
///
|
||||||
|
/// If the type is `Vec<u8>` and the `Bytes` are unique (i.e. not cloned),
|
||||||
|
/// the vector will be reused when mutating to the `Bytes`.
|
||||||
|
///
|
||||||
|
/// If your source type is a string, prefer [`Bytes::from_string`] to
|
||||||
|
/// directly use the UTF-8 encoded string data without any copying.
|
||||||
|
pub fn new<T>(data: T) -> Self
|
||||||
|
where
|
||||||
|
T: AsRef<[u8]> + Send + Sync + 'static,
|
||||||
|
{
|
||||||
|
Self(Arc::new(LazyHash::new(data)))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Create `Bytes` from anything string-like, implicitly viewing the UTF-8
|
||||||
|
/// representation.
|
||||||
|
///
|
||||||
|
/// The `data` type will directly back this bytes object. This means you can
|
||||||
|
/// e.g. pass `String` or `EcoString` without any copying.
|
||||||
|
pub fn from_string<T>(data: T) -> Self
|
||||||
|
where
|
||||||
|
T: AsRef<str> + Send + Sync + 'static,
|
||||||
|
{
|
||||||
|
Self(Arc::new(LazyHash::new(StrWrapper(data))))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Return `true` if the length is 0.
|
/// Return `true` if the length is 0.
|
||||||
pub fn is_empty(&self) -> bool {
|
pub fn is_empty(&self) -> bool {
|
||||||
self.0.is_empty()
|
self.as_slice().is_empty()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Return a view into the buffer.
|
/// Return a view into the bytes.
|
||||||
pub fn as_slice(&self) -> &[u8] {
|
pub fn as_slice(&self) -> &[u8] {
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Return a copy of the buffer as a vector.
|
/// Try to view the bytes as an UTF-8 string.
|
||||||
|
///
|
||||||
|
/// If these bytes were created via `Bytes::from_string`, UTF-8 validation
|
||||||
|
/// is skipped.
|
||||||
|
pub fn as_str(&self) -> Result<&str, Utf8Error> {
|
||||||
|
self.inner().as_str()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return a copy of the bytes as a vector.
|
||||||
pub fn to_vec(&self) -> Vec<u8> {
|
pub fn to_vec(&self) -> Vec<u8> {
|
||||||
self.0.to_vec()
|
self.as_slice().to_vec()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Try to turn the bytes into a `Str`.
|
||||||
|
///
|
||||||
|
/// - If these bytes were created via `Bytes::from_string::<Str>`, the
|
||||||
|
/// string is cloned directly.
|
||||||
|
/// - If these bytes were created via `Bytes::from_string`, but from a
|
||||||
|
/// different type of string, UTF-8 validation is still skipped.
|
||||||
|
pub fn to_str(&self) -> Result<Str, Utf8Error> {
|
||||||
|
match self.inner().as_any().downcast_ref::<Str>() {
|
||||||
|
Some(string) => Ok(string.clone()),
|
||||||
|
None => self.as_str().map(Into::into),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Resolve an index or throw an out of bounds error.
|
/// Resolve an index or throw an out of bounds error.
|
||||||
@ -72,12 +121,15 @@ impl Bytes {
|
|||||||
///
|
///
|
||||||
/// `index == len` is considered in bounds.
|
/// `index == len` is considered in bounds.
|
||||||
fn locate_opt(&self, index: i64) -> Option<usize> {
|
fn locate_opt(&self, index: i64) -> Option<usize> {
|
||||||
|
let len = self.as_slice().len();
|
||||||
let wrapped =
|
let wrapped =
|
||||||
if index >= 0 { Some(index) } else { (self.len() as i64).checked_add(index) };
|
if index >= 0 { Some(index) } else { (len as i64).checked_add(index) };
|
||||||
|
wrapped.and_then(|v| usize::try_from(v).ok()).filter(|&v| v <= len)
|
||||||
|
}
|
||||||
|
|
||||||
wrapped
|
/// Access the inner `dyn Bytelike`.
|
||||||
.and_then(|v| usize::try_from(v).ok())
|
fn inner(&self) -> &dyn Bytelike {
|
||||||
.filter(|&v| v <= self.0.len())
|
&**self.0
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -106,7 +158,7 @@ impl Bytes {
|
|||||||
/// The length in bytes.
|
/// The length in bytes.
|
||||||
#[func(title = "Length")]
|
#[func(title = "Length")]
|
||||||
pub fn len(&self) -> usize {
|
pub fn len(&self) -> usize {
|
||||||
self.0.len()
|
self.as_slice().len()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns the byte at the specified index. Returns the default value if
|
/// Returns the byte at the specified index. Returns the default value if
|
||||||
@ -122,13 +174,13 @@ impl Bytes {
|
|||||||
default: Option<Value>,
|
default: Option<Value>,
|
||||||
) -> StrResult<Value> {
|
) -> StrResult<Value> {
|
||||||
self.locate_opt(index)
|
self.locate_opt(index)
|
||||||
.and_then(|i| self.0.get(i).map(|&b| Value::Int(b.into())))
|
.and_then(|i| self.as_slice().get(i).map(|&b| Value::Int(b.into())))
|
||||||
.or(default)
|
.or(default)
|
||||||
.ok_or_else(|| out_of_bounds_no_default(index, self.len()))
|
.ok_or_else(|| out_of_bounds_no_default(index, self.len()))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Extracts a subslice of the bytes. Fails with an error if the start or end
|
/// Extracts a subslice of the bytes. Fails with an error if the start or
|
||||||
/// index is out of bounds.
|
/// end index is out of bounds.
|
||||||
#[func]
|
#[func]
|
||||||
pub fn slice(
|
pub fn slice(
|
||||||
&self,
|
&self,
|
||||||
@ -148,9 +200,17 @@ impl Bytes {
|
|||||||
if end.is_none() {
|
if end.is_none() {
|
||||||
end = count.map(|c: i64| start + c);
|
end = count.map(|c: i64| start + c);
|
||||||
}
|
}
|
||||||
|
|
||||||
let start = self.locate(start)?;
|
let start = self.locate(start)?;
|
||||||
let end = self.locate(end.unwrap_or(self.len() as i64))?.max(start);
|
let end = self.locate(end.unwrap_or(self.len() as i64))?.max(start);
|
||||||
Ok(self.0[start..end].into())
|
let slice = &self.as_slice()[start..end];
|
||||||
|
|
||||||
|
// We could hold a view into the original bytes here instead of
|
||||||
|
// making a copy, but it's unclear when that's worth it. Java
|
||||||
|
// originally did that for strings, but went back on it because a
|
||||||
|
// very small view into a very large buffer would be a sort of
|
||||||
|
// memory leak.
|
||||||
|
Ok(Bytes::new(slice.to_vec()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -170,7 +230,15 @@ impl Deref for Bytes {
|
|||||||
 type Target = [u8];

 fn deref(&self) -> &Self::Target {
-&self.0
+self.inner().as_bytes()
+}
+}

+impl Eq for Bytes {}

+impl PartialEq for Bytes {
+fn eq(&self, other: &Self) -> bool {
+self.0.eq(&other.0)
 }
 }

@@ -180,18 +248,6 @@ impl AsRef<[u8]> for Bytes {
 }
 }

-impl From<&[u8]> for Bytes {
-fn from(slice: &[u8]) -> Self {
-Self(Arc::new(LazyHash::new(slice.to_vec().into())))
-}
-}

-impl From<Vec<u8>> for Bytes {
-fn from(vec: Vec<u8>) -> Self {
-Self(Arc::new(LazyHash::new(vec.into())))
-}
-}

 impl Add for Bytes {
 type Output = Self;

@@ -207,10 +263,12 @@ impl AddAssign for Bytes {
 // Nothing to do
 } else if self.is_empty() {
 *self = rhs;
-} else if Arc::strong_count(&self.0) == 1 && matches!(**self.0, Cow::Owned(_)) {
-Arc::make_mut(&mut self.0).to_mut().extend_from_slice(&rhs);
+} else if let Some(vec) = Arc::get_mut(&mut self.0)
+.and_then(|unique| unique.as_any_mut().downcast_mut::<Vec<u8>>())
+{
+vec.extend_from_slice(&rhs);
 } else {
-*self = Self::from([self.as_slice(), rhs.as_slice()].concat());
+*self = Self::new([self.as_slice(), rhs.as_slice()].concat());
 }
 }
 }

@@ -228,20 +286,79 @@ impl Serialize for Bytes {
 }
 }

+/// Any type that can back a byte buffer.
+trait Bytelike: Send + Sync {
+fn as_bytes(&self) -> &[u8];
+fn as_str(&self) -> Result<&str, Utf8Error>;
+fn as_any(&self) -> &dyn Any;
+fn as_any_mut(&mut self) -> &mut dyn Any;
+}

+impl<T> Bytelike for T
+where
+T: AsRef<[u8]> + Send + Sync + 'static,
+{
+fn as_bytes(&self) -> &[u8] {
+self.as_ref()
+}

+fn as_str(&self) -> Result<&str, Utf8Error> {
+std::str::from_utf8(self.as_ref())
+}

+fn as_any(&self) -> &dyn Any {
+self
+}

+fn as_any_mut(&mut self) -> &mut dyn Any {
+self
+}
+}

+impl Hash for dyn Bytelike {
+fn hash<H: Hasher>(&self, state: &mut H) {
+self.as_bytes().hash(state);
+}
+}

+/// Makes string-like objects usable with `Bytes`.
+struct StrWrapper<T>(T);

+impl<T> Bytelike for StrWrapper<T>
+where
+T: AsRef<str> + Send + Sync + 'static,
+{
+fn as_bytes(&self) -> &[u8] {
+self.0.as_ref().as_bytes()
+}

+fn as_str(&self) -> Result<&str, Utf8Error> {
+Ok(self.0.as_ref())
+}

+fn as_any(&self) -> &dyn Any {
+self
+}

+fn as_any_mut(&mut self) -> &mut dyn Any {
+self
+}
+}

 /// A value that can be cast to bytes.
 pub struct ToBytes(Bytes);

 cast! {
 ToBytes,
-v: Str => Self(v.as_bytes().into()),
+v: Str => Self(Bytes::from_string(v)),
 v: Array => Self(v.iter()
 .map(|item| match item {
 Value::Int(byte @ 0..=255) => Ok(*byte as u8),
 Value::Int(_) => bail!("number must be between 0 and 255"),
 value => Err(<u8 as Reflect>::error(value)),
 })
-.collect::<Result<Vec<u8>, _>>()?
-.into()
+.collect::<Result<Vec<u8>, _>>()
+.map(Bytes::new)?
 ),
 v: Bytes => Self(v),
 }
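// Editor's sketch (not part of the diff): how the reworked constructors shown
// above are meant to be used. This assumes the signatures implied by the
// hunks -- `Bytes::new` accepts any `AsRef<[u8]> + Send + Sync + 'static`
// backing (the `Bytelike` blanket impl) and `Bytes::from_string` wraps
// string-like values -- and would only compile inside the typst crates.
fn bytes_construction_sketch() {
    // Owned buffers and fixed-size arrays both satisfy the blanket impl.
    let from_vec = Bytes::new(vec![1u8, 2, 3]);
    let from_arr = Bytes::new(*b"abc");
    // Strings go through `from_string`, keeping the string backing instead of
    // copying it into a fresh Vec<u8> up front.
    let from_str = Bytes::from_string(String::from("hello"));
    assert_eq!(from_vec.as_slice(), &[1, 2, 3]);
    assert_eq!(from_arr.as_slice(), b"abc");
    assert_eq!(from_str.as_slice(), b"hello");
}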
@@ -97,7 +97,6 @@ cast! {
 /// ```
 #[func(title = "Power")]
 pub fn pow(
-/// The callsite span.
 span: Span,
 /// The base of the power.
 ///
@@ -159,7 +158,6 @@ pub fn pow(
 /// ```
 #[func(title = "Exponential")]
 pub fn exp(
-/// The callsite span.
 span: Span,
 /// The exponent of the power.
 exponent: Spanned<Num>,
@@ -412,7 +410,6 @@ pub fn tanh(
 /// ```
 #[func(title = "Logarithm")]
 pub fn log(
-/// The callsite span.
 span: Span,
 /// The number whose logarithm to calculate. Must be strictly positive.
 value: Spanned<Num>,
@@ -454,7 +451,6 @@ pub fn log(
 /// ```
 #[func(title = "Natural Logarithm")]
 pub fn ln(
-/// The callsite span.
 span: Span,
 /// The number whose logarithm to calculate. Must be strictly positive.
 value: Spanned<Num>,
@@ -782,7 +778,6 @@ pub fn round(
 /// ```
 #[func]
 pub fn clamp(
-/// The callsite span.
 span: Span,
 /// The number to clamp.
 value: DecNum,
@@ -815,7 +810,6 @@ pub fn clamp(
 /// ```
 #[func(title = "Minimum")]
 pub fn min(
-/// The callsite span.
 span: Span,
 /// The sequence of values from which to extract the minimum.
 /// Must not be empty.
@@ -833,7 +827,6 @@ pub fn min(
 /// ```
 #[func(title = "Maximum")]
 pub fn max(
-/// The callsite span.
 span: Span,
 /// The sequence of values from which to extract the maximum.
 /// Must not be empty.
@@ -911,7 +904,6 @@ pub fn odd(
 /// ```
 #[func(title = "Remainder")]
 pub fn rem(
-/// The span of the function call.
 span: Span,
 /// The dividend of the remainder.
 dividend: DecNum,
@@ -950,7 +942,6 @@ pub fn rem(
 /// ```
 #[func(title = "Euclidean Division")]
 pub fn div_euclid(
-/// The callsite span.
 span: Span,
 /// The dividend of the division.
 dividend: DecNum,
@@ -994,7 +985,6 @@ pub fn div_euclid(
 /// ```
 #[func(title = "Euclidean Remainder", keywords = ["modulo", "modulus"])]
 pub fn rem_euclid(
-/// The callsite span.
 span: Span,
 /// The dividend of the remainder.
 dividend: DecNum,
@@ -1031,7 +1021,6 @@ pub fn rem_euclid(
 /// ```
 #[func(title = "Quotient")]
 pub fn quo(
-/// The span of the function call.
 span: Span,
 /// The dividend of the quotient.
 dividend: DecNum,
@@ -13,7 +13,9 @@ use typst_syntax::{Span, Spanned};
 use unicode_math_class::MathClass;

 use crate::diag::{At, HintedStrResult, HintedString, SourceResult, StrResult};
-use crate::foundations::{array, repr, NativeElement, Packed, Repr, Str, Type, Value};
+use crate::foundations::{
+array, repr, Fold, NativeElement, Packed, Repr, Str, Type, Value,
+};

 /// Determine details of a type.
 ///
@@ -497,3 +499,58 @@ cast! {
 /// An operator that can be both unary or binary like `+`.
 "vary" => MathClass::Vary,
 }

+/// A type that contains a user-visible source portion and something that is
+/// derived from it, but not user-visible.
+///
+/// An example usage would be `source` being a `DataSource` and `derived` a
+/// TextMate theme parsed from it. With `Derived`, we can store both parts in
+/// the `RawElem::theme` field and get automatic nice `Reflect` and `IntoValue`
+/// impls.
+#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, Hash)]
+pub struct Derived<S, D> {
+/// The source portion.
+pub source: S,
+/// The derived portion.
+pub derived: D,
+}

+impl<S, D> Derived<S, D> {
+/// Create a new instance from the `source` and the `derived` data.
+pub fn new(source: S, derived: D) -> Self {
+Self { source, derived }
+}
+}

+impl<S: Reflect, D> Reflect for Derived<S, D> {
+fn input() -> CastInfo {
+S::input()
+}

+fn output() -> CastInfo {
+S::output()
+}

+fn castable(value: &Value) -> bool {
+S::castable(value)
+}

+fn error(found: &Value) -> HintedString {
+S::error(found)
+}
+}

+impl<S: IntoValue, D> IntoValue for Derived<S, D> {
+fn into_value(self) -> Value {
+self.source.into_value()
+}
+}

+impl<S: Fold, D: Fold> Fold for Derived<S, D> {
+fn fold(self, outer: Self) -> Self {
+Self {
+source: self.source.fold(outer.source),
+derived: self.derived.fold(outer.derived),
+}
+}
+}
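// Editor's sketch (not part of the diff): intended use of the new `Derived`
// pair per its doc comment above. The theme path and the parsed value here
// are hypothetical placeholders; only the `Derived::new`/`into_value` calls
// and the Reflect/IntoValue behavior come from the hunk itself.
fn derived_sketch() {
    // Keep the user-visible source next to the value parsed from it.
    let source = EcoString::from("themes/my-theme.tmTheme");
    let parsed = vec![0u8; 4]; // stand-in for the actual parsed theme
    let both = Derived::new(source, parsed);
    // Casting back to a Typst value exposes only the source part.
    let value = both.into_value();
    assert!(EcoString::castable(&value));
}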
@@ -318,7 +318,6 @@ impl Datetime {
 /// ```
 #[func]
 pub fn today(
-/// The engine.
 engine: &mut Engine,
 /// An offset to apply to the current UTC date. If set to `{auto}`, the
 /// offset will be the local offset.
@@ -163,18 +163,14 @@ impl f64 {
 size: u32,
 ) -> StrResult<Bytes> {
 Ok(match size {
-8 => match endian {
+8 => Bytes::new(match endian {
 Endianness::Little => self.to_le_bytes(),
 Endianness::Big => self.to_be_bytes(),
-}
-.as_slice()
-.into(),
-4 => match endian {
+}),
+4 => Bytes::new(match endian {
 Endianness::Little => (self as f32).to_le_bytes(),
 Endianness::Big => (self as f32).to_be_bytes(),
-}
-.as_slice()
-.into(),
+}),
 _ => bail!("size must be either 4 or 8"),
 })
 }
@@ -334,8 +334,6 @@ impl Func {
 #[func]
 pub fn with(
 self,
-/// The real arguments (the other argument is just for the docs).
-/// The docs argument cannot be called `args`.
 args: &mut Args,
 /// The arguments to apply to the function.
 #[external]
@@ -361,8 +359,6 @@ impl Func {
 #[func]
 pub fn where_(
 self,
-/// The real arguments (the other argument is just for the docs).
-/// The docs argument cannot be called `args`.
 args: &mut Args,
 /// The fields to filter for.
 #[variadic]
@@ -1,6 +1,7 @@
 use std::num::{NonZeroI64, NonZeroIsize, NonZeroU64, NonZeroUsize, ParseIntError};

 use ecow::{eco_format, EcoString};
+use smallvec::SmallVec;

 use crate::diag::{bail, StrResult};
 use crate::foundations::{
@@ -322,7 +323,7 @@ impl i64 {
 Endianness::Little => self.to_le_bytes(),
 };

-let mut buf = vec![0u8; size];
+let mut buf = SmallVec::<[u8; 8]>::from_elem(0, size);
 match endian {
 Endianness::Big => {
 // Copy the bytes from the array to the buffer, starting from
@@ -339,7 +340,7 @@ impl i64 {
 }
 }

-Bytes::from(buf)
+Bytes::new(buf)
 }
 }

@@ -266,7 +266,6 @@ impl assert {
 /// ```
 #[func(title = "Evaluate")]
 pub fn eval(
-/// The engine.
 engine: &mut Engine,
 /// A string of Typst code to evaluate.
 source: Spanned<String>,
@@ -9,7 +9,7 @@ use wasmi::{AsContext, AsContextMut};
 use crate::diag::{bail, At, SourceResult, StrResult};
 use crate::engine::Engine;
 use crate::foundations::{func, repr, scope, ty, Bytes};
-use crate::World;
+use crate::loading::{DataSource, Load};

 /// A WebAssembly plugin.
 ///
@@ -152,17 +152,14 @@ impl Plugin {
 /// Creates a new plugin from a WebAssembly file.
 #[func(constructor)]
 pub fn construct(
-/// The engine.
 engine: &mut Engine,
-/// Path to a WebAssembly file.
+/// A path to a WebAssembly file or raw WebAssembly bytes.
 ///
-/// For more details, see the [Paths section]($syntax/#paths).
-path: Spanned<EcoString>,
+/// For more details about paths, see the [Paths section]($syntax/#paths).
+source: Spanned<DataSource>,
 ) -> SourceResult<Plugin> {
-let Spanned { v: path, span } = path;
-let id = span.resolve_path(&path).at(span)?;
-let data = engine.world.file(id).at(span)?;
-Plugin::new(data).at(span)
+let data = source.load(engine.world)?;
+Plugin::new(data).at(source.span)
 }
 }

@@ -293,7 +290,7 @@ impl Plugin {
 _ => bail!("plugin did not respect the protocol"),
 };

-Ok(output.into())
+Ok(Bytes::new(output))
 }

 /// An iterator over all the function names defined by the plugin.
@@ -425,9 +425,7 @@ impl Str {
 #[func]
 pub fn replace(
 &self,
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
 /// The pattern to search for.
 pattern: StrPattern,
@@ -784,11 +782,7 @@ cast! {
 v: f64 => Self::Str(repr::display_float(v).into()),
 v: Decimal => Self::Str(format_str!("{}", v)),
 v: Version => Self::Str(format_str!("{}", v)),
-v: Bytes => Self::Str(
-std::str::from_utf8(&v)
-.map_err(|_| "bytes are not valid utf-8")?
-.into()
-),
+v: Bytes => Self::Str(v.to_str().map_err(|_| "bytes are not valid utf-8")?),
 v: Label => Self::Str(v.resolve().as_str().into()),
 v: Type => Self::Str(v.long_name().into()),
 v: Str => Self::Str(v),
@@ -12,7 +12,8 @@ use typst_utils::LazyHash;
 use crate::diag::{SourceResult, Trace, Tracepoint};
 use crate::engine::Engine;
 use crate::foundations::{
-cast, ty, Content, Context, Element, Func, NativeElement, Repr, Selector,
+cast, ty, Content, Context, Element, Func, NativeElement, OneOrMultiple, Repr,
+Selector,
 };
 use crate::text::{FontFamily, FontList, TextElem};

@@ -939,6 +940,13 @@ impl<T, const N: usize> Fold for SmallVec<[T; N]> {
 }
 }

+impl<T> Fold for OneOrMultiple<T> {
+fn fold(self, mut outer: Self) -> Self {
+outer.0.extend(self.0);
+outer
+}
+}

 /// A variant of fold for foldable optional (`Option<T>`) values where an inner
 /// `None` value isn't respected (contrary to `Option`'s usual `Fold`
 /// implementation, with which folding with an inner `None` always returns
@@ -187,7 +187,6 @@ impl Symbol {
 /// ```
 #[func(constructor)]
 pub fn construct(
-/// The callsite span.
 span: Span,
 /// The variants of the symbol.
 ///
@@ -30,9 +30,6 @@ pub struct TargetElem {

 /// Returns the current compilation target.
 #[func(contextual)]
-pub fn target(
-/// The callsite context.
-context: Tracked<Context>,
-) -> HintedStrResult<Target> {
+pub fn target(context: Tracked<Context>) -> HintedStrResult<Target> {
 Ok(TargetElem::target_in(context.styles()?))
 }
@@ -459,15 +459,15 @@ impl<'de> Visitor<'de> for ValueVisitor {
 }

 fn visit_bytes<E: Error>(self, v: &[u8]) -> Result<Self::Value, E> {
-Ok(Bytes::from(v).into_value())
+Ok(Bytes::new(v.to_vec()).into_value())
 }

 fn visit_borrowed_bytes<E: Error>(self, v: &'de [u8]) -> Result<Self::Value, E> {
-Ok(Bytes::from(v).into_value())
+Ok(Bytes::new(v.to_vec()).into_value())
 }

 fn visit_byte_buf<E: Error>(self, v: Vec<u8>) -> Result<Self::Value, E> {
-Ok(Bytes::from(v).into_value())
+Ok(Bytes::new(v).into_value())
 }

 fn visit_none<E: Error>(self) -> Result<Self::Value, E> {
@@ -428,11 +428,8 @@ impl Counter {
 #[func(contextual)]
 pub fn get(
 &self,
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
-/// The callsite span.
 span: Span,
 ) -> SourceResult<CounterState> {
 let loc = context.location().at(span)?;
@@ -444,11 +441,8 @@ impl Counter {
 #[func(contextual)]
 pub fn display(
 self,
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
-/// The call span of the display.
 span: Span,
 /// A [numbering pattern or a function]($numbering), which specifies how
 /// to display the counter. If given a function, that function receives
@@ -482,11 +476,8 @@ impl Counter {
 #[func(contextual)]
 pub fn at(
 &self,
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
-/// The callsite span.
 span: Span,
 /// The place at which the counter's value should be retrieved.
 selector: LocatableSelector,
@@ -500,11 +491,8 @@ impl Counter {
 #[func(contextual)]
 pub fn final_(
 &self,
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
-/// The callsite span.
 span: Span,
 ) -> SourceResult<CounterState> {
 context.introspect().at(span)?;
@@ -528,7 +516,6 @@ impl Counter {
 #[func]
 pub fn step(
 self,
-/// The call span of the update.
 span: Span,
 /// The depth at which to step the counter. Defaults to `{1}`.
 #[named]
@@ -545,7 +532,6 @@ impl Counter {
 #[func]
 pub fn update(
 self,
-/// The call span of the update.
 span: Span,
 /// If given an integer or array of integers, sets the counter to that
 /// value. If given a function, that function receives the previous
@@ -800,7 +786,7 @@ impl ManualPageCounter {
 let Some(elem) = elem.to_packed::<CounterUpdateElem>() else {
 continue;
 };
-if *elem.key() == CounterKey::Page {
+if elem.key == CounterKey::Page {
 let mut state = CounterState(smallvec![self.logical]);
 state.update(engine, elem.update.clone())?;
 self.logical = state.first();
@@ -44,9 +44,6 @@ use crate::introspection::Location;
 /// ```
 /// Refer to the [`selector`] type for more details on before/after selectors.
 #[func(contextual)]
-pub fn here(
-/// The callsite context.
-context: Tracked<Context>,
-) -> HintedStrResult<Location> {
+pub fn here(context: Tracked<Context>) -> HintedStrResult<Location> {
 context.location()
 }
@@ -24,9 +24,7 @@ use crate::introspection::Location;
 /// ```
 #[func(contextual)]
 pub fn locate(
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
 /// A selector that should match exactly one element. This element will be
 /// located.
@@ -136,9 +136,7 @@ use crate::foundations::{func, Array, Context, LocatableSelector, Value};
 /// ```
 #[func(contextual)]
 pub fn query(
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
 /// Can be
 /// - an element function like a `heading` or `figure`,
@@ -245,7 +245,7 @@ impl State {

 for elem in introspector.query(&self.selector()) {
 let elem = elem.to_packed::<StateUpdateElem>().unwrap();
-match elem.update() {
+match &elem.update {
 StateUpdate::Set(value) => state = value.clone(),
 StateUpdate::Func(func) => {
 state = func.call(&mut engine, Context::none().track(), [state])?
@@ -289,11 +289,8 @@ impl State {
 #[func(contextual)]
 pub fn get(
 &self,
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
-/// The callsite span.
 span: Span,
 ) -> SourceResult<Value> {
 let loc = context.location().at(span)?;
@@ -309,11 +306,8 @@ impl State {
 #[func(contextual)]
 pub fn at(
 &self,
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
-/// The callsite span.
 span: Span,
 /// The place at which the state's value should be retrieved.
 selector: LocatableSelector,
@@ -326,11 +320,8 @@ impl State {
 #[func(contextual)]
 pub fn final_(
 &self,
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
-/// The callsite span.
 span: Span,
 ) -> SourceResult<Value> {
 context.introspect().at(span)?;
@@ -349,7 +340,6 @@ impl State {
 #[func]
 pub fn update(
 self,
-/// The span of the `update` call.
 span: Span,
 /// If given a non function-value, sets the state to that value. If
 /// given a function, that function receives the previous state and has
@@ -100,7 +100,7 @@ pub struct AlignElem {
 impl Show for Packed<AlignElem> {
 #[typst_macros::time(name = "align", span = self.span())]
 fn show(&self, _: &mut Engine, styles: StyleChain) -> SourceResult<Content> {
-Ok(self.body().clone().aligned(self.alignment(styles)))
+Ok(self.body.clone().aligned(self.alignment(styles)))
 }
 }

@@ -166,7 +166,7 @@ impl Packed<InlineElem> {
 styles: StyleChain,
 region: Size,
 ) -> SourceResult<Vec<InlineItem>> {
-self.body().call(engine, locator, styles, region)
+self.body.call(engine, locator, styles, region)
 }
 }

@@ -749,7 +749,7 @@ cast! {

 impl Show for Packed<GridCell> {
 fn show(&self, _engine: &mut Engine, styles: StyleChain) -> SourceResult<Content> {
-show_grid_cell(self.body().clone(), self.inset(styles), self.align(styles))
+show_grid_cell(self.body.clone(), self.inset(styles), self.align(styles))
 }
 }

@@ -42,16 +42,16 @@ pub fn grid_to_cellgrid<'a>(
 // Use trace to link back to the grid when a specific cell errors
 let tracepoint = || Tracepoint::Call(Some(eco_format!("grid")));
 let resolve_item = |item: &GridItem| grid_item_to_resolvable(item, styles);
-let children = elem.children().iter().map(|child| match child {
+let children = elem.children.iter().map(|child| match child {
 GridChild::Header(header) => ResolvableGridChild::Header {
 repeat: header.repeat(styles),
 span: header.span(),
-items: header.children().iter().map(resolve_item),
+items: header.children.iter().map(resolve_item),
 },
 GridChild::Footer(footer) => ResolvableGridChild::Footer {
 repeat: footer.repeat(styles),
 span: footer.span(),
-items: footer.children().iter().map(resolve_item),
+items: footer.children.iter().map(resolve_item),
 },
 GridChild::Item(item) => {
 ResolvableGridChild::Item(grid_item_to_resolvable(item, styles))
@@ -95,16 +95,16 @@ pub fn table_to_cellgrid<'a>(
 // Use trace to link back to the table when a specific cell errors
 let tracepoint = || Tracepoint::Call(Some(eco_format!("table")));
 let resolve_item = |item: &TableItem| table_item_to_resolvable(item, styles);
-let children = elem.children().iter().map(|child| match child {
+let children = elem.children.iter().map(|child| match child {
 TableChild::Header(header) => ResolvableGridChild::Header {
 repeat: header.repeat(styles),
 span: header.span(),
-items: header.children().iter().map(resolve_item),
+items: header.children.iter().map(resolve_item),
 },
 TableChild::Footer(footer) => ResolvableGridChild::Footer {
 repeat: footer.repeat(styles),
 span: footer.span(),
-items: footer.children().iter().map(resolve_item),
+items: footer.children.iter().map(resolve_item),
 },
 TableChild::Item(item) => {
 ResolvableGridChild::Item(table_item_to_resolvable(item, styles))
@@ -29,6 +29,6 @@ pub struct HideElem {
 impl Show for Packed<HideElem> {
 #[typst_macros::time(name = "hide", span = self.span())]
 fn show(&self, _: &mut Engine, _: StyleChain) -> SourceResult<Content> {
-Ok(self.body().clone().styled(HideElem::set_hidden(true)))
+Ok(self.body.clone().styled(HideElem::set_hidden(true)))
 }
 }
@@ -54,7 +54,6 @@ use crate::layout::{BlockElem, Size};
 /// corresponding page dimension is set to `{auto}`.
 #[func]
 pub fn layout(
-/// The call span of this function.
 span: Span,
 /// A function to call with the outer container's size. Its return value is
 /// displayed in the document.
@@ -89,7 +88,7 @@ impl Show for Packed<LayoutElem> {
 let loc = elem.location().unwrap();
 let context = Context::new(Some(loc), Some(styles));
 let result = elem
-.func()
+.func
 .call(
 engine,
 context.track(),
@@ -43,11 +43,8 @@ use crate::layout::{Abs, Axes, Length, Region, Size};
 /// `height`, both of type [`length`].
 #[func(contextual)]
 pub fn measure(
-/// The engine.
 engine: &mut Engine,
-/// The callsite context.
 context: Tracked<Context>,
-/// The callsite span.
 span: Span,
 /// The width available to layout the content.
 ///
@@ -21,6 +21,7 @@ pub mod layout;
 pub mod loading;
 pub mod math;
 pub mod model;
+pub mod pdf;
 pub mod routines;
 pub mod symbols;
 pub mod text;
@@ -249,6 +250,7 @@ fn global(math: Module, inputs: Dict, features: &Features) -> Module {
 self::introspection::define(&mut global);
 self::loading::define(&mut global);
 self::symbols::define(&mut global);
+self::pdf::define(&mut global);
 global.reset_category();
 if features.is_enabled(Feature::Html) {
 global.define_module(self::html::module());
@@ -1,10 +1,10 @@
-use ecow::{eco_format, EcoString};
+use ecow::eco_format;
 use typst_syntax::Spanned;

 use crate::diag::{At, SourceResult};
 use crate::engine::Engine;
 use crate::foundations::{func, scope, Bytes, Value};
-use crate::World;
+use crate::loading::{DataSource, Load};

 /// Reads structured data from a CBOR file.
 ///
@@ -19,31 +19,31 @@ use crate::World;
 /// floating point numbers, which may result in an approximative value.
 #[func(scope, title = "CBOR")]
 pub fn cbor(
-/// The engine.
 engine: &mut Engine,
-/// Path to a CBOR file.
+/// A path to a CBOR file or raw CBOR bytes.
 ///
-/// For more details, see the [Paths section]($syntax/#paths).
-path: Spanned<EcoString>,
+/// For more details about paths, see the [Paths section]($syntax/#paths).
+source: Spanned<DataSource>,
 ) -> SourceResult<Value> {
-let Spanned { v: path, span } = path;
-let id = span.resolve_path(&path).at(span)?;
-let data = engine.world.file(id).at(span)?;
-cbor::decode(Spanned::new(data, span))
+let data = source.load(engine.world)?;
+ciborium::from_reader(data.as_slice())
+.map_err(|err| eco_format!("failed to parse CBOR ({err})"))
+.at(source.span)
 }

 #[scope]
 impl cbor {
 /// Reads structured data from CBOR bytes.
+///
+/// This function is deprecated. The [`cbor`] function now accepts bytes
+/// directly.
 #[func(title = "Decode CBOR")]
 pub fn decode(
-/// cbor data.
+engine: &mut Engine,
+/// CBOR data.
 data: Spanned<Bytes>,
 ) -> SourceResult<Value> {
-let Spanned { v: data, span } = data;
-ciborium::from_reader(data.as_slice())
-.map_err(|err| eco_format!("failed to parse CBOR ({err})"))
-.at(span)
+cbor(engine, data.map(DataSource::Bytes))
 }

 /// Encode structured data into CBOR bytes.
@@ -55,7 +55,7 @@ impl cbor {
 let Spanned { v: value, span } = value;
 let mut res = Vec::new();
 ciborium::into_writer(&value, &mut res)
-.map(|_| res.into())
+.map(|_| Bytes::new(res))
 .map_err(|err| eco_format!("failed to encode value as CBOR ({err})"))
 .at(span)
 }
@@ -4,8 +4,7 @@ use typst_syntax::Spanned;
 use crate::diag::{bail, At, SourceResult};
 use crate::engine::Engine;
 use crate::foundations::{cast, func, scope, Array, Dict, IntoValue, Type, Value};
-use crate::loading::Readable;
-use crate::World;
+use crate::loading::{DataSource, Load, Readable};

 /// Reads structured data from a CSV file.
 ///
@@ -26,12 +25,11 @@ use crate::World;
 /// ```
 #[func(scope, title = "CSV")]
 pub fn csv(
-/// The engine.
 engine: &mut Engine,
-/// Path to a CSV file.
+/// Path to a CSV file or raw CSV bytes.
 ///
-/// For more details, see the [Paths section]($syntax/#paths).
-path: Spanned<EcoString>,
+/// For more details about paths, see the [Paths section]($syntax/#paths).
+source: Spanned<DataSource>,
 /// The delimiter that separates columns in the CSV file.
 /// Must be a single ASCII character.
 #[named]
@@ -48,17 +46,62 @@ pub fn csv(
 #[default(RowType::Array)]
 row_type: RowType,
 ) -> SourceResult<Array> {
-let Spanned { v: path, span } = path;
-let id = span.resolve_path(&path).at(span)?;
-let data = engine.world.file(id).at(span)?;
-self::csv::decode(Spanned::new(Readable::Bytes(data), span), delimiter, row_type)
+let data = source.load(engine.world)?;
+let mut builder = ::csv::ReaderBuilder::new();
+let has_headers = row_type == RowType::Dict;
+builder.has_headers(has_headers);
+builder.delimiter(delimiter.0 as u8);

+// Counting lines from 1 by default.
+let mut line_offset: usize = 1;
+let mut reader = builder.from_reader(data.as_slice());
+let mut headers: Option<::csv::StringRecord> = None;

+if has_headers {
+// Counting lines from 2 because we have a header.
+line_offset += 1;
+headers = Some(
+reader
+.headers()
+.map_err(|err| format_csv_error(err, 1))
+.at(source.span)?
+.clone(),
+);
+}

+let mut array = Array::new();
+for (line, result) in reader.records().enumerate() {
+// Original solution was to use line from error, but that is
+// incorrect with `has_headers` set to `false`. See issue:
+// https://github.com/BurntSushi/rust-csv/issues/184
+let line = line + line_offset;
+let row = result.map_err(|err| format_csv_error(err, line)).at(source.span)?;
+let item = if let Some(headers) = &headers {
+let mut dict = Dict::new();
+for (field, value) in headers.iter().zip(&row) {
+dict.insert(field.into(), value.into_value());
+}
+dict.into_value()
+} else {
+let sub = row.into_iter().map(|field| field.into_value()).collect();
+Value::Array(sub)
+};
+array.push(item);
+}

+Ok(array)
 }

 #[scope]
 impl csv {
 /// Reads structured data from a CSV string/bytes.
+///
+/// This function is deprecated. The [`csv`] function now accepts bytes
+/// directly.
 #[func(title = "Decode CSV")]
 pub fn decode(
+engine: &mut Engine,
 /// CSV data.
 data: Spanned<Readable>,
 /// The delimiter that separates columns in the CSV file.
@@ -77,51 +120,7 @@ impl csv {
 #[default(RowType::Array)]
 row_type: RowType,
 ) -> SourceResult<Array> {
-let Spanned { v: data, span } = data;
-let has_headers = row_type == RowType::Dict;

-let mut builder = ::csv::ReaderBuilder::new();
-builder.has_headers(has_headers);
-builder.delimiter(delimiter.0 as u8);

-// Counting lines from 1 by default.
-let mut line_offset: usize = 1;
-let mut reader = builder.from_reader(data.as_slice());
-let mut headers: Option<::csv::StringRecord> = None;

-if has_headers {
-// Counting lines from 2 because we have a header.
-line_offset += 1;
-headers = Some(
-reader
-.headers()
-.map_err(|err| format_csv_error(err, 1))
-.at(span)?
-.clone(),
-);
-}

-let mut array = Array::new();
-for (line, result) in reader.records().enumerate() {
-// Original solution was to use line from error, but that is
-// incorrect with `has_headers` set to `false`. See issue:
-// https://github.com/BurntSushi/rust-csv/issues/184
-let line = line + line_offset;
-let row = result.map_err(|err| format_csv_error(err, line)).at(span)?;
-let item = if let Some(headers) = &headers {
-let mut dict = Dict::new();
-for (field, value) in headers.iter().zip(&row) {
-dict.insert(field.into(), value.into_value());
-}
-dict.into_value()
-} else {
-let sub = row.into_iter().map(|field| field.into_value()).collect();
-Value::Array(sub)
-};
-array.push(item);
-}

-Ok(array)
+csv(engine, data.map(Readable::into_source), delimiter, row_type)
 }
 }

@@ -1,11 +1,10 @@
-use ecow::{eco_format, EcoString};
+use ecow::eco_format;
 use typst_syntax::Spanned;

 use crate::diag::{At, SourceResult};
 use crate::engine::Engine;
 use crate::foundations::{func, scope, Str, Value};
-use crate::loading::Readable;
-use crate::World;
+use crate::loading::{DataSource, Load, Readable};

 /// Reads structured data from a JSON file.
 ///
@@ -51,31 +50,31 @@ use crate::World;
 /// ```
 #[func(scope, title = "JSON")]
 pub fn json(
-/// The engine.
 engine: &mut Engine,
-/// Path to a JSON file.
+/// Path to a JSON file or raw JSON bytes.
 ///
-/// For more details, see the [Paths section]($syntax/#paths).
-path: Spanned<EcoString>,
+/// For more details about paths, see the [Paths section]($syntax/#paths).
+source: Spanned<DataSource>,
 ) -> SourceResult<Value> {
-let Spanned { v: path, span } = path;
-let id = span.resolve_path(&path).at(span)?;
-let data = engine.world.file(id).at(span)?;
-json::decode(Spanned::new(Readable::Bytes(data), span))
+let data = source.load(engine.world)?;
+serde_json::from_slice(data.as_slice())
+.map_err(|err| eco_format!("failed to parse JSON ({err})"))
+.at(source.span)
 }

 #[scope]
 impl json {
 /// Reads structured data from a JSON string/bytes.
+///
+/// This function is deprecated. The [`json`] function now accepts bytes
+/// directly.
 #[func(title = "Decode JSON")]
 pub fn decode(
+engine: &mut Engine,
 /// JSON data.
 data: Spanned<Readable>,
 ) -> SourceResult<Value> {
-let Spanned { v: data, span } = data;
-serde_json::from_slice(data.as_slice())
-.map_err(|err| eco_format!("failed to parse JSON ({err})"))
-.at(span)
+json(engine, data.map(Readable::into_source))
 }

 /// Encodes structured data into a JSON string.
@@ -15,6 +15,10 @@ mod xml_;
 #[path = "yaml.rs"]
 mod yaml_;

+use comemo::Tracked;
+use ecow::EcoString;
+use typst_syntax::Spanned;

 pub use self::cbor_::*;
 pub use self::csv_::*;
 pub use self::json_::*;
@@ -23,7 +27,10 @@ pub use self::toml_::*;
 pub use self::xml_::*;
 pub use self::yaml_::*;

+use crate::diag::{At, SourceResult};
+use crate::foundations::OneOrMultiple;
 use crate::foundations::{cast, category, Bytes, Category, Scope, Str};
+use crate::World;

 /// Data loading from external files.
 ///
@@ -44,6 +51,76 @@ pub(super) fn define(global: &mut Scope) {
 global.define_func::<xml>();
 }

+/// Something we can retrieve byte data from.
+#[derive(Debug, Clone, PartialEq, Hash)]
+pub enum DataSource {
+/// A path to a file.
+Path(EcoString),
+/// Raw bytes.
+Bytes(Bytes),
+}

+cast! {
+DataSource,
+self => match self {
+Self::Path(v) => v.into_value(),
+Self::Bytes(v) => v.into_value(),
+},
+v: EcoString => Self::Path(v),
+v: Bytes => Self::Bytes(v),
+}

+/// Loads data from a path or provided bytes.
+pub trait Load {
+/// Bytes or a list of bytes (if there are multiple sources).
+type Output;

+/// Load the bytes.
+fn load(&self, world: Tracked<dyn World + '_>) -> SourceResult<Self::Output>;
+}

+impl Load for Spanned<DataSource> {
+type Output = Bytes;

+fn load(&self, world: Tracked<dyn World + '_>) -> SourceResult<Bytes> {
+self.as_ref().load(world)
+}
+}

+impl Load for Spanned<&DataSource> {
+type Output = Bytes;

+fn load(&self, world: Tracked<dyn World + '_>) -> SourceResult<Bytes> {
+match &self.v {
+DataSource::Path(path) => {
+let file_id = self.span.resolve_path(path).at(self.span)?;
+world.file(file_id).at(self.span)
+}
+DataSource::Bytes(bytes) => Ok(bytes.clone()),
+}
+}
+}

+impl Load for Spanned<OneOrMultiple<DataSource>> {
+type Output = Vec<Bytes>;

+fn load(&self, world: Tracked<dyn World + '_>) -> SourceResult<Vec<Bytes>> {
+self.as_ref().load(world)
+}
+}

+impl Load for Spanned<&OneOrMultiple<DataSource>> {
+type Output = Vec<Bytes>;

+fn load(&self, world: Tracked<dyn World + '_>) -> SourceResult<Vec<Bytes>> {
+self.v
+.0
+.iter()
+.map(|source| Spanned::new(source, self.span).load(world))
+.collect()
+}
+}

 /// A value that can be read from a file.
 #[derive(Debug, Clone, PartialEq, Hash)]
 pub enum Readable {
@@ -54,18 +131,15 @@ pub enum Readable {
 }

 impl Readable {
-pub fn as_slice(&self) -> &[u8] {
+pub fn into_bytes(self) -> Bytes {
 match self {
-Readable::Bytes(v) => v,
-Readable::Str(v) => v.as_bytes(),
+Self::Bytes(v) => v,
+Self::Str(v) => Bytes::from_string(v),
 }
 }

-pub fn as_str(&self) -> Option<&str> {
-match self {
-Readable::Str(v) => Some(v.as_str()),
-Readable::Bytes(v) => std::str::from_utf8(v).ok(),
-}
+pub fn into_source(self) -> DataSource {
+DataSource::Bytes(self.into_bytes())
 }
 }

@@ -78,12 +152,3 @@ cast! {
 v: Str => Self::Str(v),
 v: Bytes => Self::Bytes(v),
 }

-impl From<Readable> for Bytes {
-fn from(value: Readable) -> Self {
-match value {
-Readable::Bytes(v) => v,
-Readable::Str(v) => v.as_bytes().into(),
-}
-}
-}
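// Editor's sketch (not part of the diff): the shape of a loader built on the
// new `DataSource`/`Load` pair defined above. `parse_somehow` is a
// hypothetical stand-in for a per-format parser (serde_json, ciborium, ...);
// everything else mirrors how the reworked `json`/`cbor` functions call it.
fn loader_sketch(
    engine: &mut Engine,
    source: Spanned<DataSource>,
) -> SourceResult<Value> {
    // One call covers both variants: a `Path` is resolved relative to the
    // call site and read through the `World`, raw `Bytes` are used as-is.
    let data = source.load(engine.world)?;
    parse_somehow(data.as_slice()).at(source.span)
}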
@ -1,7 +1,7 @@
|
|||||||
use ecow::EcoString;
|
use ecow::EcoString;
|
||||||
use typst_syntax::Spanned;
|
use typst_syntax::Spanned;
|
||||||
|
|
||||||
use crate::diag::{At, SourceResult};
|
use crate::diag::{At, FileError, SourceResult};
|
||||||
use crate::engine::Engine;
|
use crate::engine::Engine;
|
||||||
use crate::foundations::{func, Cast};
|
use crate::foundations::{func, Cast};
|
||||||
use crate::loading::Readable;
|
use crate::loading::Readable;
|
||||||
@ -24,7 +24,6 @@ use crate::World;
|
|||||||
/// ```
|
/// ```
|
||||||
#[func]
|
#[func]
|
||||||
pub fn read(
|
pub fn read(
|
||||||
/// The engine.
|
|
||||||
engine: &mut Engine,
|
engine: &mut Engine,
|
||||||
/// Path to a file.
|
/// Path to a file.
|
||||||
///
|
///
|
||||||
@ -42,12 +41,9 @@ pub fn read(
|
|||||||
let data = engine.world.file(id).at(span)?;
|
let data = engine.world.file(id).at(span)?;
|
||||||
Ok(match encoding {
|
Ok(match encoding {
|
||||||
None => Readable::Bytes(data),
|
None => Readable::Bytes(data),
|
||||||
Some(Encoding::Utf8) => Readable::Str(
|
Some(Encoding::Utf8) => {
|
||||||
std::str::from_utf8(&data)
|
Readable::Str(data.to_str().map_err(FileError::from).at(span)?)
|
||||||
.map_err(|_| "file is not valid utf-8")
|
}
|
||||||
.at(span)?
|
|
||||||
.into(),
|
|
||||||
),
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,11 +1,10 @@
|
|||||||
use ecow::{eco_format, EcoString};
|
use ecow::{eco_format, EcoString};
|
||||||
use typst_syntax::{is_newline, Spanned};
|
use typst_syntax::{is_newline, Spanned};
|
||||||
|
|
||||||
use crate::diag::{At, SourceResult};
|
use crate::diag::{At, FileError, SourceResult};
|
||||||
use crate::engine::Engine;
|
use crate::engine::Engine;
|
||||||
use crate::foundations::{func, scope, Str, Value};
|
use crate::foundations::{func, scope, Str, Value};
|
||||||
use crate::loading::Readable;
|
use crate::loading::{DataSource, Load, Readable};
|
||||||
use crate::World;
|
|
||||||
|
|
||||||
/// Reads structured data from a TOML file.
|
/// Reads structured data from a TOML file.
|
||||||
///
|
///
|
||||||
@ -29,34 +28,32 @@ use crate::World;
|
|||||||
/// ```
|
/// ```
|
||||||
#[func(scope, title = "TOML")]
|
#[func(scope, title = "TOML")]
|
||||||
pub fn toml(
|
pub fn toml(
|
||||||
/// The engine.
|
|
||||||
engine: &mut Engine,
|
engine: &mut Engine,
|
||||||
/// Path to a TOML file.
|
/// A path to a TOML file or raw TOML bytes.
|
||||||
///
|
///
|
||||||
/// For more details, see the [Paths section]($syntax/#paths).
|
/// For more details about paths, see the [Paths section]($syntax/#paths).
|
||||||
path: Spanned<EcoString>,
|
source: Spanned<DataSource>,
|
||||||
) -> SourceResult<Value> {
|
) -> SourceResult<Value> {
|
||||||
let Spanned { v: path, span } = path;
|
let data = source.load(engine.world)?;
|
||||||
let id = span.resolve_path(&path).at(span)?;
|
let raw = data.as_str().map_err(FileError::from).at(source.span)?;
|
||||||
let data = engine.world.file(id).at(span)?;
|
::toml::from_str(raw)
|
||||||
toml::decode(Spanned::new(Readable::Bytes(data), span))
|
.map_err(|err| format_toml_error(err, raw))
|
||||||
|
.at(source.span)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[scope]
|
#[scope]
|
||||||
impl toml {
|
impl toml {
|
||||||
/// Reads structured data from a TOML string/bytes.
|
/// Reads structured data from a TOML string/bytes.
|
||||||
|
///
|
||||||
|
/// This function is deprecated. The [`toml`] function now accepts bytes
|
||||||
|
/// directly.
|
||||||
#[func(title = "Decode TOML")]
|
#[func(title = "Decode TOML")]
|
||||||
pub fn decode(
|
pub fn decode(
|
||||||
|
engine: &mut Engine,
|
||||||
/// TOML data.
|
/// TOML data.
|
||||||
data: Spanned<Readable>,
|
data: Spanned<Readable>,
|
||||||
) -> SourceResult<Value> {
|
) -> SourceResult<Value> {
|
||||||
let Spanned { v: data, span } = data;
|
toml(engine, data.map(Readable::into_source))
|
||||||
let raw = std::str::from_utf8(data.as_slice())
|
|
||||||
.map_err(|_| "file is not valid utf-8")
|
|
||||||
.at(span)?;
|
|
||||||
::toml::from_str(raw)
|
|
||||||
.map_err(|err| format_toml_error(err, raw))
|
|
||||||
.at(span)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Encodes structured data into a TOML string.
|
/// Encodes structured data into a TOML string.
|
||||||
|
@@ -5,8 +5,7 @@ use typst_syntax::Spanned;
 use crate::diag::{format_xml_like_error, At, FileError, SourceResult};
 use crate::engine::Engine;
 use crate::foundations::{dict, func, scope, Array, Dict, IntoValue, Str, Value};
-use crate::loading::Readable;
-use crate::World;
+use crate::loading::{DataSource, Load, Readable};
 
 /// Reads structured data from an XML file.
 ///
@@ -58,38 +57,36 @@ use crate::World;
 /// ```
 #[func(scope, title = "XML")]
 pub fn xml(
-    /// The engine.
     engine: &mut Engine,
-    /// Path to an XML file.
+    /// A path to an XML file or raw XML bytes.
     ///
-    /// For more details, see the [Paths section]($syntax/#paths).
-    path: Spanned<EcoString>,
+    /// For more details about paths, see the [Paths section]($syntax/#paths).
+    source: Spanned<DataSource>,
 ) -> SourceResult<Value> {
-    let Spanned { v: path, span } = path;
-    let id = span.resolve_path(&path).at(span)?;
-    let data = engine.world.file(id).at(span)?;
-    xml::decode(Spanned::new(Readable::Bytes(data), span))
+    let data = source.load(engine.world)?;
+    let text = data.as_str().map_err(FileError::from).at(source.span)?;
+    let document = roxmltree::Document::parse_with_options(
+        text,
+        ParsingOptions { allow_dtd: true, ..Default::default() },
+    )
+    .map_err(format_xml_error)
+    .at(source.span)?;
+    Ok(convert_xml(document.root()))
 }
 
 #[scope]
 impl xml {
     /// Reads structured data from an XML string/bytes.
+    ///
+    /// This function is deprecated. The [`xml`] function now accepts bytes
+    /// directly.
     #[func(title = "Decode XML")]
     pub fn decode(
+        engine: &mut Engine,
        /// XML data.
         data: Spanned<Readable>,
     ) -> SourceResult<Value> {
-        let Spanned { v: data, span } = data;
-        let text = std::str::from_utf8(data.as_slice())
-            .map_err(FileError::from)
-            .at(span)?;
-        let document = roxmltree::Document::parse_with_options(
-            text,
-            ParsingOptions { allow_dtd: true, ..Default::default() },
-        )
-        .map_err(format_xml_error)
-        .at(span)?;
-        Ok(convert_xml(document.root()))
+        xml(engine, data.map(Readable::into_source))
     }
 }
 
@@ -1,11 +1,10 @@
-use ecow::{eco_format, EcoString};
+use ecow::eco_format;
 use typst_syntax::Spanned;
 
 use crate::diag::{At, SourceResult};
 use crate::engine::Engine;
 use crate::foundations::{func, scope, Str, Value};
-use crate::loading::Readable;
-use crate::World;
+use crate::loading::{DataSource, Load, Readable};
 
 /// Reads structured data from a YAML file.
 ///
@@ -41,31 +40,31 @@ use crate::World;
 /// ```
 #[func(scope, title = "YAML")]
 pub fn yaml(
-    /// The engine.
     engine: &mut Engine,
-    /// Path to a YAML file.
+    /// A path to a YAML file or raw YAML bytes.
     ///
-    /// For more details, see the [Paths section]($syntax/#paths).
-    path: Spanned<EcoString>,
+    /// For more details about paths, see the [Paths section]($syntax/#paths).
+    source: Spanned<DataSource>,
 ) -> SourceResult<Value> {
-    let Spanned { v: path, span } = path;
-    let id = span.resolve_path(&path).at(span)?;
-    let data = engine.world.file(id).at(span)?;
-    yaml::decode(Spanned::new(Readable::Bytes(data), span))
+    let data = source.load(engine.world)?;
+    serde_yaml::from_slice(data.as_slice())
+        .map_err(|err| eco_format!("failed to parse YAML ({err})"))
+        .at(source.span)
 }
 
 #[scope]
 impl yaml {
     /// Reads structured data from a YAML string/bytes.
+    ///
+    /// This function is deprecated. The [`yaml`] function now accepts bytes
+    /// directly.
     #[func(title = "Decode YAML")]
     pub fn decode(
+        engine: &mut Engine,
         /// YAML data.
         data: Spanned<Readable>,
     ) -> SourceResult<Value> {
-        let Spanned { v: data, span } = data;
-        serde_yaml::from_slice(data.as_slice())
-            .map_err(|err| eco_format!("failed to parse YAML ({err})"))
-            .at(span)
+        yaml(engine, data.map(Readable::into_source))
     }
 
     /// Encode structured data into a YAML string.
@@ -143,7 +143,7 @@ cast! {
     self => self.0.into_value(),
     v: char => Self::new(v),
     v: Content => match v.to_packed::<TextElem>() {
-        Some(elem) => Value::Str(elem.text().clone().into()).cast()?,
+        Some(elem) => Value::Str(elem.text.clone().into()).cast()?,
         None => bail!("expected text"),
     },
 }
@@ -47,9 +47,9 @@ impl Packed<AttachElem> {
     /// base AttachElem where possible.
     pub fn merge_base(&self) -> Option<Self> {
         // Extract from an EquationElem.
-        let mut base = self.base();
+        let mut base = &self.base;
         while let Some(equation) = base.to_packed::<EquationElem>() {
-            base = equation.body();
+            base = &equation.body;
         }
 
         // Move attachments from elem into base where possible.
@@ -82,8 +82,9 @@ use crate::text::TextElem;
 /// - Within them, Typst is still in "math mode". Thus, you can write math
 ///   directly into them, but need to use hash syntax to pass code expressions
 ///   (except for strings, which are available in the math syntax).
-/// - They support positional and named arguments, but don't support trailing
-///   content blocks and argument spreading.
+/// - They support positional and named arguments, as well as argument
+///   spreading.
+/// - They don't support trailing content blocks.
 /// - They provide additional syntax for 2-dimensional argument lists. The
 ///   semicolon (`;`) merges preceding arguments separated by commas into an
 ///   array argument.
@@ -92,6 +93,7 @@ use crate::text::TextElem;
 /// $ frac(a^2, 2) $
 /// $ vec(1, 2, delim: "[") $
 /// $ mat(1, 2; 3, 4) $
+/// $ mat(..#range(1, 5).chunks(2)) $
 /// $ lim_x =
 ///     op("lim", limits: #true)_x $
 /// ```
@@ -10,7 +10,6 @@ use crate::math::Mathy;
 /// ```
 #[func(title = "Square Root")]
 pub fn sqrt(
-    /// The call span of this function.
     span: Span,
     /// The expression to take the square root of.
     radicand: Content,
@@ -1,7 +1,7 @@
+use std::any::TypeId;
 use std::collections::HashMap;
 use std::ffi::OsStr;
 use std::fmt::{self, Debug, Formatter};
-use std::hash::{Hash, Hasher};
 use std::num::NonZeroUsize;
 use std::path::Path;
 use std::sync::{Arc, LazyLock};
@@ -12,26 +12,26 @@ use hayagriva::archive::ArchivedStyle;
 use hayagriva::io::BibLaTeXError;
 use hayagriva::{
     citationberg, BibliographyDriver, BibliographyRequest, CitationItem, CitationRequest,
-    SpecificLocator,
+    Library, SpecificLocator,
 };
 use indexmap::IndexMap;
 use smallvec::{smallvec, SmallVec};
-use typed_arena::Arena;
 use typst_syntax::{Span, Spanned};
-use typst_utils::{LazyHash, NonZeroExt, PicoStr};
+use typst_utils::{ManuallyHash, NonZeroExt, PicoStr};
 
 use crate::diag::{bail, error, At, FileError, HintedStrResult, SourceResult, StrResult};
 use crate::engine::Engine;
 use crate::foundations::{
-    cast, elem, ty, Args, Array, Bytes, CastInfo, Content, FromValue, IntoValue, Label,
-    NativeElement, Packed, Reflect, Repr, Scope, Show, ShowSet, Smart, Str, StyleChain,
-    Styles, Synthesize, Type, Value,
+    elem, Bytes, CastInfo, Content, Derived, FromValue, IntoValue, Label, NativeElement,
+    OneOrMultiple, Packed, Reflect, Scope, Show, ShowSet, Smart, StyleChain, Styles,
+    Synthesize, Value,
 };
 use crate::introspection::{Introspector, Locatable, Location};
 use crate::layout::{
     BlockBody, BlockElem, Em, GridCell, GridChild, GridElem, GridItem, HElem, PadElem,
     Sizing, TrackSizings, VElem,
 };
+use crate::loading::{DataSource, Load};
 use crate::model::{
     CitationForm, CiteGroup, Destination, FootnoteElem, HeadingElem, LinkElem, ParElem,
     Url,
@@ -86,13 +86,20 @@ use crate::World;
 /// ```
 #[elem(Locatable, Synthesize, Show, ShowSet, LocalName)]
 pub struct BibliographyElem {
-    /// Path(s) to Hayagriva `.yml` and/or BibLaTeX `.bib` files.
+    /// One or multiple paths to or raw bytes for Hayagriva `.yml` and/or
+    /// BibLaTeX `.bib` files.
+    ///
+    /// This can be a:
+    /// - A path string to load a bibliography file from the given path. For
+    ///   more details about paths, see the [Paths section]($syntax/#paths).
+    /// - Raw bytes from which the bibliography should be decoded.
+    /// - An array where each item is one of the above.
     #[required]
     #[parse(
-        let (paths, bibliography) = Bibliography::parse(engine, args)?;
-        paths
+        let sources = args.expect("sources")?;
+        Bibliography::load(engine.world, sources)?
     )]
-    pub path: BibliographyPaths,
+    pub sources: Derived<OneOrMultiple<DataSource>, Bibliography>,
 
     /// The title of the bibliography.
     ///
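The new `sources` field pairs the user-supplied value with the value computed from it via `Derived<OneOrMultiple<DataSource>, Bibliography>`. As a rough mental model (a sketch, not the crate's actual definition), `Derived` simply stores both halves so the element can reflect its original input and still use the parsed result.

```rust
/// Illustrative stand-in for the `Derived` pairing used in the field above:
/// the original source is kept for reflection/serialization, the derived
/// value for actual use at layout time.
struct Derived<S, D> {
    source: S,
    derived: D,
}

impl<S, D> Derived<S, D> {
    fn new(source: S, derived: D) -> Self {
        Self { source, derived }
    }
}

fn main() {
    // E.g. a path string paired with the number of entries parsed from it.
    let bib = Derived::new("refs.bib".to_string(), 42usize);
    println!("{} -> {} entries", bib.source, bib.derived);
}
```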
@@ -116,19 +123,22 @@ pub struct BibliographyElem {
 
     /// The bibliography style.
     ///
-    /// Should be either one of the built-in styles (see below) or a path to
-    /// a [CSL file](https://citationstyles.org/). Some of the styles listed
-    /// below appear twice, once with their full name and once with a short
-    /// alias.
-    #[parse(CslStyle::parse(engine, args)?)]
-    #[default(CslStyle::from_name("ieee").unwrap())]
-    pub style: CslStyle,
-
-    /// The loaded bibliography.
-    #[internal]
-    #[required]
-    #[parse(bibliography)]
-    pub bibliography: Bibliography,
+    /// This can be:
+    /// - A string with the name of one of the built-in styles (see below). Some
+    ///   of the styles listed below appear twice, once with their full name and
+    ///   once with a short alias.
+    /// - A path string to a [CSL file](https://citationstyles.org/). For more
+    ///   details about paths, see the [Paths section]($syntax/#paths).
+    /// - Raw bytes from which a CSL style should be decoded.
+    #[parse(match args.named::<Spanned<CslSource>>("style")? {
+        Some(source) => Some(CslStyle::load(engine.world, source)?),
+        None => None,
+    })]
+    #[default({
+        let default = ArchivedStyle::InstituteOfElectricalAndElectronicsEngineers;
+        Derived::new(CslSource::Named(default), CslStyle::from_archived(default))
+    })]
+    pub style: Derived<CslSource, CslStyle>,
 
     /// The language setting where the bibliography is.
     #[internal]
@@ -141,17 +151,6 @@ pub struct BibliographyElem {
     pub region: Option<Region>,
 }
 
-/// A list of bibliography file paths.
-#[derive(Debug, Default, Clone, Eq, PartialEq, Hash)]
-pub struct BibliographyPaths(Vec<EcoString>);
-
-cast! {
-    BibliographyPaths,
-    self => self.0.into_value(),
-    v: EcoString => Self(vec![v]),
-    v: Array => Self(v.into_iter().map(Value::cast).collect::<HintedStrResult<_>>()?),
-}
-
 impl BibliographyElem {
     /// Find the document's bibliography.
     pub fn find(introspector: Tracked<Introspector>) -> StrResult<Packed<Self>> {
@@ -169,13 +168,12 @@ impl BibliographyElem {
     }
 
     /// Whether the bibliography contains the given key.
-    pub fn has(engine: &Engine, key: impl Into<PicoStr>) -> bool {
-        let key = key.into();
+    pub fn has(engine: &Engine, key: Label) -> bool {
         engine
             .introspector
             .query(&Self::elem().select())
             .iter()
-            .any(|elem| elem.to_packed::<Self>().unwrap().bibliography().has(key))
+            .any(|elem| elem.to_packed::<Self>().unwrap().sources.derived.has(key))
     }
 
     /// Find all bibliography keys.
@@ -183,9 +181,9 @@ impl BibliographyElem {
         let mut vec = vec![];
         for elem in introspector.query(&Self::elem().select()).iter() {
             let this = elem.to_packed::<Self>().unwrap();
-            for (key, entry) in this.bibliography().iter() {
+            for (key, entry) in this.sources.derived.iter() {
                 let detail = entry.title().map(|title| title.value.to_str().into());
-                vec.push((Label::new(key), detail))
+                vec.push((key, detail))
             }
         }
         vec
@@ -282,63 +280,35 @@ impl LocalName for Packed<BibliographyElem> {
 }
 
 /// A loaded bibliography.
-#[derive(Clone, PartialEq)]
-pub struct Bibliography {
-    map: Arc<IndexMap<PicoStr, hayagriva::Entry>>,
-    hash: u128,
-}
+#[derive(Clone, PartialEq, Hash)]
+pub struct Bibliography(Arc<ManuallyHash<IndexMap<Label, hayagriva::Entry>>>);
 
 impl Bibliography {
-    /// Parse the bibliography argument.
-    fn parse(
-        engine: &mut Engine,
-        args: &mut Args,
-    ) -> SourceResult<(BibliographyPaths, Bibliography)> {
-        let Spanned { v: paths, span } =
-            args.expect::<Spanned<BibliographyPaths>>("path to bibliography file")?;
-
-        // Load bibliography files.
-        let data = paths
-            .0
-            .iter()
-            .map(|path| {
-                let id = span.resolve_path(path).at(span)?;
-                engine.world.file(id).at(span)
-            })
-            .collect::<SourceResult<Vec<Bytes>>>()?;
-
-        // Parse.
-        let bibliography = Self::load(&paths, &data).at(span)?;
-
-        Ok((paths, bibliography))
+    /// Load a bibliography from data sources.
+    fn load(
+        world: Tracked<dyn World + '_>,
+        sources: Spanned<OneOrMultiple<DataSource>>,
+    ) -> SourceResult<Derived<OneOrMultiple<DataSource>, Self>> {
+        let data = sources.load(world)?;
+        let bibliography = Self::decode(&sources.v, &data).at(sources.span)?;
+        Ok(Derived::new(sources.v, bibliography))
     }
 
-    /// Load bibliography entries from paths.
+    /// Decode a bibliography from loaded data sources.
     #[comemo::memoize]
     #[typst_macros::time(name = "load bibliography")]
-    fn load(paths: &BibliographyPaths, data: &[Bytes]) -> StrResult<Bibliography> {
+    fn decode(
+        sources: &OneOrMultiple<DataSource>,
+        data: &[Bytes],
+    ) -> StrResult<Bibliography> {
         let mut map = IndexMap::new();
         let mut duplicates = Vec::<EcoString>::new();
 
         // We might have multiple bib/yaml files
-        for (path, bytes) in paths.0.iter().zip(data) {
-            let src = std::str::from_utf8(bytes).map_err(FileError::from)?;
-
-            let ext = Path::new(path.as_str())
-                .extension()
-                .and_then(OsStr::to_str)
-                .unwrap_or_default();
-
-            let library = match ext.to_lowercase().as_str() {
-                "yml" | "yaml" => hayagriva::io::from_yaml_str(src)
-                    .map_err(|err| eco_format!("failed to parse YAML ({err})"))?,
-                "bib" => hayagriva::io::from_biblatex_str(src)
-                    .map_err(|errors| format_biblatex_error(path, src, errors))?,
-                _ => bail!("unknown bibliography format (must be .yml/.yaml or .bib)"),
-            };
-
+        for (source, data) in sources.0.iter().zip(data) {
+            let library = decode_library(source, data)?;
             for entry in library {
-                match map.entry(PicoStr::intern(entry.key())) {
+                match map.entry(Label::new(PicoStr::intern(entry.key()))) {
                     indexmap::map::Entry::Vacant(vacant) => {
                         vacant.insert(entry);
                     }
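`Bibliography` now wraps its entry map in `Arc<ManuallyHash<...>>` and seeds the hash from the raw input bytes. The sketch below illustrates the general idea under the assumption that `ManuallyHash` hashes a precomputed value instead of its payload; it is not the `typst_utils` implementation.

```rust
use std::hash::{Hash, Hasher};

/// Illustrative wrapper: hashes a precomputed value instead of the payload,
/// so `Hash` stays cheap even when the inner type is large.
struct ManuallyHash<T> {
    value: T,
    hash: u128,
}

impl<T> ManuallyHash<T> {
    fn new(value: T, hash: u128) -> Self {
        Self { value, hash }
    }
}

impl<T> Hash for ManuallyHash<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.hash.hash(state);
    }
}

fn main() {
    // The bibliography seeds the hash from the loaded file bytes.
    let wrapped = ManuallyHash::new(vec!["entry-a", "entry-b"], 0xdead_beef);
    println!("{} entries, hash {:x}", wrapped.value.len(), wrapped.hash);
}
```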
@@ -353,182 +323,210 @@ impl Bibliography {
             bail!("duplicate bibliography keys: {}", duplicates.join(", "));
         }
 
-        Ok(Bibliography {
-            map: Arc::new(map),
-            hash: typst_utils::hash128(data),
-        })
+        Ok(Bibliography(Arc::new(ManuallyHash::new(map, typst_utils::hash128(data)))))
     }
 
-    fn has(&self, key: impl Into<PicoStr>) -> bool {
-        self.map.contains_key(&key.into())
+    fn has(&self, key: Label) -> bool {
+        self.0.contains_key(&key)
     }
 
-    fn iter(&self) -> impl Iterator<Item = (PicoStr, &hayagriva::Entry)> {
-        self.map.iter().map(|(&k, v)| (k, v))
+    fn get(&self, key: Label) -> Option<&hayagriva::Entry> {
+        self.0.get(&key)
+    }
+
+    fn iter(&self) -> impl Iterator<Item = (Label, &hayagriva::Entry)> {
+        self.0.iter().map(|(&k, v)| (k, v))
     }
 }
 
 impl Debug for Bibliography {
     fn fmt(&self, f: &mut Formatter) -> fmt::Result {
-        f.debug_set().entries(self.map.keys()).finish()
+        f.debug_set().entries(self.0.keys()).finish()
     }
 }
 
-impl Hash for Bibliography {
-    fn hash<H: Hasher>(&self, state: &mut H) {
-        self.hash.hash(state);
+/// Decode one library from one data source.
+fn decode_library(source: &DataSource, data: &Bytes) -> StrResult<Library> {
+    let src = data.as_str().map_err(FileError::from)?;
+
+    if let DataSource::Path(path) = source {
+        // If we got a path, use the extension to determine whether it is
+        // YAML or BibLaTeX.
+        let ext = Path::new(path.as_str())
+            .extension()
+            .and_then(OsStr::to_str)
+            .unwrap_or_default();
+
+        match ext.to_lowercase().as_str() {
+            "yml" | "yaml" => hayagriva::io::from_yaml_str(src)
+                .map_err(|err| eco_format!("failed to parse YAML ({err})")),
+            "bib" => hayagriva::io::from_biblatex_str(src)
+                .map_err(|errors| format_biblatex_error(src, Some(path), errors)),
+            _ => bail!("unknown bibliography format (must be .yml/.yaml or .bib)"),
+        }
+    } else {
+        // If we just got bytes, we need to guess. If it can be decoded as
+        // hayagriva YAML, we'll use that.
+        let haya_err = match hayagriva::io::from_yaml_str(src) {
+            Ok(library) => return Ok(library),
+            Err(err) => err,
+        };
+
+        // If it can be decoded as BibLaTeX, we use that instead.
+        let bib_errs = match hayagriva::io::from_biblatex_str(src) {
+            Ok(library) => return Ok(library),
+            Err(err) => err,
+        };
+
+        // If neither decoded correctly, check whether `:` or `{` appears
+        // more often to guess whether it's more likely to be YAML or BibLaTeX
+        // and emit the more appropriate error.
+        let mut yaml = 0;
+        let mut biblatex = 0;
+        for c in src.chars() {
+            match c {
+                ':' => yaml += 1,
+                '{' => biblatex += 1,
+                _ => {}
+            }
+        }
+
+        if yaml > biblatex {
+            bail!("failed to parse YAML ({haya_err})")
+        } else {
+            Err(format_biblatex_error(src, None, bib_errs))
+        }
     }
 }
 
 /// Format a BibLaTeX loading error.
-fn format_biblatex_error(path: &str, src: &str, errors: Vec<BibLaTeXError>) -> EcoString {
+fn format_biblatex_error(
+    src: &str,
+    path: Option<&str>,
+    errors: Vec<BibLaTeXError>,
+) -> EcoString {
     let Some(error) = errors.first() else {
-        return eco_format!("failed to parse BibLaTeX file ({path})");
+        return match path {
+            Some(path) => eco_format!("failed to parse BibLaTeX file ({path})"),
+            None => eco_format!("failed to parse BibLaTeX"),
+        };
     };
 
     let (span, msg) = match error {
         BibLaTeXError::Parse(error) => (&error.span, error.kind.to_string()),
         BibLaTeXError::Type(error) => (&error.span, error.kind.to_string()),
     };
 
     let line = src.get(..span.start).unwrap_or_default().lines().count();
-    eco_format!("failed to parse BibLaTeX file ({path}:{line}: {msg})")
+    match path {
+        Some(path) => eco_format!("failed to parse BibLaTeX file ({path}:{line}: {msg})"),
+        None => eco_format!("failed to parse BibLaTeX ({line}: {msg})"),
+    }
 }
 
 /// A loaded CSL style.
-#[ty(cast)]
 #[derive(Debug, Clone, PartialEq, Hash)]
-pub struct CslStyle {
-    name: Option<EcoString>,
-    style: Arc<LazyHash<citationberg::IndependentStyle>>,
-}
+pub struct CslStyle(Arc<ManuallyHash<citationberg::IndependentStyle>>);
 
 impl CslStyle {
-    /// Parse the style argument.
-    pub fn parse(engine: &mut Engine, args: &mut Args) -> SourceResult<Option<CslStyle>> {
-        let Some(Spanned { v: string, span }) =
-            args.named::<Spanned<EcoString>>("style")?
-        else {
-            return Ok(None);
-        };
-
-        Ok(Some(Self::parse_impl(engine, &string, span).at(span)?))
-    }
-
-    /// Parse the style argument with `Smart`.
-    pub fn parse_smart(
-        engine: &mut Engine,
-        args: &mut Args,
-    ) -> SourceResult<Option<Smart<CslStyle>>> {
-        let Some(Spanned { v: smart, span }) =
-            args.named::<Spanned<Smart<EcoString>>>("style")?
-        else {
-            return Ok(None);
-        };
-
-        Ok(Some(match smart {
-            Smart::Auto => Smart::Auto,
-            Smart::Custom(string) => {
-                Smart::Custom(Self::parse_impl(engine, &string, span).at(span)?)
-            }
-        }))
-    }
-
-    /// Parse internally.
-    fn parse_impl(engine: &mut Engine, string: &str, span: Span) -> StrResult<CslStyle> {
-        let ext = Path::new(string)
-            .extension()
-            .and_then(OsStr::to_str)
-            .unwrap_or_default()
-            .to_lowercase();
-
-        if ext == "csl" {
-            let id = span.resolve_path(string)?;
-            let data = engine.world.file(id)?;
-            CslStyle::from_data(&data)
-        } else {
-            CslStyle::from_name(string)
-        }
+    /// Load a CSL style from a data source.
+    pub fn load(
+        world: Tracked<dyn World + '_>,
+        Spanned { v: source, span }: Spanned<CslSource>,
+    ) -> SourceResult<Derived<CslSource, Self>> {
+        let style = match &source {
+            CslSource::Named(style) => Self::from_archived(*style),
+            CslSource::Normal(source) => {
+                let data = Spanned::new(source, span).load(world)?;
+                Self::from_data(data).at(span)?
+            }
+        };
+        Ok(Derived::new(source, style))
     }
 
     /// Load a built-in CSL style.
     #[comemo::memoize]
-    pub fn from_name(name: &str) -> StrResult<CslStyle> {
-        match hayagriva::archive::ArchivedStyle::by_name(name).map(ArchivedStyle::get) {
-            Some(citationberg::Style::Independent(style)) => Ok(Self {
-                name: Some(name.into()),
-                style: Arc::new(LazyHash::new(style)),
-            }),
-            _ => bail!("unknown style: `{name}`"),
+    pub fn from_archived(archived: ArchivedStyle) -> CslStyle {
+        match archived.get() {
+            citationberg::Style::Independent(style) => Self(Arc::new(ManuallyHash::new(
+                style,
+                typst_utils::hash128(&(TypeId::of::<ArchivedStyle>(), archived)),
+            ))),
+            // Ensured by `test_bibliography_load_builtin_styles`.
+            _ => unreachable!("archive should not contain dependant styles"),
         }
     }
 
     /// Load a CSL style from file contents.
     #[comemo::memoize]
-    pub fn from_data(data: &Bytes) -> StrResult<CslStyle> {
-        let text = std::str::from_utf8(data.as_slice()).map_err(FileError::from)?;
+    pub fn from_data(data: Bytes) -> StrResult<CslStyle> {
+        let text = data.as_str().map_err(FileError::from)?;
         citationberg::IndependentStyle::from_xml(text)
-            .map(|style| Self { name: None, style: Arc::new(LazyHash::new(style)) })
+            .map(|style| {
+                Self(Arc::new(ManuallyHash::new(
+                    style,
+                    typst_utils::hash128(&(TypeId::of::<Bytes>(), data)),
+                )))
+            })
             .map_err(|err| eco_format!("failed to load CSL style ({err})"))
     }
 
     /// Get the underlying independent style.
     pub fn get(&self) -> &citationberg::IndependentStyle {
-        self.style.as_ref()
+        self.0.as_ref()
    }
 }
 
-// This Reflect impl is technically a bit wrong because it doesn't say what
-// FromValue and IntoValue really do. Instead, it says what the `style` argument
-// on `bibliography` and `cite` expect (through manual parsing).
-impl Reflect for CslStyle {
+/// Source for a CSL style.
+#[derive(Debug, Clone, PartialEq, Hash)]
+pub enum CslSource {
+    /// A predefined named style.
+    Named(ArchivedStyle),
+    /// A normal data source.
+    Normal(DataSource),
+}
+
+impl Reflect for CslSource {
     #[comemo::memoize]
     fn input() -> CastInfo {
-        let ty = std::iter::once(CastInfo::Type(Type::of::<Str>()));
-        let options = hayagriva::archive::ArchivedStyle::all().iter().map(|name| {
+        let source = std::iter::once(DataSource::input());
+        let names = ArchivedStyle::all().iter().map(|name| {
             CastInfo::Value(name.names()[0].into_value(), name.display_name())
         });
-        CastInfo::Union(ty.chain(options).collect())
+        CastInfo::Union(source.into_iter().chain(names).collect())
     }
 
     fn output() -> CastInfo {
-        EcoString::output()
+        DataSource::output()
     }
 
     fn castable(value: &Value) -> bool {
-        if let Value::Dyn(dynamic) = &value {
-            if dynamic.is::<Self>() {
-                return true;
-            }
-        }
-
-        false
+        DataSource::castable(value)
     }
 }
 
-impl FromValue for CslStyle {
+impl FromValue for CslSource {
     fn from_value(value: Value) -> HintedStrResult<Self> {
-        if let Value::Dyn(dynamic) = &value {
-            if let Some(concrete) = dynamic.downcast::<Self>() {
-                return Ok(concrete.clone());
+        if EcoString::castable(&value) {
+            let string = EcoString::from_value(value.clone())?;
+            if Path::new(string.as_str()).extension().is_none() {
+                let style = ArchivedStyle::by_name(&string)
+                    .ok_or_else(|| eco_format!("unknown style: {}", string))?;
+                return Ok(CslSource::Named(style));
             }
         }
 
-        Err(<Self as Reflect>::error(&value))
+        DataSource::from_value(value).map(CslSource::Normal)
     }
 }
 
-impl IntoValue for CslStyle {
+impl IntoValue for CslSource {
     fn into_value(self) -> Value {
-        Value::dynamic(self)
-    }
-}
-
-impl Repr for CslStyle {
-    fn repr(&self) -> EcoString {
-        self.name
-            .as_ref()
-            .map(|name| name.repr())
-            .unwrap_or_else(|| "..".into())
+        match self {
+            // We prefer the shorter names which are at the back of the array.
+            Self::Named(v) => v.names().last().unwrap().into_value(),
+            Self::Normal(v) => v.into_value(),
+        }
     }
 }
 
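When `decode_library` receives plain bytes, it tries Hayagriva YAML first, then BibLaTeX, and only if both fail does it guess which error to report by comparing how often `:` and `{` occur. Below is a self-contained approximation of just that tie-breaking step, with the parser calls omitted; the function name is hypothetical.

```rust
/// Illustrative version of the error-selection heuristic: count the
/// characters that are typical for each format and report the likelier one.
/// Ties go to BibLaTeX, matching the `yaml > biblatex` check in the diff.
fn likely_format(src: &str) -> &'static str {
    let yaml = src.chars().filter(|&c| c == ':').count();
    let biblatex = src.chars().filter(|&c| c == '{').count();
    if yaml > biblatex { "yaml" } else { "biblatex" }
}

fn main() {
    assert_eq!(likely_format("key: value\nother: 1"), "yaml");
    assert_eq!(likely_format("@book{key, title = {T}}"), "biblatex");
    println!("heuristic behaves as sketched");
}
```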
@@ -632,16 +630,15 @@ impl<'a> Generator<'a> {
         static LOCALES: LazyLock<Vec<citationberg::Locale>> =
             LazyLock::new(hayagriva::archive::locales);
 
-        let database = self.bibliography.bibliography();
-        let bibliography_style = self.bibliography.style(StyleChain::default());
-        let styles = Arena::new();
+        let database = &self.bibliography.sources.derived;
+        let bibliography_style = &self.bibliography.style(StyleChain::default()).derived;
 
         // Process all citation groups.
         let mut driver = BibliographyDriver::new();
         for elem in &self.groups {
             let group = elem.to_packed::<CiteGroup>().unwrap();
             let location = elem.location().unwrap();
-            let children = group.children();
+            let children = &group.children;
 
             // Groups should never be empty.
             let Some(first) = children.first() else { continue };
@@ -653,12 +650,11 @@ impl<'a> Generator<'a> {
 
             // Create infos and items for each child in the group.
             for child in children {
-                let key = *child.key();
-                let Some(entry) = database.map.get(&key.into_inner()) else {
+                let Some(entry) = database.get(child.key) else {
                     errors.push(error!(
                         child.span(),
                         "key `{}` does not exist in the bibliography",
-                        key.resolve()
+                        child.key.resolve()
                     ));
                     continue;
                 };
@@ -685,7 +681,7 @@ impl<'a> Generator<'a> {
                 };
 
                 normal &= special_form.is_none();
-                subinfos.push(CiteInfo { key, supplement, hidden });
+                subinfos.push(CiteInfo { key: child.key, supplement, hidden });
                 items.push(CitationItem::new(entry, locator, None, hidden, special_form));
             }
 
@@ -695,8 +691,8 @@ impl<'a> Generator<'a> {
             }
 
             let style = match first.style(StyleChain::default()) {
-                Smart::Auto => &bibliography_style.style,
-                Smart::Custom(style) => styles.alloc(style.style),
+                Smart::Auto => bibliography_style.get(),
+                Smart::Custom(style) => style.derived.get(),
             };
 
             self.infos.push(GroupInfo {
@@ -727,7 +723,7 @@ impl<'a> Generator<'a> {
         // Add hidden items for everything if we should print the whole
         // bibliography.
         if self.bibliography.full(StyleChain::default()) {
-            for entry in database.map.values() {
+            for (_, entry) in database.iter() {
                 driver.citation(CitationRequest::new(
                     vec![CitationItem::new(entry, None, None, true, None)],
                     bibliography_style.get(),
@@ -1097,3 +1093,15 @@ fn locale(lang: Lang, region: Option<Region>) -> citationberg::LocaleCode {
     }
     citationberg::LocaleCode(value)
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_bibliography_load_builtin_styles() {
+        for &archived in ArchivedStyle::all() {
+            let _ = CslStyle::from_archived(archived);
+        }
+    }
+}
@@ -1,11 +1,14 @@
+use typst_syntax::Spanned;
+
 use crate::diag::{error, At, HintedString, SourceResult};
 use crate::engine::Engine;
 use crate::foundations::{
-    cast, elem, Cast, Content, Label, Packed, Show, Smart, StyleChain, Synthesize,
+    cast, elem, Cast, Content, Derived, Label, Packed, Show, Smart, StyleChain,
+    Synthesize,
 };
 use crate::introspection::Locatable;
 use crate::model::bibliography::Works;
-use crate::model::CslStyle;
+use crate::model::{CslSource, CslStyle};
 use crate::text::{Lang, Region, TextElem};
 
 /// Cite a work from the bibliography.
@@ -87,15 +90,24 @@ pub struct CiteElem {
 
     /// The citation style.
     ///
-    /// Should be either `{auto}`, one of the built-in styles (see below) or a
-    /// path to a [CSL file](https://citationstyles.org/). Some of the styles
-    /// listed below appear twice, once with their full name and once with a
-    /// short alias.
-    ///
-    /// When set to `{auto}`, automatically use the
-    /// [bibliography's style]($bibliography.style) for the citations.
-    #[parse(CslStyle::parse_smart(engine, args)?)]
-    pub style: Smart<CslStyle>,
+    /// This can be:
+    /// - `{auto}` to automatically use the
+    ///   [bibliography's style]($bibliography.style) for citations.
+    /// - A string with the name of one of the built-in styles (see below). Some
+    ///   of the styles listed below appear twice, once with their full name and
+    ///   once with a short alias.
+    /// - A path string to a [CSL file](https://citationstyles.org/). For more
+    ///   details about paths, see the [Paths section]($syntax/#paths).
+    /// - Raw bytes from which a CSL style should be decoded.
+    #[parse(match args.named::<Spanned<Smart<CslSource>>>("style")? {
+        Some(Spanned { v: Smart::Custom(source), span }) => Some(Smart::Custom(
+            CslStyle::load(engine.world, Spanned::new(source, span))?
+        )),
+        Some(Spanned { v: Smart::Auto, .. }) => Some(Smart::Auto),
+        None => None,
+    })]
+    #[borrowed]
+    pub style: Smart<Derived<CslSource, CslStyle>>,
 
     /// The text language setting where the citation is.
     #[internal]
@@ -3,8 +3,8 @@ use ecow::EcoString;
 use crate::diag::{bail, HintedStrResult, SourceResult};
 use crate::engine::Engine;
 use crate::foundations::{
-    cast, elem, Args, Array, Construct, Content, Datetime, Fields, Smart, StyleChain,
-    Styles, Value,
+    cast, elem, Args, Array, Construct, Content, Datetime, Fields, OneOrMultiple, Smart,
+    StyleChain, Styles, Value,
 };
 
 /// The root element of a document and its metadata.
@@ -35,7 +35,7 @@ pub struct DocumentElem {
 
     /// The document's authors.
     #[ghost]
-    pub author: Author,
+    pub author: OneOrMultiple<EcoString>,
 
     /// The document's description.
     #[ghost]
@@ -43,7 +43,7 @@ pub struct DocumentElem {
 
     /// The document's keywords.
     #[ghost]
-    pub keywords: Keywords,
+    pub keywords: OneOrMultiple<EcoString>,
 
     /// The document's creation date.
     ///
@@ -93,7 +93,7 @@ cast! {
 pub struct DocumentInfo {
     /// The document's title.
     pub title: Option<EcoString>,
-    /// The document's author.
+    /// The document's author(s).
     pub author: Vec<EcoString>,
     /// The document's description.
     pub description: Option<EcoString>,
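`author` and `keywords` now accept `OneOrMultiple<EcoString>`, i.e. either a single value or an array of values. A simplified stand-in for such a wrapper is sketched below; the real type additionally hooks into the crate's casting machinery, which is omitted here.

```rust
/// Illustrative one-or-many wrapper: a single item and a list both normalize
/// to a `Vec` internally.
struct OneOrMultiple<T>(Vec<T>);

impl<T> OneOrMultiple<T> {
    fn one(item: T) -> Self {
        Self(vec![item])
    }

    fn many(items: Vec<T>) -> Self {
        Self(items)
    }
}

fn main() {
    let single = OneOrMultiple::one("Jane Doe".to_string());
    let several =
        OneOrMultiple::many(vec!["Jane Doe".to_string(), "John Doe".to_string()]);
    println!("{} author(s) and {} author(s)", single.0.len(), several.0.len());
}
```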
@@ -257,7 +257,7 @@ impl Synthesize for Packed<FigureElem> {
 
         // Determine the figure's kind.
         let kind = elem.kind(styles).unwrap_or_else(|| {
-            elem.body()
+            elem.body
                 .query_first(&Selector::can::<dyn Figurable>())
                 .map(|elem| FigureKind::Elem(elem.func()))
                 .unwrap_or_else(|| FigureKind::Elem(ImageElem::elem()))
@@ -288,14 +288,13 @@ impl Synthesize for Packed<FigureElem> {
                 // Resolve the supplement with the first descendant of the kind or
                 // just the body, if none was found.
                 let descendant = match kind {
-                    FigureKind::Elem(func) => elem
-                        .body()
-                        .query_first(&Selector::Elem(func, None))
-                        .map(Cow::Owned),
+                    FigureKind::Elem(func) => {
+                        elem.body.query_first(&Selector::Elem(func, None)).map(Cow::Owned)
+                    }
                     FigureKind::Name(_) => None,
                 };
 
-                let target = descendant.unwrap_or_else(|| Cow::Borrowed(elem.body()));
+                let target = descendant.unwrap_or_else(|| Cow::Borrowed(&elem.body));
                 Some(supplement.resolve(engine, styles, [target])?)
             }
         };
@@ -437,7 +436,7 @@ impl Outlinable for Packed<FigureElem> {
             return Ok(None);
         };
 
-        let mut realized = caption.body().clone();
+        let mut realized = caption.body.clone();
         if let (
             Smart::Custom(Some(Supplement::Content(mut supplement))),
             Some(Some(counter)),
@@ -460,7 +459,7 @@ impl Outlinable for Packed<FigureElem> {
 
             let separator = caption.get_separator(StyleChain::default());
 
-            realized = supplement + numbers + separator + caption.body();
+            realized = supplement + numbers + separator + caption.body.clone();
         }
 
         Ok(Some(realized))
@@ -604,7 +603,7 @@ impl Synthesize for Packed<FigureCaption> {
 impl Show for Packed<FigureCaption> {
     #[typst_macros::time(name = "figure.caption", span = self.span())]
     fn show(&self, engine: &mut Engine, styles: StyleChain) -> SourceResult<Content> {
-        let mut realized = self.body().clone();
+        let mut realized = self.body.clone();
 
         if let (
             Some(Some(mut supplement)),
@@ -105,12 +105,12 @@ impl FootnoteElem {
 
     /// Tests if this footnote is a reference to another footnote.
     pub fn is_ref(&self) -> bool {
-        matches!(self.body(), FootnoteBody::Reference(_))
+        matches!(self.body, FootnoteBody::Reference(_))
     }
 
     /// Returns the content of the body of this footnote if it is not a ref.
     pub fn body_content(&self) -> Option<&Content> {
-        match self.body() {
+        match &self.body {
             FootnoteBody::Content(content) => Some(content),
             _ => None,
         }
@@ -120,9 +120,9 @@ impl FootnoteElem {
 impl Packed<FootnoteElem> {
     /// Returns the location of the definition of this footnote.
     pub fn declaration_location(&self, engine: &Engine) -> StrResult<Location> {
-        match self.body() {
+        match self.body {
             FootnoteBody::Reference(label) => {
-                let element = engine.introspector.query_label(*label)?;
+                let element = engine.introspector.query_label(label)?;
                 let footnote = element
                     .to_packed::<FootnoteElem>()
                     .ok_or("referenced element should be a footnote")?;
@@ -281,12 +281,11 @@ impl Show for Packed<FootnoteEntry> {
     #[typst_macros::time(name = "footnote.entry", span = self.span())]
     fn show(&self, engine: &mut Engine, styles: StyleChain) -> SourceResult<Content> {
         let span = self.span();
-        let note = self.note();
         let number_gap = Em::new(0.05);
         let default = StyleChain::default();
-        let numbering = note.numbering(default);
+        let numbering = self.note.numbering(default);
         let counter = Counter::of(FootnoteElem::elem());
-        let Some(loc) = note.location() else {
+        let Some(loc) = self.note.location() else {
             bail!(
                 span, "footnote entry must have a location";
                 hint: "try using a query or a show rule to customize the footnote instead"
@@ -304,7 +303,7 @@ impl Show for Packed<FootnoteEntry> {
             HElem::new(self.indent(styles).into()).pack(),
             sup,
             HElem::new(number_gap.into()).with_weak(true).pack(),
-            note.body_content().unwrap().clone(),
+            self.note.body_content().unwrap().clone(),
         ]))
     }
 }
@@ -223,7 +223,7 @@ impl Show for Packed<HeadingElem> {
         const SPACING_TO_NUMBERING: Em = Em::new(0.3);
 
         let span = self.span();
-        let mut realized = self.body().clone();
+        let mut realized = self.body.clone();
 
         let hanging_indent = self.hanging_indent(styles);
         let mut indent = match hanging_indent {
@@ -360,7 +360,7 @@ impl Outlinable for Packed<HeadingElem> {
             return Ok(None);
         }
 
-        let mut content = self.body().clone();
+        let mut content = self.body.clone();
         if let Some(numbering) = (**self).numbering(StyleChain::default()).as_ref() {
             let numbers = Counter::of(HeadingElem::elem()).display_at_loc(
                 engine,
@@ -102,11 +102,10 @@ impl LinkElem {
 impl Show for Packed<LinkElem> {
     #[typst_macros::time(name = "link", span = self.span())]
     fn show(&self, engine: &mut Engine, styles: StyleChain) -> SourceResult<Content> {
-        let body = self.body().clone();
-        let dest = self.dest();
+        let body = self.body.clone();
 
         Ok(if TargetElem::target_in(styles).is_html() {
-            if let LinkTarget::Dest(Destination::Url(url)) = dest {
+            if let LinkTarget::Dest(Destination::Url(url)) = &self.dest {
                 HtmlElem::new(tag::a)
                     .with_attr(attr::href, url.clone().into_inner())
                     .with_body(Some(body))
@@ -120,7 +119,7 @@ impl Show for Packed<LinkElem> {
                 body
             }
         } else {
-            let linked = match self.dest() {
+            let linked = match &self.dest {
                 LinkTarget::Dest(dest) => body.linked(dest.clone()),
                 LinkTarget::Label(label) => {
                     let elem = engine.introspector.query_label(*label).at(self.span())?;
@@ -53,9 +53,7 @@ use crate::text::Case;
 /// ```
 #[func]
 pub fn numbering(
-    /// The engine.
     engine: &mut Engine,
-    /// The callsite context.
     context: Tracked<Context>,
     /// Defines how the numbering works.
     ///
@@ -219,8 +219,7 @@ impl Show for Packed<OutlineElem> {
                 continue;
             };
 
-            let level = entry.level();
-            if depth < *level {
+            if depth < entry.level {
                 continue;
             }
 
@@ -229,7 +228,7 @@ impl Show for Packed<OutlineElem> {
             while ancestors
                 .last()
                 .and_then(|ancestor| ancestor.with::<dyn Outlinable>())
-                .is_some_and(|last| last.level() >= *level)
+                .is_some_and(|last| last.level() >= entry.level)
             {
                 ancestors.pop();
             }
@@ -483,7 +482,7 @@ impl Show for Packed<OutlineEntry> {
     #[typst_macros::time(name = "outline.entry", span = self.span())]
     fn show(&self, _: &mut Engine, styles: StyleChain) -> SourceResult<Content> {
         let mut seq = vec![];
-        let elem = self.element();
+        let elem = &self.element;
 
         // In case a user constructs an outline entry with an arbitrary element.
         let Some(location) = elem.location() else {
@@ -512,7 +511,7 @@ impl Show for Packed<OutlineEntry> {
             seq.push(TextElem::packed("\u{202B}"));
         }
 
-        seq.push(self.body().clone().linked(Destination::Location(location)));
+        seq.push(self.body.clone().linked(Destination::Location(location)));
 
         if rtl {
             // "Pop Directional Formatting"
@@ -520,7 +519,7 @@ impl Show for Packed<OutlineEntry> {
         }
 
         // Add filler symbols between the section name and page number.
-        if let Some(filler) = self.fill() {
+        if let Some(filler) = &self.fill {
             seq.push(SpaceElem::shared().clone());
             seq.push(
                 BoxElem::new()
@@ -535,7 +534,7 @@ impl Show for Packed<OutlineEntry> {
         }
 
         // Add the page number.
-        let page = self.page().clone().linked(Destination::Location(location));
+        let page = self.page.clone().linked(Destination::Location(location));
         seq.push(page);
 
         Ok(Content::sequence(seq))
@@ -156,7 +156,7 @@ cast! {
 impl Show for Packed<QuoteElem> {
     #[typst_macros::time(name = "quote", span = self.span())]
     fn show(&self, _: &mut Engine, styles: StyleChain) -> SourceResult<Content> {
-        let mut realized = self.body().clone();
+        let mut realized = self.body.clone();
         let block = self.block(styles);
 
         if self.quotes(styles) == Smart::Custom(true) || !block {
@@ -182,9 +182,8 @@ impl Synthesize for Packed<RefElem> {
         elem.push_citation(Some(citation));
         elem.push_element(None);

-        let target = *elem.target();
-        if !BibliographyElem::has(engine, target) {
-            if let Ok(found) = engine.introspector.query_label(target).cloned() {
+        if !BibliographyElem::has(engine, elem.target) {
+            if let Ok(found) = engine.introspector.query_label(elem.target).cloned() {
                 elem.push_element(Some(found));
                 return Ok(());
             }
@@ -197,8 +196,7 @@ impl Synthesize for Packed<RefElem> {
 impl Show for Packed<RefElem> {
     #[typst_macros::time(name = "ref", span = self.span())]
     fn show(&self, engine: &mut Engine, styles: StyleChain) -> SourceResult<Content> {
-        let target = *self.target();
-        let elem = engine.introspector.query_label(target);
+        let elem = engine.introspector.query_label(self.target);
         let span = self.span();

         let form = self.form(styles);
@@ -229,7 +227,7 @@ impl Show for Packed<RefElem> {
         }
         // RefForm::Normal

-        if BibliographyElem::has(engine, target) {
+        if BibliographyElem::has(engine, self.target) {
             if elem.is_ok() {
                 bail!(span, "label occurs in the document and its bibliography");
             }
@@ -240,7 +238,7 @@ impl Show for Packed<RefElem> {
         let elem = elem.at(span)?;

         if let Some(footnote) = elem.to_packed::<FootnoteElem>() {
-            return Ok(footnote.into_ref(target).pack().spanned(span));
+            return Ok(footnote.into_ref(self.target).pack().spanned(span));
         }

         let elem = elem.clone();
@@ -319,7 +317,7 @@ fn to_citation(
     engine: &mut Engine,
     styles: StyleChain,
 ) -> SourceResult<Packed<CiteElem>> {
-    let mut elem = Packed::new(CiteElem::new(*reference.target()).with_supplement(
+    let mut elem = Packed::new(CiteElem::new(reference.target).with_supplement(
         match reference.supplement(styles).clone() {
             Smart::Custom(Some(Supplement::Content(content))) => Some(content),
             _ => None,
@@ -706,7 +706,7 @@ cast! {

 impl Show for Packed<TableCell> {
     fn show(&self, _engine: &mut Engine, styles: StyleChain) -> SourceResult<Content> {
-        show_grid_cell(self.body().clone(), self.inset(styles), self.align(styles))
+        show_grid_cell(self.body.clone(), self.inset(styles), self.align(styles))
     }
 }

@@ -151,12 +151,12 @@ impl Show for Packed<TermsElem> {
             .then(|| HElem::new((-hanging_indent).into()).pack().spanned(span));

         let mut children = vec![];
-        for child in self.children().iter() {
+        for child in self.children.iter() {
             let mut seq = vec![];
             seq.extend(unpad.clone());
-            seq.push(child.term().clone().strong());
+            seq.push(child.term.clone().strong());
             seq.push((*separator).clone());
-            seq.push(child.description().clone());
+            seq.push(child.description.clone());
             children.push(StackChild::Block(Content::sequence(seq)));
         }


crates/typst-library/src/pdf/embed.rs (new file, 99 lines)
@@ -0,0 +1,99 @@
+use ecow::EcoString;
+use typst_syntax::Spanned;
+
+use crate::diag::{At, SourceResult};
+use crate::engine::Engine;
+use crate::foundations::{elem, Bytes, Cast, Content, Derived, Packed, Show, StyleChain};
+use crate::introspection::Locatable;
+use crate::World;
+
+/// A file that will be embedded into the output PDF.
+///
+/// This can be used to distribute additional files that are related to the PDF
+/// within it. PDF readers will display the files in a file listing.
+///
+/// Some international standards use this mechanism to embed machine-readable
+/// data (e.g., ZUGFeRD/Factur-X for invoices) that mirrors the visual content
+/// of the PDF.
+///
+/// # Example
+/// ```typ
+/// #pdf.embed(
+///   "experiment.csv",
+///   relationship: "supplement",
+///   mime-type: "text/csv",
+///   description: "Raw Oxygen readings from the Arctic experiment",
+/// )
+/// ```
+///
+/// # Notes
+/// - This element is ignored if exporting to a format other than PDF.
+/// - File embeddings are not currently supported for PDF/A-2, even if the
+///   embedded file conforms to PDF/A-1 or PDF/A-2.
+#[elem(Show, Locatable)]
+pub struct EmbedElem {
+    /// Path of the file to be embedded.
+    ///
+    /// Must always be specified, but is only read from if no data is provided
+    /// in the following argument.
+    ///
+    /// For more details about paths, see the [Paths section]($syntax/#paths).
+    #[required]
+    #[parse(
+        let Spanned { v: path, span } =
+            args.expect::<Spanned<EcoString>>("path")?;
+        let id = span.resolve_path(&path).at(span)?;
+        // The derived part is the project-relative resolved path.
+        let resolved = id.vpath().as_rootless_path().to_string_lossy().replace("\\", "/").into();
+        Derived::new(path.clone(), resolved)
+    )]
+    #[borrowed]
+    pub path: Derived<EcoString, EcoString>,
+
+    /// Raw file data, optionally.
+    ///
+    /// If omitted, the data is read from the specified path.
+    #[positional]
+    // Not actually required as an argument, but always present as a field.
+    // We can't distinguish between the two at the moment.
+    #[required]
+    #[parse(
+        match args.find::<Bytes>()? {
+            Some(data) => data,
+            None => engine.world.file(id).at(span)?,
+        }
+    )]
+    pub data: Bytes,
+
+    /// The relationship of the embedded file to the document.
+    ///
+    /// Ignored if export doesn't target PDF/A-3.
+    pub relationship: Option<EmbeddedFileRelationship>,
+
+    /// The MIME type of the embedded file.
+    #[borrowed]
+    pub mime_type: Option<EcoString>,
+
+    /// A description for the embedded file.
+    #[borrowed]
+    pub description: Option<EcoString>,
+}
+
+impl Show for Packed<EmbedElem> {
+    fn show(&self, _: &mut Engine, _: StyleChain) -> SourceResult<Content> {
+        Ok(Content::empty())
+    }
+}
+
+/// The relationship of an embedded file with the document.
+#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Cast)]
+pub enum EmbeddedFileRelationship {
+    /// The PDF document was created from the source file.
+    Source,
+    /// The file was used to derive a visual presentation in the PDF.
+    Data,
+    /// An alternative representation of the document.
+    Alternative,
+    /// Additional resources for the document.
+    Supplement,
+}
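The `relationship` field only becomes meaningful when exporting PDF/A-3, the profile that permits arbitrary attachments. As a rough sketch of the writer side (an assumption, not code from this diff), the enum maps onto the standard associated-file relationship names from the PDF specification:

```rust
// Hedged sketch, not part of this diff: how a PDF writer could translate the
// `relationship` field into the standard `AFRelationship` names. The function
// name and its placement in the exporter are assumptions.
fn af_relationship_name(rel: EmbeddedFileRelationship) -> &'static str {
    match rel {
        EmbeddedFileRelationship::Source => "Source",
        EmbeddedFileRelationship::Data => "Data",
        EmbeddedFileRelationship::Alternative => "Alternative",
        EmbeddedFileRelationship::Supplement => "Supplement",
    }
}
```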

crates/typst-library/src/pdf/mod.rs (new file, 24 lines)
@@ -0,0 +1,24 @@
+//! PDF-specific functionality.
+
+mod embed;
+
+pub use self::embed::*;
+
+use crate::foundations::{category, Category, Module, Scope};
+
+/// PDF-specific functionality.
+#[category]
+pub static PDF: Category;
+
+/// Hook up the `pdf` module.
+pub(super) fn define(global: &mut Scope) {
+    global.category(PDF);
+    global.define_module(module());
+}
+
+/// Hook up all `pdf` definitions.
+pub fn module() -> Module {
+    let mut scope = Scope::deduplicating();
+    scope.define_elem::<EmbedElem>();
+    Module::new("pdf", scope)
+}
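The module follows the usual define/module pattern of the other library categories. A minimal sketch of how `pdf::define` would presumably be wired in while the standard library scope is built (the surrounding builder is an assumption):

```rust
// Sketch only: the global scope construction here is assumed; the new module
// merely needs `define` to be called once while assembling the library.
fn build_global() -> Scope {
    let mut global = Scope::deduplicating();
    crate::pdf::define(&mut global); // registers the `pdf` category and module
    global
}
```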
@@ -81,7 +81,7 @@ pub struct UnderlineElem {
 impl Show for Packed<UnderlineElem> {
     #[typst_macros::time(name = "underline", span = self.span())]
     fn show(&self, _: &mut Engine, styles: StyleChain) -> SourceResult<Content> {
-        Ok(self.body().clone().styled(TextElem::set_deco(smallvec![Decoration {
+        Ok(self.body.clone().styled(TextElem::set_deco(smallvec![Decoration {
             line: DecoLine::Underline {
                 stroke: self.stroke(styles).unwrap_or_default(),
                 offset: self.offset(styles),
@@ -173,7 +173,7 @@ pub struct OverlineElem {
 impl Show for Packed<OverlineElem> {
     #[typst_macros::time(name = "overline", span = self.span())]
     fn show(&self, _: &mut Engine, styles: StyleChain) -> SourceResult<Content> {
-        Ok(self.body().clone().styled(TextElem::set_deco(smallvec![Decoration {
+        Ok(self.body.clone().styled(TextElem::set_deco(smallvec![Decoration {
             line: DecoLine::Overline {
                 stroke: self.stroke(styles).unwrap_or_default(),
                 offset: self.offset(styles),
@@ -250,7 +250,7 @@ pub struct StrikeElem {
 impl Show for Packed<StrikeElem> {
     #[typst_macros::time(name = "strike", span = self.span())]
     fn show(&self, _: &mut Engine, styles: StyleChain) -> SourceResult<Content> {
-        Ok(self.body().clone().styled(TextElem::set_deco(smallvec![Decoration {
+        Ok(self.body.clone().styled(TextElem::set_deco(smallvec![Decoration {
             // Note that we do not support evade option for strikethrough.
             line: DecoLine::Strikethrough {
                 stroke: self.stroke(styles).unwrap_or_default(),
@@ -345,7 +345,7 @@ pub struct HighlightElem {
 impl Show for Packed<HighlightElem> {
     #[typst_macros::time(name = "highlight", span = self.span())]
     fn show(&self, _: &mut Engine, styles: StyleChain) -> SourceResult<Content> {
-        Ok(self.body().clone().styled(TextElem::set_deco(smallvec![Decoration {
+        Ok(self.body.clone().styled(TextElem::set_deco(smallvec![Decoration {
             line: DecoLine::Highlight {
                 fill: self.fill(styles),
                 stroke: self
@@ -7,6 +7,7 @@ use typst_syntax::Span;
 use usvg::tiny_skia_path;
 use xmlwriter::XmlWriter;

+use crate::foundations::Bytes;
 use crate::layout::{Abs, Frame, FrameItem, Point, Size};
 use crate::text::{Font, Glyph};
 use crate::visualize::{FixedStroke, Geometry, Image, RasterFormat, VectorFormat};
@@ -101,8 +102,12 @@ fn draw_raster_glyph(
     upem: Abs,
     raster_image: ttf_parser::RasterGlyphImage,
 ) -> Option<()> {
-    let image =
-        Image::new(raster_image.data.into(), RasterFormat::Png.into(), None).ok()?;
+    let image = Image::new(
+        Bytes::new(raster_image.data.to_vec()),
+        RasterFormat::Png.into(),
+        None,
+    )
+    .ok()?;

     // Apple Color emoji doesn't provide offset information (or at least
     // not in a way ttf-parser understands), so we artificially shift their
@@ -175,7 +180,7 @@ fn draw_colr_glyph(

     let data = svg.end_document().into_bytes();

-    let image = Image::new(data.into(), VectorFormat::Svg.into(), None).ok()?;
+    let image = Image::new(Bytes::new(data), VectorFormat::Svg.into(), None).ok()?;

     let y_shift = Abs::pt(upem.to_pt() - y_max);
     let position = Point::new(Abs::pt(x_min), y_shift);
@@ -251,7 +256,7 @@ fn draw_svg_glyph(
     );

     let image =
-        Image::new(wrapper_svg.into_bytes().into(), VectorFormat::Svg.into(), None)
+        Image::new(Bytes::new(wrapper_svg.into_bytes()), VectorFormat::Svg.into(), None)
             .ok()?;

     let position = Point::new(Abs::pt(left), Abs::pt(top) + upem);
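All three call sites now build their image data through `Bytes::new`, which, judging from how it is used in this diff, accepts both owned buffers and `'static` slices. A small illustrative sketch of the pattern:

```rust
// Illustrative only; the exact trait bound on `Bytes::new` is an assumption
// inferred from the call sites above (Vec<u8> and &'static [u8] both work).
// Assumes `Bytes` from `crate::foundations` is in scope.
fn wrap_bytes() -> (Bytes, Bytes) {
    let owned = Bytes::new(vec![0x89, b'P', b'N', b'G']); // owned Vec<u8>
    let stat = Bytes::new(b"<svg xmlns='http://www.w3.org/2000/svg'/>".as_slice()); // &'static [u8]
    (owned, stat)
}
```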
@@ -228,6 +228,8 @@ static EXCEPTION_MAP: phf::Map<&'static str, Exception> = phf::phf_map! {
         .style(FontStyle::Oblique),
     "NewCMSans10-Regular" => Exception::new()
         .family("New Computer Modern Sans"),
+    "NewCMSansMath-Regular" => Exception::new()
+        .family("New Computer Modern Sans Math"),
     "NewCMUncial08-Bold" => Exception::new()
         .family("New Computer Modern Uncial 08"),
     "NewCMUncial08-Book" => Exception::new()
@@ -555,6 +555,7 @@ pub struct TextElem {
     /// #lorem(10)
     /// ```
     #[fold]
+    #[ghost]
     pub costs: Costs,

     /// Whether to apply kerning.
@@ -793,7 +794,7 @@ impl Construct for TextElem {

 impl PlainText for Packed<TextElem> {
     fn plain_text(&self, text: &mut EcoString) {
-        text.push_str(self.text());
+        text.push_str(&self.text);
     }
 }

@@ -1431,3 +1432,13 @@ fn check_font_list(engine: &mut Engine, list: &Spanned<FontList>) {
         }
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_text_elem_size() {
+        assert_eq!(std::mem::size_of::<TextElem>(), std::mem::size_of::<EcoString>());
+    }
+}
@@ -1,23 +1,25 @@
 use std::cell::LazyCell;
-use std::hash::Hash;
 use std::ops::Range;
 use std::sync::{Arc, LazyLock};

+use comemo::Tracked;
 use ecow::{eco_format, EcoString, EcoVec};
-use syntect::highlighting::{self as synt, Theme};
+use syntect::highlighting as synt;
 use syntect::parsing::{SyntaxDefinition, SyntaxSet, SyntaxSetBuilder};
 use typst_syntax::{split_newlines, LinkedNode, Span, Spanned};
+use typst_utils::ManuallyHash;
 use unicode_segmentation::UnicodeSegmentation;

 use super::Lang;
-use crate::diag::{At, FileError, HintedStrResult, SourceResult, StrResult};
+use crate::diag::{At, FileError, SourceResult, StrResult};
 use crate::engine::Engine;
 use crate::foundations::{
-    cast, elem, scope, Args, Array, Bytes, Content, Fold, NativeElement, Packed,
-    PlainText, Show, ShowSet, Smart, StyleChain, Styles, Synthesize, TargetElem, Value,
+    cast, elem, scope, Bytes, Content, Derived, NativeElement, OneOrMultiple, Packed,
+    PlainText, Show, ShowSet, Smart, StyleChain, Styles, Synthesize, TargetElem,
 };
 use crate::html::{tag, HtmlElem};
 use crate::layout::{BlockBody, BlockElem, Em, HAlignment};
+use crate::loading::{DataSource, Load};
 use crate::model::{Figurable, ParElem};
 use crate::text::{
     FontFamily, FontList, Hyphenate, LinebreakElem, LocalName, TextElem, TextSize,
@@ -25,12 +27,6 @@ use crate::text::{
 use crate::visualize::Color;
 use crate::World;

-// Shorthand for highlighter closures.
-type StyleFn<'a> =
-    &'a mut dyn FnMut(usize, &LinkedNode, Range<usize>, synt::Style) -> Content;
-type LineFn<'a> = &'a mut dyn FnMut(usize, Range<usize>, &mut Vec<Content>);
-type ThemeArgType = Smart<Option<EcoString>>;
-
 /// Raw text with optional syntax highlighting.
 ///
 /// Displays the text verbatim and in a monospace font. This is typically used
@@ -186,9 +182,15 @@ pub struct RawElem {
     #[default(HAlignment::Start)]
     pub align: HAlignment,

-    /// One or multiple additional syntax definitions to load. The syntax
-    /// definitions should be in the
-    /// [`sublime-syntax` file format](https://www.sublimetext.com/docs/syntax.html).
+    /// Additional syntax definitions to load. The syntax definitions should be
+    /// in the [`sublime-syntax` file format](https://www.sublimetext.com/docs/syntax.html).
+    ///
+    /// You can pass any of the following values:
+    ///
+    /// - A path string to load a syntax file from the given path. For more
+    ///   details about paths, see the [Paths section]($syntax/#paths).
+    /// - Raw bytes from which the syntax should be decoded.
+    /// - An array where each item is one of the above.
     ///
     /// ````example
     /// #set raw(syntaxes: "SExpressions.sublime-syntax")
@@ -201,22 +203,24 @@ pub struct RawElem {
     /// (* x (factorial (- x 1)))))
     /// ```
     /// ````
-    #[parse(
-        let (syntaxes, syntaxes_data) = parse_syntaxes(engine, args)?;
-        syntaxes
-    )]
+    #[parse(match args.named("syntaxes")? {
+        Some(sources) => Some(RawSyntax::load(engine.world, sources)?),
+        None => None,
+    })]
     #[fold]
-    pub syntaxes: SyntaxPaths,
-
-    /// The raw file buffers of syntax definition files.
-    #[internal]
-    #[parse(syntaxes_data)]
-    #[fold]
-    pub syntaxes_data: Vec<Bytes>,
+    pub syntaxes: Derived<OneOrMultiple<DataSource>, Vec<RawSyntax>>,

-    /// The theme to use for syntax highlighting. Theme files should be in the
+    /// The theme to use for syntax highlighting. Themes should be in the
     /// [`tmTheme` file format](https://www.sublimetext.com/docs/color_schemes_tmtheme.html).
     ///
+    /// You can pass any of the following values:
+    ///
+    /// - `{none}`: Disables syntax highlighting.
+    /// - `{auto}`: Highlights with Typst's default theme.
+    /// - A path string to load a theme file from the given path. For more
+    ///   details about paths, see the [Paths section]($syntax/#paths).
+    /// - Raw bytes from which the theme should be decoded.
+    ///
     /// Applying a theme only affects the color of specifically highlighted
     /// text. It does not consider the theme's foreground and background
     /// properties, so that you retain control over the color of raw text. You
@@ -224,8 +228,6 @@ pub struct RawElem {
     /// the background with a [filled block]($block.fill). You could also use
     /// the [`xml`] function to extract these properties from the theme.
     ///
-    /// Additionally, you can set the theme to `{none}` to disable highlighting.
-    ///
     /// ````example
     /// #set raw(theme: "halcyon.tmTheme")
     /// #show raw: it => block(
@@ -240,18 +242,16 @@ pub struct RawElem {
     /// #let hi = "Hello World"
     /// ```
     /// ````
-    #[parse(
-        let (theme_path, theme_data) = parse_theme(engine, args)?;
-        theme_path
-    )]
+    #[parse(match args.named::<Spanned<Smart<Option<DataSource>>>>("theme")? {
+        Some(Spanned { v: Smart::Custom(Some(source)), span }) => Some(Smart::Custom(
+            Some(RawTheme::load(engine.world, Spanned::new(source, span))?)
+        )),
+        Some(Spanned { v: Smart::Custom(None), .. }) => Some(Smart::Custom(None)),
+        Some(Spanned { v: Smart::Auto, .. }) => Some(Smart::Auto),
+        None => None,
+    })]
     #[borrowed]
-    pub theme: ThemeArgType,
-
-    /// The raw file buffer of syntax theme file.
-    #[internal]
-    #[parse(theme_data.map(Some))]
-    #[borrowed]
-    pub theme_data: Option<Bytes>,
+    pub theme: Smart<Option<Derived<DataSource, RawTheme>>>,

     /// The size for a tab stop in spaces. A tab is replaced with enough spaces to
     /// align with the next multiple of the size.
@@ -315,7 +315,7 @@ impl Packed<RawElem> {
     #[comemo::memoize]
     fn highlight(&self, styles: StyleChain) -> Vec<Packed<RawLine>> {
         let elem = self.as_ref();
-        let lines = preprocess(elem.text(), styles, self.span());
+        let lines = preprocess(&elem.text, styles, self.span());

         let count = lines.len() as i64;
         let lang = elem
@@ -325,9 +325,6 @@ impl Packed<RawElem> {
             .map(|s| s.to_lowercase())
             .or(Some("txt".into()));

-        let extra_syntaxes = LazyCell::new(|| {
-            load_syntaxes(&elem.syntaxes(styles), &elem.syntaxes_data(styles)).unwrap()
-        });
         let non_highlighted_result = |lines: EcoVec<(EcoString, Span)>| {
             lines.into_iter().enumerate().map(|(i, (line, line_span))| {
                 Packed::new(RawLine::new(
@@ -340,17 +337,13 @@ impl Packed<RawElem> {
             })
         };

-        let theme = elem.theme(styles).as_ref().as_ref().map(|theme_path| {
-            theme_path.as_ref().map(|path| {
-                load_theme(path, elem.theme_data(styles).as_ref().as_ref().unwrap())
-                    .unwrap()
-            })
-        });
-        let theme: &Theme = match theme {
+        let syntaxes = LazyCell::new(|| elem.syntaxes(styles));
+        let theme: &synt::Theme = match elem.theme(styles) {
             Smart::Auto => &RAW_THEME,
-            Smart::Custom(Some(ref theme)) => theme,
+            Smart::Custom(Some(theme)) => theme.derived.get(),
             Smart::Custom(None) => return non_highlighted_result(lines).collect(),
         };

         let foreground = theme.settings.foreground.unwrap_or(synt::Color::BLACK);

         let mut seq = vec![];
@@ -391,13 +384,14 @@ impl Packed<RawElem> {
             )
             .highlight();
         } else if let Some((syntax_set, syntax)) = lang.and_then(|token| {
-            RAW_SYNTAXES
-                .find_syntax_by_token(&token)
-                .map(|syntax| (&*RAW_SYNTAXES, syntax))
-                .or_else(|| {
-                    extra_syntaxes
-                        .find_syntax_by_token(&token)
-                        .map(|syntax| (&**extra_syntaxes, syntax))
+            // Prefer user-provided syntaxes over built-in ones.
+            syntaxes
+                .derived
+                .iter()
+                .map(|syntax| syntax.get())
+                .chain(std::iter::once(&*RAW_SYNTAXES))
+                .find_map(|set| {
+                    set.find_syntax_by_token(&token).map(|syntax| (set, syntax))
                 })
         }) {
             let mut highlighter = syntect::easy::HighlightLines::new(syntax, theme);
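The new chain makes the precedence explicit: user-supplied syntax sets are searched first, and the bundled `RAW_SYNTAXES` only as a fallback. A reduced, self-contained illustration of the same `chain` + `find_map` pattern over plain string sets (names and data here are illustrative, not typst code):

```rust
// Generic illustration of "user sets first, built-in set last".
fn find_token<'a>(
    user_sets: &'a [Vec<&'a str>],
    builtin: &'a [&'a str],
    token: &str,
) -> Option<&'a str> {
    user_sets
        .iter()
        .map(|set| set.as_slice())
        .chain(std::iter::once(builtin))
        .find_map(|set| set.iter().copied().find(|s| *s == token))
}
```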
@@ -496,7 +490,7 @@ impl Figurable for Packed<RawElem> {}

 impl PlainText for Packed<RawElem> {
     fn plain_text(&self, text: &mut EcoString) {
-        text.push_str(&self.text().get());
+        text.push_str(&self.text.get());
     }
 }

@@ -532,6 +526,89 @@ cast! {
     v: EcoString => Self::Text(v),
 }

+/// A loaded syntax.
+#[derive(Debug, Clone, PartialEq, Hash)]
+pub struct RawSyntax(Arc<ManuallyHash<SyntaxSet>>);
+
+impl RawSyntax {
+    /// Load syntaxes from sources.
+    fn load(
+        world: Tracked<dyn World + '_>,
+        sources: Spanned<OneOrMultiple<DataSource>>,
+    ) -> SourceResult<Derived<OneOrMultiple<DataSource>, Vec<RawSyntax>>> {
+        let data = sources.load(world)?;
+        let list = sources
+            .v
+            .0
+            .iter()
+            .zip(&data)
+            .map(|(source, data)| Self::decode(source, data))
+            .collect::<StrResult<_>>()
+            .at(sources.span)?;
+        Ok(Derived::new(sources.v, list))
+    }
+
+    /// Decode a syntax from a loaded source.
+    #[comemo::memoize]
+    #[typst_macros::time(name = "load syntaxes")]
+    fn decode(source: &DataSource, data: &Bytes) -> StrResult<RawSyntax> {
+        let src = data.as_str().map_err(FileError::from)?;
+        let syntax = SyntaxDefinition::load_from_str(src, false, None).map_err(
+            |err| match source {
+                DataSource::Path(path) => {
+                    eco_format!("failed to parse syntax file `{path}` ({err})")
+                }
+                DataSource::Bytes(_) => {
+                    eco_format!("failed to parse syntax ({err})")
+                }
+            },
+        )?;
+
+        let mut builder = SyntaxSetBuilder::new();
+        builder.add(syntax);
+
+        Ok(RawSyntax(Arc::new(ManuallyHash::new(
+            builder.build(),
+            typst_utils::hash128(data),
+        ))))
+    }
+
+    /// Return the underlying syntax set.
+    fn get(&self) -> &SyntaxSet {
+        self.0.as_ref()
+    }
+}
+
+/// A loaded syntect theme.
+#[derive(Debug, Clone, PartialEq, Hash)]
+pub struct RawTheme(Arc<ManuallyHash<synt::Theme>>);
+
+impl RawTheme {
+    /// Load a theme from a data source.
+    fn load(
+        world: Tracked<dyn World + '_>,
+        source: Spanned<DataSource>,
+    ) -> SourceResult<Derived<DataSource, Self>> {
+        let data = source.load(world)?;
+        let theme = Self::decode(&data).at(source.span)?;
+        Ok(Derived::new(source.v, theme))
+    }
+
+    /// Decode a theme from bytes.
+    #[comemo::memoize]
+    fn decode(data: &Bytes) -> StrResult<RawTheme> {
+        let mut cursor = std::io::Cursor::new(data.as_slice());
+        let theme = synt::ThemeSet::load_from_reader(&mut cursor)
+            .map_err(|err| eco_format!("failed to parse theme ({err})"))?;
+        Ok(RawTheme(Arc::new(ManuallyHash::new(theme, typst_utils::hash128(data)))))
+    }
+
+    /// Get the underlying syntect theme.
+    pub fn get(&self) -> &synt::Theme {
+        self.0.as_ref()
+    }
+}
+
 /// A highlighted line of raw text.
 ///
 /// This is a helper element that is synthesized by [`raw`] elements.
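Both `RawSyntax` and `RawTheme` are handed back as the `derived` half of a `Derived` pair, so the user-facing `DataSource` stays available for folding and error reporting while highlighting reads the decoded value. A short sketch of that access pattern (the field names are inferred from `theme.derived.get()` earlier in this diff):

```rust
// Sketch: pull the decoded syntect theme out of the user-facing argument.
// `Derived` is assumed to expose `source` and `derived` as plain fields.
fn resolved_theme(arg: &Derived<DataSource, RawTheme>) -> &synt::Theme {
    arg.derived.get()
}
```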
@@ -561,13 +638,13 @@ pub struct RawLine {
 impl Show for Packed<RawLine> {
     #[typst_macros::time(name = "raw.line", span = self.span())]
     fn show(&self, _: &mut Engine, _styles: StyleChain) -> SourceResult<Content> {
-        Ok(self.body().clone())
+        Ok(self.body.clone())
     }
 }

 impl PlainText for Packed<RawLine> {
     fn plain_text(&self, text: &mut EcoString) {
-        text.push_str(self.text());
+        text.push_str(&self.text);
     }
 }

@@ -593,6 +670,11 @@ struct ThemedHighlighter<'a> {
     line_fn: LineFn<'a>,
 }

+// Shorthands for highlighter closures.
+type StyleFn<'a> =
+    &'a mut dyn FnMut(usize, &LinkedNode, Range<usize>, synt::Style) -> Content;
+type LineFn<'a> = &'a mut dyn FnMut(usize, Range<usize>, &mut Vec<Content>);
+
 impl<'a> ThemedHighlighter<'a> {
     pub fn new(
         code: &'a str,
@@ -738,108 +820,50 @@ fn to_syn(color: Color) -> synt::Color {
     synt::Color { r, g, b, a }
 }

-/// A list of raw syntax file paths.
-#[derive(Debug, Default, Clone, PartialEq, Hash)]
-pub struct SyntaxPaths(Vec<EcoString>);
-
-cast! {
-    SyntaxPaths,
-    self => self.0.into_value(),
-    v: EcoString => Self(vec![v]),
-    v: Array => Self(v.into_iter().map(Value::cast).collect::<HintedStrResult<_>>()?),
-}
-
-impl Fold for SyntaxPaths {
-    fn fold(self, outer: Self) -> Self {
-        Self(self.0.fold(outer.0))
-    }
-}
-
-/// Load a syntax set from a list of syntax file paths.
-#[comemo::memoize]
-#[typst_macros::time(name = "load syntaxes")]
-fn load_syntaxes(paths: &SyntaxPaths, bytes: &[Bytes]) -> StrResult<Arc<SyntaxSet>> {
-    let mut out = SyntaxSetBuilder::new();
-
-    // We might have multiple sublime-syntax/yaml files
-    for (path, bytes) in paths.0.iter().zip(bytes.iter()) {
-        let src = std::str::from_utf8(bytes).map_err(FileError::from)?;
-        out.add(SyntaxDefinition::load_from_str(src, false, None).map_err(|err| {
-            eco_format!("failed to parse syntax file `{path}` ({err})")
-        })?);
-    }
-
-    Ok(Arc::new(out.build()))
-}
-
-/// Function to parse the syntaxes argument.
-/// Much nicer than having it be part of the `element` macro.
-fn parse_syntaxes(
-    engine: &mut Engine,
-    args: &mut Args,
-) -> SourceResult<(Option<SyntaxPaths>, Option<Vec<Bytes>>)> {
-    let Some(Spanned { v: paths, span }) =
-        args.named::<Spanned<SyntaxPaths>>("syntaxes")?
-    else {
-        return Ok((None, None));
-    };
-
-    // Load syntax files.
-    let data = paths
-        .0
-        .iter()
-        .map(|path| {
-            let id = span.resolve_path(path).at(span)?;
-            engine.world.file(id).at(span)
-        })
-        .collect::<SourceResult<Vec<Bytes>>>()?;
-
-    // Check that parsing works.
-    let _ = load_syntaxes(&paths, &data).at(span)?;
-
-    Ok((Some(paths), Some(data)))
-}
-
-#[comemo::memoize]
-#[typst_macros::time(name = "load theme")]
-fn load_theme(path: &str, bytes: &Bytes) -> StrResult<Arc<synt::Theme>> {
-    let mut cursor = std::io::Cursor::new(bytes.as_slice());
-
-    synt::ThemeSet::load_from_reader(&mut cursor)
-        .map(Arc::new)
-        .map_err(|err| eco_format!("failed to parse theme file `{path}` ({err})"))
-}
-
-/// Function to parse the theme argument.
-/// Much nicer than having it be part of the `element` macro.
-fn parse_theme(
-    engine: &mut Engine,
-    args: &mut Args,
-) -> SourceResult<(Option<ThemeArgType>, Option<Bytes>)> {
-    let Some(Spanned { v: path, span }) = args.named::<Spanned<ThemeArgType>>("theme")?
-    else {
-        // Argument `theme` not found.
-        return Ok((None, None));
-    };
-
-    let Smart::Custom(path) = path else {
-        // Argument `theme` is `auto`.
-        return Ok((Some(Smart::Auto), None));
-    };
-
-    let Some(path) = path else {
-        // Argument `theme` is `none`.
-        return Ok((Some(Smart::Custom(None)), None));
-    };
-
-    // Load theme file.
-    let id = span.resolve_path(&path).at(span)?;
-    let data = engine.world.file(id).at(span)?;
-
-    // Check that parsing works.
-    let _ = load_theme(&path, &data).at(span)?;
-
-    Ok((Some(Smart::Custom(Some(path))), Some(data)))
-}
+/// Create a syntect theme item.
+fn item(
+    scope: &str,
+    color: Option<&str>,
+    font_style: Option<synt::FontStyle>,
+) -> synt::ThemeItem {
+    synt::ThemeItem {
+        scope: scope.parse().unwrap(),
+        style: synt::StyleModifier {
+            foreground: color.map(|s| to_syn(s.parse::<Color>().unwrap())),
+            background: None,
+            font_style,
+        },
+    }
+}
+
+/// Replace tabs with spaces to align with multiples of `tab_size`.
+fn align_tabs(text: &str, tab_size: usize) -> EcoString {
+    let replacement = " ".repeat(tab_size);
+    let divisor = tab_size.max(1);
+    let amount = text.chars().filter(|&c| c == '\t').count();
+
+    let mut res = EcoString::with_capacity(text.len() - amount + amount * tab_size);
+    let mut column = 0;
+
+    for grapheme in text.graphemes(true) {
+        match grapheme {
+            "\t" => {
+                let required = tab_size - column % divisor;
+                res.push_str(&replacement[..required]);
+                column += required;
+            }
+            "\n" => {
+                res.push_str(grapheme);
+                column = 0;
+            }
+            _ => {
+                res.push_str(grapheme);
+                column += 1;
+            }
+        }
+    }
+
+    res
+}

 /// The syntect syntax definitions.
|
|||||||
item("markup.deleted, meta.diff.header.from-file", Some("#d73a49"), None),
|
item("markup.deleted, meta.diff.header.from-file", Some("#d73a49"), None),
|
||||||
],
|
],
|
||||||
});
|
});
|
||||||
|
|
||||||
/// Create a syntect theme item.
|
|
||||||
fn item(
|
|
||||||
scope: &str,
|
|
||||||
color: Option<&str>,
|
|
||||||
font_style: Option<synt::FontStyle>,
|
|
||||||
) -> synt::ThemeItem {
|
|
||||||
synt::ThemeItem {
|
|
||||||
scope: scope.parse().unwrap(),
|
|
||||||
style: synt::StyleModifier {
|
|
||||||
foreground: color.map(|s| to_syn(s.parse::<Color>().unwrap())),
|
|
||||||
background: None,
|
|
||||||
font_style,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Replace tabs with spaces to align with multiples of `tab_size`.
|
|
||||||
fn align_tabs(text: &str, tab_size: usize) -> EcoString {
|
|
||||||
let replacement = " ".repeat(tab_size);
|
|
||||||
let divisor = tab_size.max(1);
|
|
||||||
let amount = text.chars().filter(|&c| c == '\t').count();
|
|
||||||
|
|
||||||
let mut res = EcoString::with_capacity(text.len() - amount + amount * tab_size);
|
|
||||||
let mut column = 0;
|
|
||||||
|
|
||||||
for grapheme in text.graphemes(true) {
|
|
||||||
match grapheme {
|
|
||||||
"\t" => {
|
|
||||||
let required = tab_size - column % divisor;
|
|
||||||
res.push_str(&replacement[..required]);
|
|
||||||
column += required;
|
|
||||||
}
|
|
||||||
"\n" => {
|
|
||||||
res.push_str(grapheme);
|
|
||||||
column = 0;
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
res.push_str(grapheme);
|
|
||||||
column += 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
res
|
|
||||||
}
|
|
||||||
|
@@ -50,7 +50,7 @@ pub struct SubElem {
 impl Show for Packed<SubElem> {
     #[typst_macros::time(name = "sub", span = self.span())]
     fn show(&self, engine: &mut Engine, styles: StyleChain) -> SourceResult<Content> {
-        let body = self.body().clone();
+        let body = self.body.clone();

         if self.typographic(styles) {
             if let Some(text) = convert_script(&body, true) {
@@ -109,7 +109,7 @@ pub struct SuperElem {
 impl Show for Packed<SuperElem> {
     #[typst_macros::time(name = "super", span = self.span())]
     fn show(&self, engine: &mut Engine, styles: StyleChain) -> SourceResult<Content> {
-        let body = self.body().clone();
+        let body = self.body.clone();

         if self.typographic(styles) {
             if let Some(text) = convert_script(&body, false) {
@@ -132,9 +132,9 @@ fn convert_script(content: &Content, sub: bool) -> Option<EcoString> {
         Some(' '.into())
     } else if let Some(elem) = content.to_packed::<TextElem>() {
         if sub {
-            elem.text().chars().map(to_subscript_codepoint).collect()
+            elem.text.chars().map(to_subscript_codepoint).collect()
         } else {
-            elem.text().chars().map(to_superscript_codepoint).collect()
+            elem.text.chars().map(to_superscript_codepoint).collect()
         }
     } else if let Some(sequence) = content.to_packed::<SequenceElem>() {
         sequence
@@ -53,6 +53,6 @@ pub struct SmallcapsElem {
 impl Show for Packed<SmallcapsElem> {
     #[typst_macros::time(name = "smallcaps", span = self.span())]
     fn show(&self, _: &mut Engine, _: StyleChain) -> SourceResult<Content> {
-        Ok(self.body().clone().styled(TextElem::set_smallcaps(true)))
+        Ok(self.body.clone().styled(TextElem::set_smallcaps(true)))
     }
 }
@@ -248,8 +248,6 @@ impl Color {
     /// ```
     #[func]
     pub fn luma(
-        /// The real arguments (the other arguments are just for the docs, this
-        /// function is a bit involved, so we parse the arguments manually).
         args: &mut Args,
         /// The lightness component.
         #[external]
@@ -300,8 +298,6 @@ impl Color {
     /// ```
     #[func]
     pub fn oklab(
-        /// The real arguments (the other arguments are just for the docs, this
-        /// function is a bit involved, so we parse the arguments manually).
         args: &mut Args,
         /// The lightness component.
         #[external]
@@ -358,8 +354,6 @@ impl Color {
     /// ```
     #[func]
     pub fn oklch(
-        /// The real arguments (the other arguments are just for the docs, this
-        /// function is a bit involved, so we parse the arguments manually).
         args: &mut Args,
         /// The lightness component.
         #[external]
@@ -420,8 +414,6 @@ impl Color {
     /// ```
     #[func(title = "Linear RGB")]
     pub fn linear_rgb(
-        /// The real arguments (the other arguments are just for the docs, this
-        /// function is a bit involved, so we parse the arguments manually).
         args: &mut Args,
         /// The red component.
         #[external]
@@ -477,8 +469,6 @@ impl Color {
     /// ```
     #[func(title = "RGB")]
     pub fn rgb(
-        /// The real arguments (the other arguments are just for the docs, this
-        /// function is a bit involved, so we parse the arguments manually).
         args: &mut Args,
         /// The red component.
         #[external]
@@ -555,8 +545,6 @@ impl Color {
     /// ```
     #[func(title = "CMYK")]
     pub fn cmyk(
-        /// The real arguments (the other arguments are just for the docs, this
-        /// function is a bit involved, so we parse the arguments manually).
         args: &mut Args,
         /// The cyan component.
         #[external]
@@ -614,8 +602,6 @@ impl Color {
     /// ```
     #[func(title = "HSL")]
    pub fn hsl(
-        /// The real arguments (the other arguments are just for the docs, this
-        /// function is a bit involved, so we parse the arguments manually).
         args: &mut Args,
         /// The hue angle.
         #[external]
@@ -673,8 +659,6 @@ impl Color {
     /// ```
     #[func(title = "HSV")]
     pub fn hsv(
-        /// The real arguments (the other arguments are just for the docs, this
-        /// function is a bit involved, so we parse the arguments manually).
         args: &mut Args,
         /// The hue angle.
         #[external]
@@ -898,7 +882,6 @@ impl Color {
     #[func]
     pub fn saturate(
         self,
-        /// The call span
         span: Span,
         /// The factor to saturate the color by.
         factor: Ratio,
@@ -924,7 +907,6 @@ impl Color {
     #[func]
     pub fn desaturate(
         self,
-        /// The call span
         span: Span,
         /// The factor to desaturate the color by.
         factor: Ratio,
@@ -1001,7 +983,6 @@ impl Color {
     #[func]
     pub fn rotate(
         self,
-        /// The call span
         span: Span,
         /// The angle to rotate the hue by.
         angle: Angle,
@@ -200,9 +200,7 @@ impl Gradient {
     /// ```
     #[func(title = "Linear Gradient")]
     pub fn linear(
-        /// The args of this function.
         args: &mut Args,
-        /// The call site of this function.
         span: Span,
         /// The color [stops](#stops) of the gradient.
         #[variadic]
@@ -292,7 +290,6 @@ impl Gradient {
     /// ```
     #[func]
     fn radial(
-        /// The call site of this function.
         span: Span,
         /// The color [stops](#stops) of the gradient.
         #[variadic]
@@ -407,7 +404,6 @@ impl Gradient {
     /// ```
     #[func]
     pub fn conic(
-        /// The call site of this function.
         span: Span,
         /// The color [stops](#stops) of the gradient.
         #[variadic]
@@ -14,14 +14,14 @@ use ecow::EcoString;
 use typst_syntax::{Span, Spanned};
 use typst_utils::LazyHash;

-use crate::diag::{At, SourceResult, StrResult};
+use crate::diag::{SourceResult, StrResult};
 use crate::engine::Engine;
 use crate::foundations::{
-    cast, elem, func, scope, Bytes, Cast, Content, NativeElement, Packed, Show, Smart,
-    StyleChain,
+    cast, elem, func, scope, Bytes, Cast, Content, Derived, NativeElement, Packed, Show,
+    Smart, StyleChain,
 };
 use crate::layout::{BlockElem, Length, Rel, Sizing};
-use crate::loading::Readable;
+use crate::loading::{DataSource, Load, Readable};
 use crate::model::Figurable;
 use crate::text::LocalName;
 use crate::World;
@@ -46,25 +46,16 @@ use crate::World;
 /// ```
 #[elem(scope, Show, LocalName, Figurable)]
 pub struct ImageElem {
-    /// Path to an image file.
+    /// A path to an image file or raw bytes making up an encoded image.
     ///
-    /// For more details, see the [Paths section]($syntax/#paths).
+    /// For more details about paths, see the [Paths section]($syntax/#paths).
     #[required]
     #[parse(
-        let Spanned { v: path, span } =
-            args.expect::<Spanned<EcoString>>("path to image file")?;
-        let id = span.resolve_path(&path).at(span)?;
-        let data = engine.world.file(id).at(span)?;
-        path
+        let source = args.expect::<Spanned<DataSource>>("source")?;
+        let data = source.load(engine.world)?;
+        Derived::new(source.v, data)
     )]
-    #[borrowed]
-    pub path: EcoString,
-
-    /// The raw file data.
-    #[internal]
-    #[required]
-    #[parse(Readable::Bytes(data))]
-    pub data: Readable,
+    pub source: Derived<DataSource, Bytes>,

     /// The image's format. Detected automatically by default.
     ///
@@ -106,6 +97,9 @@ pub struct ImageElem {
 impl ImageElem {
     /// Decode a raster or vector graphic from bytes or a string.
     ///
+    /// This function is deprecated. The [`image`] function now accepts bytes
+    /// directly.
+    ///
     /// ```example
     /// #let original = read("diagram.svg")
     /// #let changed = original.replace(
@@ -118,7 +112,6 @@ impl ImageElem {
     /// ```
     #[func(title = "Decode Image")]
     pub fn decode(
-        /// The call span of this function.
         span: Span,
         /// The data to decode as an image. Can be a string for SVGs.
         data: Readable,
@@ -138,7 +131,9 @@ impl ImageElem {
         #[named]
         fit: Option<ImageFit>,
     ) -> StrResult<Content> {
-        let mut elem = ImageElem::new(EcoString::new(), data);
+        let bytes = data.into_bytes();
+        let source = Derived::new(DataSource::Bytes(bytes.clone()), bytes);
+        let mut elem = ImageElem::new(source);
         if let Some(format) = format {
             elem.push_format(format);
         }
@@ -337,6 +332,22 @@ pub enum ImageFormat {
     Vector(VectorFormat),
 }

+impl ImageFormat {
+    /// Try to detect the format of an image from data.
+    pub fn detect(data: &[u8]) -> Option<Self> {
+        if let Some(format) = RasterFormat::detect(data) {
+            return Some(Self::Raster(format));
+        }
+
+        // SVG or compressed SVG.
+        if data.starts_with(b"<svg") || data.starts_with(&[0x1f, 0x8b]) {
+            return Some(Self::Vector(VectorFormat::Svg));
+        }
+
+        None
+    }
+}
+
 /// A vector graphics format.
 #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Cast)]
 pub enum VectorFormat {
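`ImageFormat::detect` gives the new bytes-based `image` source a best-effort guess before requiring an explicit `format` argument. A small usage sketch (the caller shown here is hypothetical; only `detect` itself comes from this diff):

```rust
// Hypothetical caller of the new helper above.
fn describe(data: &[u8]) -> &'static str {
    match ImageFormat::detect(data) {
        Some(ImageFormat::Raster(_)) => "raster image",
        Some(ImageFormat::Vector(_)) => "vector image (SVG, possibly gzipped)",
        None => "unknown format; an explicit `format:` is required",
    }
}
```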
@@ -274,7 +274,7 @@ mod tests {
     #[track_caller]
     fn test(path: &str, format: RasterFormat, dpi: f64) {
         let data = typst_dev_assets::get(path).unwrap();
-        let bytes = Bytes::from_static(data);
+        let bytes = Bytes::new(data);
         let image = RasterImage::new(bytes, format).unwrap();
         assert_eq!(image.dpi().map(f64::round), Some(dpi));
     }
@@ -110,6 +110,7 @@ impl Hash for Repr {
         // all used fonts gives us something similar.
         self.data.hash(state);
         self.font_hash.hash(state);
+        self.flatten_text.hash(state);
     }
 }

@@ -67,8 +67,8 @@ impl PolygonElem {
     /// ```
     #[func(title = "Regular Polygon")]
     pub fn regular(
-        /// The call span of this function.
         span: Span,
+
         /// How to fill the polygon. See the general
         /// [polygon's documentation]($polygon.fill) for more details.
         #[named]
@@ -97,8 +97,6 @@ impl Stroke {
     /// ```
     #[func(constructor)]
     pub fn construct(
-        /// The real arguments (the other arguments are just for the docs, this
-        /// function is a bit involved, so we parse the arguments manually).
         args: &mut Args,

         /// The color or gradient to use for the stroke.
@@ -138,7 +138,6 @@ impl Tiling {
     #[func(constructor)]
     pub fn construct(
         engine: &mut Engine,
-        /// The callsite span.
         span: Span,
         /// The bounding box of each cell of the tiling.
         #[named]
@ -63,6 +63,11 @@ impl Elem {
|
|||||||
self.real_fields().filter(|field| !field.ghost)
|
self.real_fields().filter(|field| !field.ghost)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Fields that get accessor, with, and push methods.
|
||||||
|
fn accessor_fields(&self) -> impl Iterator<Item = &Field> + Clone {
|
||||||
|
self.struct_fields().filter(|field| !field.required)
|
||||||
|
}
|
||||||
|
|
||||||
/// Fields that are relevant for equality.
|
/// Fields that are relevant for equality.
|
||||||
///
|
///
|
||||||
/// Synthesized fields are excluded to ensure equality before and after
|
/// Synthesized fields are excluded to ensure equality before and after
|
||||||
@@ -442,9 +447,9 @@ fn create_inherent_impl(element: &Elem) -> TokenStream {
     let Elem { ident, .. } = element;

     let new_func = create_new_func(element);
-    let with_field_methods = element.struct_fields().map(create_with_field_method);
-    let push_field_methods = element.struct_fields().map(create_push_field_method);
-    let field_methods = element.struct_fields().map(create_field_method);
+    let with_field_methods = element.accessor_fields().map(create_with_field_method);
+    let push_field_methods = element.accessor_fields().map(create_push_field_method);
+    let field_methods = element.accessor_fields().map(create_field_method);
     let field_in_methods = element.style_fields().map(create_field_in_method);
     let set_field_methods = element.style_fields().map(create_set_field_method);

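As an aside on the two hunks above: `accessor_fields` narrows the generated accessor, `with_`, and `push_` methods to struct fields that are not required constructor arguments. The standalone sketch below only illustrates that filtering idea; the `Field` struct and its two flags are simplified stand-ins for the macro's real field model, not typst's actual types.

#[derive(Debug)]
struct Field {
    name: &'static str,
    required: bool,
    ghost: bool,
}

// Simplified version of the new filter: in this sketch, only non-ghost,
// non-required fields get accessor, with, and push methods.
fn accessor_fields(fields: &[Field]) -> impl Iterator<Item = &Field> {
    fields.iter().filter(|field| !field.ghost && !field.required)
}

fn main() {
    let fields = [
        Field { name: "body", required: true, ghost: false },
        Field { name: "fill", required: false, ghost: false },
        Field { name: "span", required: false, ghost: true },
    ];
    // Prints only "fill".
    for field in accessor_fields(&fields) {
        println!("{}", field.name);
    }
}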
@@ -12,7 +12,7 @@ use typst_syntax::Span;
 use xmp_writer::{DateTime, LangId, RenditionClass, XmpWriter};

 use crate::page::PdfPageLabel;
-use crate::{hash_base64, outline, TextStrExt, Timezone, WithEverything};
+use crate::{hash_base64, outline, TextStrExt, Timestamp, Timezone, WithEverything};

 /// Write the document catalog.
 pub fn write_catalog(
@@ -86,23 +86,10 @@ pub fn write_catalog(
         info.keywords(TextStr::trimmed(&joined));
         xmp.pdf_keywords(&joined);
     }
-    // (1) If the `document.date` is set to specific `datetime` or `none`, use it.
-    // (2) If the `document.date` is set to `auto` or not set, try to use the
-    // date from the options.
-    // (3) Otherwise, we don't write date metadata.
-    let (date, tz) = match (ctx.document.info.date, ctx.options.timestamp) {
-        (Smart::Custom(date), _) => (date, None),
-        (Smart::Auto, Some(timestamp)) => {
-            (Some(timestamp.datetime), Some(timestamp.timezone))
-        }
-        _ => (None, None),
-    };
-    if let Some(date) = date {
-        if let Some(pdf_date) = pdf_date(date, tz) {
-            info.creation_date(pdf_date);
-            info.modified_date(pdf_date);
-        }
+    let (date, tz) = document_date(ctx.document.info.date, ctx.options.timestamp);
+    if let Some(pdf_date) = date.and_then(|date| pdf_date(date, tz)) {
+        info.creation_date(pdf_date);
+        info.modified_date(pdf_date);
     }

     info.finish();
@@ -154,7 +141,7 @@ pub fn write_catalog(
     }

     // Assert dominance.
-    if ctx.options.standards.pdfa {
+    if let Some((part, conformance)) = ctx.options.standards.pdfa_part {
         let mut extension_schemas = xmp.extension_schemas();
         extension_schemas
             .xmp_media_management()
@@ -162,8 +149,8 @@ pub fn write_catalog(
             .describe_instance_id();
         extension_schemas.pdf().properties().describe_all();
         extension_schemas.finish();
-        xmp.pdfa_part(2);
-        xmp.pdfa_conformance("B");
+        xmp.pdfa_part(part);
+        xmp.pdfa_conformance(conformance);
     }

     let xmp_buf = xmp.finish(None);
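The hunk above stops hard-coding PDF/A-2b in the XMP metadata and instead writes whatever `(part, conformance)` pair the export options carry. How that pair is derived is not shown in this excerpt; the sketch below is only a guess at the mapping, using a hypothetical `PdfaLevel` enum and `pdfa_part_of` helper (PDF/A-2b is ISO 19005-2 level B, PDF/A-3b is ISO 19005-3 level B).

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum PdfaLevel {
    A2b,
    A3b,
}

// Hypothetical reduction of the requested PDF/A levels to the pair that ends
// up in xmp.pdfa_part(..) and xmp.pdfa_conformance(..).
fn pdfa_part_of(levels: &[PdfaLevel]) -> Option<(i32, &'static str)> {
    levels.first().map(|level| match level {
        PdfaLevel::A2b => (2, "B"),
        PdfaLevel::A3b => (3, "B"),
    })
}

fn main() {
    assert_eq!(pdfa_part_of(&[PdfaLevel::A3b]), Some((3, "B")));
    assert_eq!(pdfa_part_of(&[]), None);
}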
@@ -182,13 +169,35 @@ pub fn write_catalog(
     catalog.viewer_preferences().direction(dir);
     catalog.metadata(meta_ref);

-    // Write the named destination tree if there are any entries.
-    if !ctx.references.named_destinations.dests.is_empty() {
+    let has_dests = !ctx.references.named_destinations.dests.is_empty();
+    let has_embeddings = !ctx.references.embedded_files.is_empty();
+
+    // Write the `/Names` dictionary.
+    if has_dests || has_embeddings {
+        // Write the named destination tree if there are any entries.
         let mut name_dict = catalog.names();
-        let mut dests_name_tree = name_dict.destinations();
-        let mut names = dests_name_tree.names();
-        for &(name, dest_ref, ..) in &ctx.references.named_destinations.dests {
-            names.insert(Str(name.resolve().as_bytes()), dest_ref);
+        if has_dests {
+            let mut dests_name_tree = name_dict.destinations();
+            let mut names = dests_name_tree.names();
+            for &(name, dest_ref, ..) in &ctx.references.named_destinations.dests {
+                names.insert(Str(name.resolve().as_bytes()), dest_ref);
+            }
+        }
+
+        if has_embeddings {
+            let mut embedded_files = name_dict.embedded_files();
+            let mut names = embedded_files.names();
+            for (name, file_ref) in &ctx.references.embedded_files {
+                names.insert(Str(name.as_bytes()), *file_ref);
+            }
+        }
+    }
+
+    if has_embeddings && ctx.options.standards.pdfa {
+        // PDF 2.0, but ISO 19005-3 (PDF/A-3) Annex E allows it for PDF/A-3.
+        let mut associated_files = catalog.insert(Name(b"AF")).array().typed();
+        for (_, file_ref) in ctx.references.embedded_files {
+            associated_files.item(file_ref).finish();
         }
     }

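For orientation, the restructured block above only opens the `/Names` dictionary when there is at least one named destination or embedded file, and under PDF/A it additionally lists every embedded file in a document-level `/AF` array (permitted for PDF/A-3 via ISO 19005-3 Annex E). Below is a minimal pdf-writer sketch of that shape, separate from typst's own writer plumbing; the object numbers and file name are invented, and a complete document would also need the page tree and the `/EmbeddedFile` stream behind `file_ref`.

use pdf_writer::{Finish, Name, Pdf, Ref, Str};

fn main() {
    let mut pdf = Pdf::new();
    let catalog_id = Ref::new(1);
    let file_ref = Ref::new(2); // would point to an /EmbeddedFile stream

    let mut catalog = pdf.catalog(catalog_id);
    {
        // Only written because there is something to put into /Names.
        let mut names = catalog.names();
        let mut embedded = names.embedded_files();
        let mut entries = embedded.names();
        entries.insert(Str(b"example.csv"), file_ref);
    }

    // PDF/A-3 expects embedded files to also appear in the /AF array.
    catalog.insert(Name(b"AF")).array().typed().item(file_ref);
    catalog.finish();

    let _bytes = pdf.finish();
}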
@@ -289,8 +298,27 @@ pub(crate) fn write_page_labels(
     result
 }

+/// Resolve the document date.
+///
+/// (1) If the `document.date` is set to specific `datetime` or `none`, use it.
+/// (2) If the `document.date` is set to `auto` or not set, try to use the
+/// date from the options.
+/// (3) Otherwise, we don't write date metadata.
+pub fn document_date(
+    document_date: Smart<Option<Datetime>>,
+    timestamp: Option<Timestamp>,
+) -> (Option<Datetime>, Option<Timezone>) {
+    match (document_date, timestamp) {
+        (Smart::Custom(date), _) => (date, None),
+        (Smart::Auto, Some(timestamp)) => {
+            (Some(timestamp.datetime), Some(timestamp.timezone))
+        }
+        _ => (None, None),
+    }
+}
+
 /// Converts a datetime to a pdf-writer date.
-fn pdf_date(datetime: Datetime, tz: Option<Timezone>) -> Option<pdf_writer::Date> {
+pub fn pdf_date(datetime: Datetime, tz: Option<Timezone>) -> Option<pdf_writer::Date> {
     let year = datetime.year().filter(|&y| y >= 0)? as u16;

     let mut pdf_date = pdf_writer::Date::new(year);
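To make the precedence documented on `document_date` concrete, here is a small self-contained sketch that mirrors the same match; `Smart`, `Datetime`, `Timestamp`, and `Timezone` are simplified stand-ins, not the real typst types.

#[derive(Clone, Copy)]
enum Smart<T> {
    Auto,
    Custom(T),
}

type Datetime = &'static str; // stand-in, e.g. "2024-11-15"
type Timezone = &'static str; // stand-in, e.g. "UTC"

#[derive(Clone, Copy)]
struct Timestamp {
    datetime: Datetime,
    timezone: Timezone,
}

fn document_date(
    date: Smart<Option<Datetime>>,
    timestamp: Option<Timestamp>,
) -> (Option<Datetime>, Option<Timezone>) {
    match (date, timestamp) {
        // (1) An explicit `datetime` (or `none`) set on the document wins.
        (Smart::Custom(date), _) => (date, None),
        // (2) `auto` falls back to the timestamp passed in via the options.
        (Smart::Auto, Some(ts)) => (Some(ts.datetime), Some(ts.timezone)),
        // (3) Otherwise no date metadata is written.
        _ => (None, None),
    }
}

fn main() {
    let ts = Timestamp { datetime: "2024-11-15", timezone: "UTC" };
    assert_eq!(document_date(Smart::Custom(None), Some(ts)), (None, None));
    assert_eq!(
        document_date(Smart::Auto, Some(ts)),
        (Some("2024-11-15"), Some("UTC"))
    );
    assert_eq!(document_date(Smart::Auto, None), (None, None));
}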
Some files were not shown because too many files have changed in this diff.