Merge branch 'main' into warn-suppression
.github/workflows/ci.yml (4 changed lines)

@@ -53,7 +53,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      - uses: dtolnay/rust-toolchain@1.74.0
+      - uses: dtolnay/rust-toolchain@1.77.0
       - uses: Swatinem/rust-cache@v2
       - run: cargo check --workspace

@@ -64,7 +64,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: dtolnay/rust-toolchain@master
        with:
-          toolchain: nightly-2023-09-13
+          toolchain: nightly-2024-06-01
       - uses: Swatinem/rust-cache@v2
       - run: cargo install --locked cargo-fuzz@0.12.0
       - run: cd tests/fuzz && cargo fuzz build --dev
Cargo.lock (861 changed lines, generated)

Cargo.toml (18 changed lines)

@@ -5,7 +5,7 @@ resolver = "2"

 [workspace.package]
 version = "0.11.0"
-rust-version = "1.74" # also change in ci.yml
+rust-version = "1.77" # also change in ci.yml
 authors = ["The Typst Project Developers"]
 edition = "2021"
 homepage = "https://typst.app"

@@ -26,8 +26,8 @@ typst-svg = { path = "crates/typst-svg", version = "0.11.0" }
 typst-syntax = { path = "crates/typst-syntax", version = "0.11.0" }
 typst-timing = { path = "crates/typst-timing", version = "0.11.0" }
 typst-utils = { path = "crates/typst-utils", version = "0.11.0" }
-typst-assets = "0.11.0"
-typst-dev-assets = { git = "https://github.com/typst/typst-dev-assets", rev = "48a924d9de82b631bc775124a69384c8d860db04" }
+typst-assets = { git = "https://github.com/typst/typst-assets", rev = "4ee794c" }
+typst-dev-assets = { git = "https://github.com/typst/typst-dev-assets", rev = "48a924d" }
 az = "1.2"
 base64 = "0.22"
 bitflags = { version = "2", features = ["serde"] }

@@ -49,7 +49,7 @@ flate2 = "1"
 fontdb = { version = "0.18", default-features = false }
 fs_extra = "1.3"
 hayagriva = "0.5.3"
-heck = "0.4"
+heck = "0.5"
 hypher = "0.1.4"
 icu_properties = { version = "1.4", features = ["serde"] }
 icu_provider = { version = "1.4", features = ["sync"] }

@@ -57,7 +57,7 @@ icu_provider_adapters = "1.4"
 icu_provider_blob = "1.4"
 icu_segmenter = { version = "1.4", features = ["serde"] }
 if_chain = "1"
-image = { version = "0.24", default-features = false, features = ["png", "jpeg", "gif"] }
+image = { version = "0.25", default-features = false, features = ["png", "jpeg", "gif"] }
 indexmap = { version = "2", features = ["serde"] }
 kamadak-exif = "0.5"
 kurbo = "0.11"

@@ -109,7 +109,7 @@ time = { version = "0.3.20", features = ["formatting", "macros", "parsing"] }
 tiny-skia = "0.11"
 toml = { version = "0.8", default-features = false, features = ["parse", "display"] }
 ttf-parser = "0.21.0"
-two-face = { version = "0.3.0", default-features = false, features = ["syntect-fancy"] }
+two-face = { version = "0.4.0", default-features = false, features = ["syntect-fancy"] }
 typed-arena = "2"
 unicode-bidi = "0.3.13"
 unicode-ident = "1.0"

@@ -121,13 +121,13 @@ unscanny = "0.1"
 ureq = { version = "2", default-features = false, features = ["native-tls", "gzip", "json"] }
 usvg = { version = "0.42", default-features = false, features = ["text"] }
 walkdir = "2"
-wasmi = "0.31.0"
+wasmi = "0.34.0"
 xmlparser = "0.13.5"
 xmlwriter = "0.1.0"
 xmp-writer = "0.2"
-xz2 = "0.1"
+xz2 = { version = "0.1", features = ["static"] }
 yaml-front-matter = "0.1"
-zip = { version = "0.6", default-features = false, features = ["deflate"] }
+zip = { version = "2", default-features = false, features = ["deflate"] }

 [profile.dev.package."*"]
 opt-level = 2

@@ -85,3 +85,13 @@ vendor-openssl = ["openssl/vendored"]

 [lints]
 workspace = true
+
+# The following metadata is used by `cargo-binstall`, and should be synchronized
+# with `.github/workflows/release.yml`.
+[package.metadata.binstall]
+pkg-url = "{ repo }/releases/download/v{ version }/typst-{ target }{ archive-suffix }"
+bin-dir = "typst-{ target }/typst{ binary-ext }"
+pkg-fmt = "txz"
+
+[package.metadata.binstall.overrides.x86_64-pc-windows-msvc]
+pkg-fmt = "zip"
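The `[package.metadata.binstall]` block above is a URL template: `cargo-binstall` substitutes the bracketed fields to locate a release artifact. A rough sketch of that substitution, with made-up repo, version, target, and archive-suffix values (binstall's own resolution rules are more involved than this):

```rust
// Illustrative only: naive placeholder substitution in the style binstall
// applies to `pkg-url`. All concrete values below are examples, not taken
// from the release setup.
fn expand(template: &str, pairs: &[(&str, &str)]) -> String {
    pairs.iter().fold(template.to_string(), |acc, (key, value)| {
        acc.replace(&format!("{{ {key} }}"), value)
    })
}

fn main() {
    let url = expand(
        "{ repo }/releases/download/v{ version }/typst-{ target }{ archive-suffix }",
        &[
            ("repo", "https://github.com/typst/typst"),
            ("version", "0.11.0"),
            ("target", "x86_64-unknown-linux-musl"),
            ("archive-suffix", ".tar.xz"),
        ],
    );
    assert_eq!(
        url,
        "https://github.com/typst/typst/releases/download/v0.11.0/typst-x86_64-unknown-linux-musl.tar.xz"
    );
}
```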
@@ -5,7 +5,7 @@ use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use clap::builder::ValueParser;
|
||||
use clap::builder::{TypedValueParser, ValueParser};
|
||||
use clap::{ArgAction, Args, ColorChoice, Parser, Subcommand, ValueEnum};
|
||||
use semver::Version;
|
||||
|
||||
@@ -77,7 +77,7 @@ pub struct CompileCommand {
|
||||
/// must be present if the source document renders to multiple pages. Use `{p}` for page
|
||||
/// numbers, `{0p}` for zero padded page numbers and `{t}` for page count. For example,
|
||||
/// `page-{0p}-of-{t}.png` creates `page-01-of-10.png`, `page-02-of-10.png` and so on.
|
||||
#[clap(required_if_eq("input", "-"), value_parser = ValueParser::new(output_value_parser))]
|
||||
#[clap(required_if_eq("input", "-"), value_parser = make_output_value_parser())]
|
||||
pub output: Option<Output>,
|
||||
|
||||
/// Which pages to export. When unspecified, all document pages are exported.
|
||||
@@ -100,9 +100,11 @@ pub struct CompileCommand {
|
||||
#[arg(long = "format", short = 'f')]
|
||||
pub format: Option<OutputFormat>,
|
||||
|
||||
/// Opens the output file using the default viewer after compilation.
|
||||
/// Ignored if output is stdout
|
||||
#[arg(long = "open")]
|
||||
/// Opens the output file with the default viewer or a specific program after
|
||||
/// compilation
|
||||
///
|
||||
/// Ignored if output is stdout.
|
||||
#[arg(long = "open", value_name = "VIEWER")]
|
||||
pub open: Option<Option<String>>,
|
||||
|
||||
/// The PPI (pixels per inch) to use for PNG export
|
||||
@@ -175,7 +177,7 @@ pub enum SerializationFormat {
|
||||
#[derive(Debug, Clone, Args)]
|
||||
pub struct SharedArgs {
|
||||
/// Path to input Typst file. Use `-` to read input from stdin
|
||||
#[clap(value_parser = input_value_parser)]
|
||||
#[clap(value_parser = make_input_value_parser())]
|
||||
pub input: Input,
|
||||
|
||||
/// Configures the project root (for absolute paths)
|
||||
@@ -277,26 +279,30 @@ impl Display for Output {
|
||||
}
|
||||
|
||||
/// The clap value parser used by `SharedArgs.input`
|
||||
fn input_value_parser(value: &str) -> Result<Input, clap::error::Error> {
|
||||
if value.is_empty() {
|
||||
Err(clap::Error::new(clap::error::ErrorKind::InvalidValue))
|
||||
} else if value == "-" {
|
||||
Ok(Input::Stdin)
|
||||
} else {
|
||||
Ok(Input::Path(value.into()))
|
||||
}
|
||||
fn make_input_value_parser() -> impl TypedValueParser<Value = Input> {
|
||||
clap::builder::OsStringValueParser::new().try_map(|value| {
|
||||
if value.is_empty() {
|
||||
Err(clap::Error::new(clap::error::ErrorKind::InvalidValue))
|
||||
} else if value == "-" {
|
||||
Ok(Input::Stdin)
|
||||
} else {
|
||||
Ok(Input::Path(value.into()))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/// The clap value parser used by `CompileCommand.output`
|
||||
fn output_value_parser(value: &str) -> Result<Output, clap::error::Error> {
|
||||
// Empty value also handled by clap for `Option<Output>`
|
||||
if value.is_empty() {
|
||||
Err(clap::Error::new(clap::error::ErrorKind::InvalidValue))
|
||||
} else if value == "-" {
|
||||
Ok(Output::Stdout)
|
||||
} else {
|
||||
Ok(Output::Path(value.into()))
|
||||
}
|
||||
fn make_output_value_parser() -> impl TypedValueParser<Value = Output> {
|
||||
clap::builder::OsStringValueParser::new().try_map(|value| {
|
||||
// Empty value also handled by clap for `Option<Output>`
|
||||
if value.is_empty() {
|
||||
Err(clap::Error::new(clap::error::ErrorKind::InvalidValue))
|
||||
} else if value == "-" {
|
||||
Ok(Output::Stdout)
|
||||
} else {
|
||||
Ok(Output::Path(value.into()))
|
||||
}
|
||||
})
|
||||
}
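For context on the value-parser rework above, here is a self-contained sketch, assuming clap 4's builder API, of how a parser built with `OsStringValueParser::new().try_map(..)` plugs into a command. The `demo` command and the pared-down `Input` enum are hypothetical stand-ins, not code from this repository:

```rust
// Sketch: a TypedValueParser that maps "-" to stdin and anything else to a path.
use clap::builder::{OsStringValueParser, TypedValueParser};
use clap::{Arg, Command};
use std::path::PathBuf;

#[derive(Debug, Clone, PartialEq)]
enum Input {
    Stdin,
    Path(PathBuf),
}

fn make_input_value_parser() -> impl TypedValueParser<Value = Input> {
    OsStringValueParser::new().try_map(|value| {
        if value.is_empty() {
            Err(clap::Error::new(clap::error::ErrorKind::InvalidValue))
        } else if value == "-" {
            Ok(Input::Stdin)
        } else {
            Ok(Input::Path(value.into()))
        }
    })
}

fn main() {
    let matches = Command::new("demo")
        .arg(Arg::new("input").value_parser(make_input_value_parser()))
        .get_matches_from(["demo", "-"]);
    // "-" is parsed straight into the typed value.
    assert_eq!(matches.get_one::<Input>("input"), Some(&Input::Stdin));
}
```

Returning `impl TypedValueParser<Value = Input>` keeps the "`-` means stdin" convention in one place while still letting clap report `InvalidValue` for empty input.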
|
||||
|
||||
/// Parses key/value pairs split by the first equal sign.
|
||||
|
@@ -5,16 +5,16 @@ use std::path::{Path, PathBuf};
|
||||
use chrono::{Datelike, Timelike};
|
||||
use codespan_reporting::diagnostic::{Diagnostic, Label};
|
||||
use codespan_reporting::term;
|
||||
use ecow::{eco_format, eco_vec, EcoString, EcoVec};
|
||||
use ecow::{eco_format, EcoString};
|
||||
use parking_lot::RwLock;
|
||||
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
|
||||
use typst::diag::{bail, FileError, Severity, SourceDiagnostic, StrResult, Warned};
|
||||
use typst::diag::{bail, Severity, SourceDiagnostic, StrResult, Warned};
|
||||
use typst::foundations::{Datetime, Smart};
|
||||
use typst::layout::{Frame, PageRanges};
|
||||
use typst::model::Document;
|
||||
use typst::syntax::{FileId, Source, Span};
|
||||
use typst::visualize::Color;
|
||||
use typst::{World, WorldExt};
|
||||
use typst::WorldExt;
|
||||
|
||||
use crate::args::{
|
||||
CompileCommand, DiagnosticFormat, Input, Output, OutputFormat, PageRangeArgument,
|
||||
@@ -96,21 +96,6 @@ pub fn compile_once(
|
||||
Status::Compiling.print(command).unwrap();
|
||||
}
|
||||
|
||||
if let Err(errors) = world
|
||||
.source(world.main())
|
||||
.map_err(|err| hint_invalid_main_file(err, &command.common.input))
|
||||
{
|
||||
set_failed();
|
||||
if watching {
|
||||
Status::Error.print(command).unwrap();
|
||||
}
|
||||
|
||||
print_diagnostics(world, &errors, &[], command.common.diagnostic_format)
|
||||
.map_err(|err| eco_format!("failed to print diagnostics ({err})"))?;
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let Warned { output, warnings } = typst::compile(world);
|
||||
|
||||
match output {
|
||||
@@ -472,60 +457,30 @@ fn write_make_deps(world: &mut SystemWorld, command: &CompileCommand) -> StrResu
|
||||
/// Opens the given file using:
|
||||
/// - The default file viewer if `open` is `None`.
|
||||
/// - The given viewer provided by `open` if it is `Some`.
|
||||
///
|
||||
/// If the file could not be opened, an error is returned.
|
||||
fn open_file(open: Option<&str>, path: &Path) -> StrResult<()> {
|
||||
// Some resource openers require the path to be canonicalized.
|
||||
let path = path
|
||||
.canonicalize()
|
||||
.map_err(|err| eco_format!("failed to canonicalize path ({err})"))?;
|
||||
if let Some(app) = open {
|
||||
open::with_in_background(path, app);
|
||||
open::with_detached(&path, app)
|
||||
.map_err(|err| eco_format!("failed to open file with {} ({})", app, err))
|
||||
} else {
|
||||
open::that_in_background(path);
|
||||
open::that_detached(&path).map_err(|err| {
|
||||
let openers = open::commands(path)
|
||||
.iter()
|
||||
.map(|command| command.get_program().to_string_lossy())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ");
|
||||
eco_format!(
|
||||
"failed to open file with any of these resource openers: {} ({})",
|
||||
openers,
|
||||
err,
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
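The switch from the `*_in_background` helpers to the detached variants of the `open` crate also changes error handling: the detached calls return a result that can be surfaced instead of being silently dropped. A minimal sketch under that assumption (the file name and the `open_in_viewer` helper are made up for illustration):

```rust
// Sketch only: report failures from the detached `open` calls to the user.
use std::path::Path;

fn open_in_viewer(path: &Path, viewer: Option<&str>) -> Result<(), String> {
    let result = match viewer {
        Some(app) => open::with_detached(path, app),
        None => open::that_detached(path),
    };
    result.map_err(|err| format!("failed to open {}: {err}", path.display()))
}

fn main() {
    if let Err(message) = open_in_viewer(Path::new("output.pdf"), None) {
        eprintln!("{message}");
    }
}
```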
|
||||
|
||||
/// Adds useful hints when the main source file couldn't be read
|
||||
/// and returns the final diagnostic.
|
||||
fn hint_invalid_main_file(
|
||||
file_error: FileError,
|
||||
input: &Input,
|
||||
) -> EcoVec<SourceDiagnostic> {
|
||||
let is_utf8_error = matches!(file_error, FileError::InvalidUtf8);
|
||||
let mut diagnostic =
|
||||
SourceDiagnostic::error(Span::detached(), EcoString::from(file_error));
|
||||
|
||||
// Attempt to provide helpful hints for UTF-8 errors.
|
||||
// Perhaps the user mistyped the filename.
|
||||
// For example, they could have written "file.pdf" instead of
|
||||
// "file.typ".
|
||||
if is_utf8_error {
|
||||
if let Input::Path(path) = input {
|
||||
let extension = path.extension();
|
||||
if extension.is_some_and(|extension| extension == "typ") {
|
||||
// No hints if the file is already a .typ file.
|
||||
// The file is indeed just invalid.
|
||||
return eco_vec![diagnostic];
|
||||
}
|
||||
|
||||
match extension {
|
||||
Some(extension) => {
|
||||
diagnostic.hint(eco_format!(
|
||||
"a file with the `.{}` extension is not usually a Typst file",
|
||||
extension.to_string_lossy()
|
||||
));
|
||||
}
|
||||
|
||||
None => {
|
||||
diagnostic
|
||||
.hint("a file without an extension is not usually a Typst file");
|
||||
}
|
||||
};
|
||||
|
||||
if path.with_extension("typ").exists() {
|
||||
diagnostic.hint("check if you meant to use the `.typ` extension instead");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
eco_vec![diagnostic]
|
||||
}
|
||||
|
||||
/// Print diagnostic messages to the terminal.
|
||||
|
@@ -192,8 +192,8 @@ impl World for SystemWorld {
|
||||
&self.book
|
||||
}
|
||||
|
||||
fn main(&self) -> Source {
|
||||
self.source(self.main).unwrap()
|
||||
fn main(&self) -> FileId {
|
||||
self.main
|
||||
}
|
||||
|
||||
fn source(&self, id: FileId) -> FileResult<Source> {
|
||||
|
@@ -49,7 +49,6 @@ pub fn analyze_expr(
|
||||
pub fn analyze_import(world: &dyn World, source: &LinkedNode) -> Option<Value> {
|
||||
// Use span in the node for resolving imports with relative paths.
|
||||
let source_span = source.span();
|
||||
|
||||
let (source, _) = analyze_expr(world, source).into_iter().next()?;
|
||||
if source.scope().is_some() {
|
||||
return Some(source);
|
||||
@@ -73,6 +72,7 @@ pub fn analyze_import(world: &dyn World, source: &LinkedNode) -> Option<Value> {
|
||||
Scopes::new(Some(world.library())),
|
||||
Span::detached(),
|
||||
);
|
||||
|
||||
typst::eval::import(&mut vm, source, source_span, true)
|
||||
.ok()
|
||||
.map(Value::Module)
|
||||
|
@@ -17,8 +17,10 @@ use typst::visualize::Color;
|
||||
use typst::World;
|
||||
use unscanny::Scanner;
|
||||
|
||||
use crate::analyze::{analyze_expr, analyze_import, analyze_labels};
|
||||
use crate::{plain_docs_sentence, summarize_font_family};
|
||||
use crate::{
|
||||
analyze_expr, analyze_import, analyze_labels, named_items, plain_docs_sentence,
|
||||
summarize_font_family,
|
||||
};
|
||||
|
||||
/// Autocomplete a cursor position in a source file.
|
||||
///
|
||||
@@ -334,6 +336,13 @@ fn math_completions(ctx: &mut CompletionContext) {
|
||||
|
||||
/// Complete field accesses.
|
||||
fn complete_field_accesses(ctx: &mut CompletionContext) -> bool {
|
||||
// Used to determine whether trivia nodes are allowed before '.'.
|
||||
// During an inline expression in markup mode trivia nodes exit the inline expression.
|
||||
let in_markup: bool = matches!(
|
||||
ctx.leaf.parent_kind(),
|
||||
None | Some(SyntaxKind::Markup) | Some(SyntaxKind::Ref)
|
||||
);
|
||||
|
||||
// Behind an expression plus dot: "emoji.|".
|
||||
if_chain! {
|
||||
if ctx.leaf.kind() == SyntaxKind::Dot
|
||||
@@ -341,6 +350,7 @@ fn complete_field_accesses(ctx: &mut CompletionContext) -> bool {
|
||||
&& ctx.leaf.text() == ".");
|
||||
if ctx.leaf.range().end == ctx.cursor;
|
||||
if let Some(prev) = ctx.leaf.prev_sibling();
|
||||
if !in_markup || prev.range().end == ctx.leaf.range().start;
|
||||
if prev.is::<ast::Expr>();
|
||||
if prev.parent_kind() != Some(SyntaxKind::Markup) ||
|
||||
prev.prev_sibling_kind() == Some(SyntaxKind::Hash);
|
||||
@@ -376,12 +386,12 @@ fn field_access_completions(
|
||||
value: &Value,
|
||||
styles: &Option<Styles>,
|
||||
) {
|
||||
for (name, value) in value.ty().scope().iter() {
|
||||
for (name, value, _) in value.ty().scope().iter() {
|
||||
ctx.value_completion(Some(name.clone()), value, true, None);
|
||||
}
|
||||
|
||||
if let Some(scope) = value.scope() {
|
||||
for (name, value) in scope.iter() {
|
||||
for (name, value, _) in scope.iter() {
|
||||
ctx.value_completion(Some(name.clone()), value, true, None);
|
||||
}
|
||||
}
|
||||
@@ -547,7 +557,7 @@ fn import_item_completions<'a>(
|
||||
ctx.snippet_completion("*", "*", "Import everything.");
|
||||
}
|
||||
|
||||
for (name, value) in scope.iter() {
|
||||
for (name, value, _) in scope.iter() {
|
||||
if existing.iter().all(|item| item.original_name().as_str() != name) {
|
||||
ctx.value_completion(Some(name.clone()), value, false, None);
|
||||
}
|
||||
@@ -1319,62 +1329,12 @@ impl<'a> CompletionContext<'a> {
|
||||
/// Filters the global/math scope with the given filter.
|
||||
fn scope_completions(&mut self, parens: bool, filter: impl Fn(&Value) -> bool) {
|
||||
let mut defined = BTreeSet::new();
|
||||
|
||||
let mut ancestor = Some(self.leaf.clone());
|
||||
while let Some(node) = &ancestor {
|
||||
let mut sibling = Some(node.clone());
|
||||
while let Some(node) = &sibling {
|
||||
if let Some(v) = node.cast::<ast::LetBinding>() {
|
||||
for ident in v.kind().bindings() {
|
||||
defined.insert(ident.get().clone());
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(v) = node.cast::<ast::ModuleImport>() {
|
||||
let imports = v.imports();
|
||||
match imports {
|
||||
None | Some(ast::Imports::Wildcard) => {
|
||||
if let Some(value) = node
|
||||
.children()
|
||||
.find(|child| child.is::<ast::Expr>())
|
||||
.and_then(|source| analyze_import(self.world, &source))
|
||||
{
|
||||
if imports.is_none() {
|
||||
defined.extend(value.name().map(Into::into));
|
||||
} else if let Some(scope) = value.scope() {
|
||||
for (name, _) in scope.iter() {
|
||||
defined.insert(name.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Some(ast::Imports::Items(items)) => {
|
||||
for item in items.iter() {
|
||||
defined.insert(item.bound_name().get().clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sibling = node.prev_sibling();
|
||||
named_items(self.world, self.leaf.clone(), |name| {
|
||||
if name.value().as_ref().map_or(true, &filter) {
|
||||
defined.insert(name.name().clone());
|
||||
}
|
||||
|
||||
if let Some(parent) = node.parent() {
|
||||
if let Some(v) = parent.cast::<ast::ForLoop>() {
|
||||
if node.prev_sibling_kind() != Some(SyntaxKind::In) {
|
||||
let pattern = v.pattern();
|
||||
for ident in pattern.bindings() {
|
||||
defined.insert(ident.get().clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ancestor = Some(parent.clone());
|
||||
continue;
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
None::<()>
|
||||
});
|
||||
|
||||
let in_math = matches!(
|
||||
self.leaf.parent_kind(),
|
||||
@@ -1385,7 +1345,7 @@ impl<'a> CompletionContext<'a> {
|
||||
);
|
||||
|
||||
let scope = if in_math { self.math } else { self.global };
|
||||
for (name, value) in scope.iter() {
|
||||
for (name, value, _) in scope.iter() {
|
||||
if filter(value) && !defined.contains(name) {
|
||||
self.value_completion(Some(name.clone()), value, parens, None);
|
||||
}
|
||||
@@ -1433,6 +1393,16 @@ mod tests {
|
||||
test("#().", 4, &["insert", "remove", "len", "all"], &["foo"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_whitespace_in_autocomplete() {
|
||||
//Check that extra space before '.' is handled correctly.
|
||||
test("#() .", 5, &[], &["insert", "remove", "len", "all"]);
|
||||
test("#{() .}", 6, &["insert", "remove", "len", "all"], &["foo"]);
|
||||
|
||||
test("#() .a", 6, &[], &["insert", "remove", "len", "all"]);
|
||||
test("#{() .a}", 7, &["at", "any", "all"], &["foo"]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_before_window_char_boundary() {
|
||||
// Check that the `before_window` doesn't slice into invalid byte
|
||||
|
crates/typst-ide/src/definition.rs (262 added lines, new file)
@@ -0,0 +1,262 @@
|
||||
use ecow::EcoString;
|
||||
use typst::foundations::{Label, Module, Selector, Value};
|
||||
use typst::model::Document;
|
||||
use typst::syntax::ast::AstNode;
|
||||
use typst::syntax::{ast, LinkedNode, Side, Source, Span, SyntaxKind};
|
||||
use typst::World;
|
||||
|
||||
use crate::{analyze_import, deref_target, named_items, DerefTarget, NamedItem};
|
||||
|
||||
/// Find the definition of the item under the cursor.
|
||||
///
|
||||
/// Passing a `document` (from a previous compilation) is optional, but enhances
|
||||
/// the definition search. Label definitions, for instance, are only generated
|
||||
/// when the document is available.
|
||||
pub fn definition(
|
||||
world: &dyn World,
|
||||
document: Option<&Document>,
|
||||
source: &Source,
|
||||
cursor: usize,
|
||||
side: Side,
|
||||
) -> Option<Definition> {
|
||||
let root = LinkedNode::new(source.root());
|
||||
let leaf = root.leaf_at(cursor, side)?;
|
||||
|
||||
let mut use_site = match deref_target(leaf.clone())? {
|
||||
DerefTarget::VarAccess(node) | DerefTarget::Callee(node) => node,
|
||||
DerefTarget::IncludePath(path) | DerefTarget::ImportPath(path) => {
|
||||
let import_item =
|
||||
analyze_import(world, &path).and_then(|v| v.cast::<Module>().ok())?;
|
||||
return Some(Definition::module(&import_item, path.span(), Span::detached()));
|
||||
}
|
||||
DerefTarget::Ref(r) => {
|
||||
let label = Label::new(r.cast::<ast::Ref>()?.target());
|
||||
let sel = Selector::Label(label);
|
||||
let elem = document?.introspector.query_first(&sel)?;
|
||||
let span = elem.span();
|
||||
return Some(Definition {
|
||||
kind: DefinitionKind::Label,
|
||||
name: label.as_str().into(),
|
||||
value: Some(Value::Label(label)),
|
||||
span,
|
||||
name_span: Span::detached(),
|
||||
});
|
||||
}
|
||||
DerefTarget::Label(..) | DerefTarget::Code(..) => {
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
||||
let mut has_path = false;
|
||||
while let Some(node) = use_site.cast::<ast::FieldAccess>() {
|
||||
has_path = true;
|
||||
use_site = use_site.find(node.target().span())?;
|
||||
}
|
||||
|
||||
let name = use_site.cast::<ast::Ident>()?.get().clone();
|
||||
let src = named_items(world, use_site, |item: NamedItem| {
|
||||
if *item.name() != name {
|
||||
return None;
|
||||
}
|
||||
|
||||
match item {
|
||||
NamedItem::Var(name) => {
|
||||
let name_span = name.span();
|
||||
let span = find_let_binding(source, name_span);
|
||||
Some(Definition::item(name.get().clone(), span, name_span, None))
|
||||
}
|
||||
NamedItem::Fn(name) => {
|
||||
let name_span = name.span();
|
||||
let span = find_let_binding(source, name_span);
|
||||
Some(
|
||||
Definition::item(name.get().clone(), span, name_span, None)
|
||||
.with_kind(DefinitionKind::Function),
|
||||
)
|
||||
}
|
||||
NamedItem::Module(item, site) => Some(Definition::module(
|
||||
item,
|
||||
site.span(),
|
||||
matches!(site.kind(), SyntaxKind::Ident)
|
||||
.then_some(site.span())
|
||||
.unwrap_or_else(Span::detached),
|
||||
)),
|
||||
NamedItem::Import(name, name_span, value) => Some(Definition::item(
|
||||
name.clone(),
|
||||
Span::detached(),
|
||||
name_span,
|
||||
value.cloned(),
|
||||
)),
|
||||
}
|
||||
});
|
||||
|
||||
let src = src.or_else(|| {
|
||||
let in_math = matches!(
|
||||
leaf.parent_kind(),
|
||||
Some(SyntaxKind::Equation)
|
||||
| Some(SyntaxKind::Math)
|
||||
| Some(SyntaxKind::MathFrac)
|
||||
| Some(SyntaxKind::MathAttach)
|
||||
);
|
||||
|
||||
let library = world.library();
|
||||
let scope = if in_math { library.math.scope() } else { library.global.scope() };
|
||||
for (item_name, value, span) in scope.iter() {
|
||||
if *item_name == name {
|
||||
return Some(Definition::item(
|
||||
name,
|
||||
span,
|
||||
Span::detached(),
|
||||
Some(value.clone()),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
})?;
|
||||
|
||||
(!has_path).then_some(src)
|
||||
}
|
||||
|
||||
/// A definition of some item.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Definition {
|
||||
/// The name of the definition.
|
||||
pub name: EcoString,
|
||||
/// The kind of the definition.
|
||||
pub kind: DefinitionKind,
|
||||
/// An instance of the definition, if available.
|
||||
pub value: Option<Value>,
|
||||
/// The source span of the entire definition. May be detached if unknown.
|
||||
pub span: Span,
|
||||
/// The span of the definition's name. May be detached if unknown.
|
||||
pub name_span: Span,
|
||||
}
|
||||
|
||||
impl Definition {
|
||||
fn item(name: EcoString, span: Span, name_span: Span, value: Option<Value>) -> Self {
|
||||
Self {
|
||||
name,
|
||||
kind: match value {
|
||||
Some(Value::Func(_)) => DefinitionKind::Function,
|
||||
_ => DefinitionKind::Variable,
|
||||
},
|
||||
value,
|
||||
span,
|
||||
name_span,
|
||||
}
|
||||
}
|
||||
|
||||
fn module(module: &Module, span: Span, name_span: Span) -> Self {
|
||||
Definition {
|
||||
name: module.name().clone(),
|
||||
kind: DefinitionKind::Module,
|
||||
value: Some(Value::Module(module.clone())),
|
||||
span,
|
||||
name_span,
|
||||
}
|
||||
}
|
||||
|
||||
fn with_kind(self, kind: DefinitionKind) -> Self {
|
||||
Self { kind, ..self }
|
||||
}
|
||||
}
|
||||
|
||||
/// A kind of item that is definition.
|
||||
#[derive(Debug, Clone, PartialEq, Hash)]
|
||||
pub enum DefinitionKind {
|
||||
/// ```plain
|
||||
/// let foo;
|
||||
/// ^^^^^^^^ span
|
||||
/// ^^^ name_span
|
||||
/// ```
|
||||
Variable,
|
||||
/// ```plain
|
||||
/// let foo(it) = it;
|
||||
/// ^^^^^^^^^^^^^^^^^ span
|
||||
/// ^^^ name_span
|
||||
/// ```
|
||||
Function,
|
||||
/// Case 1
|
||||
/// ```plain
|
||||
/// import "foo.typ": *
|
||||
/// ^^^^^^^^^ span
|
||||
/// name_span is detached
|
||||
/// ```
|
||||
///
|
||||
/// Case 2
|
||||
/// ```plain
|
||||
/// import "foo.typ" as bar: *
|
||||
/// span ^^^
|
||||
/// name_span ^^^
|
||||
/// ```
|
||||
Module,
|
||||
/// ```plain
|
||||
/// <foo>
|
||||
/// ^^^^^ span
|
||||
/// name_span is detached
|
||||
/// ```
|
||||
Label,
|
||||
}
|
||||
|
||||
fn find_let_binding(source: &Source, name_span: Span) -> Span {
|
||||
let node = LinkedNode::new(source.root());
|
||||
std::iter::successors(node.find(name_span).as_ref(), |n| n.parent())
|
||||
.find(|n| matches!(n.kind(), SyntaxKind::LetBinding))
|
||||
.map(|s| s.span())
|
||||
.unwrap_or_else(Span::detached)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::ops::Range;
|
||||
|
||||
use typst::foundations::{IntoValue, Label, NativeElement, Value};
|
||||
use typst::syntax::Side;
|
||||
use typst::WorldExt;
|
||||
|
||||
use super::{definition, DefinitionKind as Kind};
|
||||
use crate::tests::TestWorld;
|
||||
|
||||
#[track_caller]
|
||||
fn test<T>(
|
||||
text: &str,
|
||||
cursor: usize,
|
||||
name: &str,
|
||||
kind: Kind,
|
||||
value: Option<T>,
|
||||
range: Option<Range<usize>>,
|
||||
) where
|
||||
T: IntoValue,
|
||||
{
|
||||
let world = TestWorld::new(text);
|
||||
let doc = typst::compile(&world).output.ok();
|
||||
let actual = definition(&world, doc.as_ref(), &world.main, cursor, Side::After)
|
||||
.map(|d| (d.kind, d.name, world.range(d.span), d.value));
|
||||
assert_eq!(
|
||||
actual,
|
||||
Some((kind, name.into(), range, value.map(IntoValue::into_value)))
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_definition() {
|
||||
test("#let x; #x", 9, "x", Kind::Variable, None::<Value>, Some(1..6));
|
||||
test("#let x() = {}; #x", 16, "x", Kind::Function, None::<Value>, Some(1..13));
|
||||
test(
|
||||
"#table",
|
||||
1,
|
||||
"table",
|
||||
Kind::Function,
|
||||
Some(typst::model::TableElem::elem()),
|
||||
None,
|
||||
);
|
||||
test(
|
||||
"#figure[] <hi> See @hi",
|
||||
21,
|
||||
"hi",
|
||||
Kind::Label,
|
||||
Some(Label::new("hi")),
|
||||
Some(1..9),
|
||||
);
|
||||
}
|
||||
}
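A hedged sketch of how an editor integration might consume the `definition` function and `DefinitionKind` defined above; the `world`, `document`, and `source` values are assumed to come from the host application, and `describe_symbol` is a hypothetical helper (no `main` here, since constructing a `World` is host-specific):

```rust
// Sketch: turn a cursor position into a short human-readable description.
use typst::model::Document;
use typst::syntax::{Side, Source};
use typst::World;
use typst_ide::{definition, DefinitionKind};

fn describe_symbol(
    world: &dyn World,
    document: Option<&Document>,
    source: &Source,
    cursor: usize,
) -> Option<String> {
    let def = definition(world, document, source, cursor, Side::Before)?;
    Some(match def.kind {
        DefinitionKind::Function => format!("function `{}`", def.name),
        DefinitionKind::Variable => format!("variable `{}`", def.name),
        DefinitionKind::Module => format!("module `{}`", def.name),
        DefinitionKind::Label => format!("label <{}>", def.name),
    })
}
```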
|
@@ -2,12 +2,16 @@
|
||||
|
||||
mod analyze;
|
||||
mod complete;
|
||||
mod definition;
|
||||
mod jump;
|
||||
mod matchers;
|
||||
mod tooltip;
|
||||
|
||||
pub use self::analyze::analyze_labels;
|
||||
pub use self::analyze::{analyze_expr, analyze_import, analyze_labels};
|
||||
pub use self::complete::{autocomplete, Completion, CompletionKind};
|
||||
pub use self::definition::{definition, Definition, DefinitionKind};
|
||||
pub use self::jump::{jump_from_click, jump_from_cursor, Jump};
|
||||
pub use self::matchers::{deref_target, named_items, DerefTarget, NamedItem};
|
||||
pub use self::tooltip::{tooltip, Tooltip};
|
||||
|
||||
use std::fmt::Write;
|
||||
@@ -135,8 +139,8 @@ mod tests {
|
||||
&self.base.book
|
||||
}
|
||||
|
||||
fn main(&self) -> Source {
|
||||
self.main.clone()
|
||||
fn main(&self) -> FileId {
|
||||
self.main.id()
|
||||
}
|
||||
|
||||
fn source(&self, id: FileId) -> FileResult<Source> {
|
||||
|
crates/typst-ide/src/matchers.rs (270 added lines, new file)
@@ -0,0 +1,270 @@
|
||||
use ecow::EcoString;
|
||||
use typst::foundations::{Module, Value};
|
||||
use typst::syntax::ast::AstNode;
|
||||
use typst::syntax::{ast, LinkedNode, Span, SyntaxKind, SyntaxNode};
|
||||
use typst::World;
|
||||
|
||||
use crate::analyze_import;
|
||||
|
||||
/// Find the named items starting from the given position.
|
||||
pub fn named_items<T>(
|
||||
world: &dyn World,
|
||||
position: LinkedNode,
|
||||
mut recv: impl FnMut(NamedItem) -> Option<T>,
|
||||
) -> Option<T> {
|
||||
let mut ancestor = Some(position);
|
||||
while let Some(node) = &ancestor {
|
||||
let mut sibling = Some(node.clone());
|
||||
while let Some(node) = &sibling {
|
||||
if let Some(v) = node.cast::<ast::LetBinding>() {
|
||||
let kind = if matches!(v.kind(), ast::LetBindingKind::Closure(..)) {
|
||||
NamedItem::Fn
|
||||
} else {
|
||||
NamedItem::Var
|
||||
};
|
||||
for ident in v.kind().bindings() {
|
||||
if let Some(res) = recv(kind(ident)) {
|
||||
return Some(res);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(v) = node.cast::<ast::ModuleImport>() {
|
||||
let imports = v.imports();
|
||||
let source = node
|
||||
.children()
|
||||
.find(|child| child.is::<ast::Expr>())
|
||||
.and_then(|source: LinkedNode| {
|
||||
Some((analyze_import(world, &source)?, source))
|
||||
});
|
||||
let source = source.as_ref();
|
||||
|
||||
// Seeing the module itself.
|
||||
if let Some((value, source)) = source {
|
||||
let site = match (imports, v.new_name()) {
|
||||
// ```plain
|
||||
// import "foo" as name;
|
||||
// import "foo" as name: ..;
|
||||
// ```
|
||||
(_, Some(name)) => Some(name.to_untyped()),
|
||||
// ```plain
|
||||
// import "foo";
|
||||
// ```
|
||||
(None, None) => Some(source.get()),
|
||||
// ```plain
|
||||
// import "foo": ..;
|
||||
// ```
|
||||
(Some(..), None) => None,
|
||||
};
|
||||
|
||||
if let Some((site, value)) =
|
||||
site.zip(value.clone().cast::<Module>().ok())
|
||||
{
|
||||
if let Some(res) = recv(NamedItem::Module(&value, site)) {
|
||||
return Some(res);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Seeing the imported items.
|
||||
match imports {
|
||||
// ```plain
|
||||
// import "foo";
|
||||
// ```
|
||||
None => {}
|
||||
// ```plain
|
||||
// import "foo": *;
|
||||
// ```
|
||||
Some(ast::Imports::Wildcard) => {
|
||||
if let Some(scope) = source.and_then(|(value, _)| value.scope()) {
|
||||
for (name, value, span) in scope.iter() {
|
||||
let item = NamedItem::Import(name, span, Some(value));
|
||||
if let Some(res) = recv(item) {
|
||||
return Some(res);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// ```plain
|
||||
// import "foo": items;
|
||||
// ```
|
||||
Some(ast::Imports::Items(items)) => {
|
||||
for item in items.iter() {
|
||||
let original = item.original_name();
|
||||
let bound = item.bound_name();
|
||||
let scope = source.and_then(|(value, _)| value.scope());
|
||||
let span = scope
|
||||
.and_then(|s| s.get_span(&original))
|
||||
.unwrap_or(Span::detached())
|
||||
.or(bound.span());
|
||||
|
||||
let value = scope.and_then(|s| s.get(&original));
|
||||
if let Some(res) =
|
||||
recv(NamedItem::Import(bound.get(), span, value))
|
||||
{
|
||||
return Some(res);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sibling = node.prev_sibling();
|
||||
}
|
||||
|
||||
if let Some(parent) = node.parent() {
|
||||
if let Some(v) = parent.cast::<ast::ForLoop>() {
|
||||
if node.prev_sibling_kind() != Some(SyntaxKind::In) {
|
||||
let pattern = v.pattern();
|
||||
for ident in pattern.bindings() {
|
||||
if let Some(res) = recv(NamedItem::Var(ident)) {
|
||||
return Some(res);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ancestor = Some(parent.clone());
|
||||
continue;
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// An item that is named.
|
||||
pub enum NamedItem<'a> {
|
||||
/// A variable item.
|
||||
Var(ast::Ident<'a>),
|
||||
/// A function item.
|
||||
Fn(ast::Ident<'a>),
|
||||
/// A (imported) module item.
|
||||
Module(&'a Module, &'a SyntaxNode),
|
||||
/// An imported item.
|
||||
Import(&'a EcoString, Span, Option<&'a Value>),
|
||||
}
|
||||
|
||||
impl<'a> NamedItem<'a> {
|
||||
pub(crate) fn name(&self) -> &'a EcoString {
|
||||
match self {
|
||||
NamedItem::Var(ident) => ident.get(),
|
||||
NamedItem::Fn(ident) => ident.get(),
|
||||
NamedItem::Module(value, _) => value.name(),
|
||||
NamedItem::Import(name, _, _) => name,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn value(&self) -> Option<Value> {
|
||||
match self {
|
||||
NamedItem::Var(..) | NamedItem::Fn(..) => None,
|
||||
NamedItem::Module(value, _) => Some(Value::Module((*value).clone())),
|
||||
NamedItem::Import(_, _, value) => value.cloned(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Categorize an expression into common classes IDE functionality can operate
|
||||
/// on.
|
||||
pub fn deref_target(node: LinkedNode) -> Option<DerefTarget<'_>> {
|
||||
// Move to the first ancestor that is an expression.
|
||||
let mut ancestor = node;
|
||||
while !ancestor.is::<ast::Expr>() {
|
||||
ancestor = ancestor.parent()?.clone();
|
||||
}
|
||||
|
||||
// Identify convenient expression kinds.
|
||||
let expr_node = ancestor;
|
||||
let expr = expr_node.cast::<ast::Expr>()?;
|
||||
Some(match expr {
|
||||
ast::Expr::Label(..) => DerefTarget::Label(expr_node),
|
||||
ast::Expr::Ref(..) => DerefTarget::Ref(expr_node),
|
||||
ast::Expr::FuncCall(call) => {
|
||||
DerefTarget::Callee(expr_node.find(call.callee().span())?)
|
||||
}
|
||||
ast::Expr::Set(set) => DerefTarget::Callee(expr_node.find(set.target().span())?),
|
||||
ast::Expr::Ident(..) | ast::Expr::MathIdent(..) | ast::Expr::FieldAccess(..) => {
|
||||
DerefTarget::VarAccess(expr_node)
|
||||
}
|
||||
ast::Expr::Str(..) => {
|
||||
let parent = expr_node.parent()?;
|
||||
if parent.kind() == SyntaxKind::ModuleImport {
|
||||
DerefTarget::ImportPath(expr_node)
|
||||
} else if parent.kind() == SyntaxKind::ModuleInclude {
|
||||
DerefTarget::IncludePath(expr_node)
|
||||
} else {
|
||||
DerefTarget::Code(expr_node.kind(), expr_node)
|
||||
}
|
||||
}
|
||||
_ if expr.hash()
|
||||
|| matches!(expr_node.kind(), SyntaxKind::MathIdent | SyntaxKind::Error) =>
|
||||
{
|
||||
DerefTarget::Code(expr_node.kind(), expr_node)
|
||||
}
|
||||
_ => return None,
|
||||
})
|
||||
}
|
||||
|
||||
/// Classes of expressions that can be operated on by IDE functionality.
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum DerefTarget<'a> {
|
||||
/// A label expression.
|
||||
Label(LinkedNode<'a>),
|
||||
/// A reference expression.
|
||||
Ref(LinkedNode<'a>),
|
||||
/// A variable access expression.
|
||||
///
|
||||
/// It can be either an identifier or a field access.
|
||||
VarAccess(LinkedNode<'a>),
|
||||
/// A function call expression.
|
||||
Callee(LinkedNode<'a>),
|
||||
/// An import path expression.
|
||||
ImportPath(LinkedNode<'a>),
|
||||
/// An include path expression.
|
||||
IncludePath(LinkedNode<'a>),
|
||||
/// Any code expression.
|
||||
Code(SyntaxKind, LinkedNode<'a>),
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use typst::syntax::{LinkedNode, Side};
|
||||
|
||||
use crate::{named_items, tests::TestWorld};
|
||||
|
||||
#[track_caller]
|
||||
fn has_named_items(text: &str, cursor: usize, containing: &str) -> bool {
|
||||
let world = TestWorld::new(text);
|
||||
|
||||
let src = world.main.clone();
|
||||
let node = LinkedNode::new(src.root());
|
||||
let leaf = node.leaf_at(cursor, Side::After).unwrap();
|
||||
|
||||
let res = named_items(&world, leaf, |s| {
|
||||
if containing == s.name() {
|
||||
return Some(true);
|
||||
}
|
||||
|
||||
None
|
||||
});
|
||||
|
||||
res.unwrap_or_default()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_simple_named_items() {
|
||||
// Has named items
|
||||
assert!(has_named_items(r#"#let a = 1;#let b = 2;"#, 8, "a"));
|
||||
assert!(has_named_items(r#"#let a = 1;#let b = 2;"#, 15, "a"));
|
||||
|
||||
// Doesn't have named items
|
||||
assert!(!has_named_items(r#"#let a = 1;#let b = 2;"#, 8, "b"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_import_named_items() {
|
||||
// Cannot test much.
|
||||
assert!(has_named_items(r#"#import "foo.typ": a; #(a);"#, 24, "a"));
|
||||
}
|
||||
}
|
@@ -11,14 +11,13 @@ use typst::syntax::{ast, LinkedNode, Side, Source, SyntaxKind};
|
||||
use typst::utils::{round_2, Numeric};
|
||||
use typst::World;
|
||||
|
||||
use crate::analyze::{analyze_expr, analyze_labels};
|
||||
use crate::{plain_docs_sentence, summarize_font_family};
|
||||
use crate::{analyze_expr, analyze_labels, plain_docs_sentence, summarize_font_family};
|
||||
|
||||
/// Describe the item under the cursor.
|
||||
///
|
||||
/// Passing a `document` (from a previous compilation) is optional, but enhances
|
||||
/// the autocompletions. Label completions, for instance, are only generated
|
||||
/// when the document is available.
|
||||
/// the tooltips. Label tooltips, for instance, are only generated when the
|
||||
/// document is available.
|
||||
pub fn tooltip(
|
||||
world: &dyn World,
|
||||
document: Option<&Document>,
|
||||
@@ -127,7 +126,7 @@ fn closure_tooltip(leaf: &LinkedNode) -> Option<Tooltip> {
|
||||
|
||||
let captures = visitor.finish();
|
||||
let mut names: Vec<_> =
|
||||
captures.iter().map(|(name, _)| eco_format!("`{name}`")).collect();
|
||||
captures.iter().map(|(name, ..)| eco_format!("`{name}`")).collect();
|
||||
if names.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
@@ -92,7 +92,7 @@ pub struct Builder<'a, R = ()> {
|
||||
state: State,
|
||||
/// Stack of saved graphic states.
|
||||
saves: Vec<State>,
|
||||
/// Wheter any stroke or fill was not totally opaque.
|
||||
/// Whether any stroke or fill was not totally opaque.
|
||||
uses_opacities: bool,
|
||||
/// All clickable links that are present in this content.
|
||||
links: Vec<(Destination, Rect)>,
|
||||
@@ -129,7 +129,7 @@ struct State {
|
||||
/// The color space of the current fill paint.
|
||||
fill_space: Option<Name<'static>>,
|
||||
/// The current external graphic state.
|
||||
external_graphics_state: Option<ExtGState>,
|
||||
external_graphics_state: ExtGState,
|
||||
/// The current stroke paint.
|
||||
stroke: Option<FixedStroke>,
|
||||
/// The color space of the current stroke paint.
|
||||
@@ -148,7 +148,7 @@ impl State {
|
||||
font: None,
|
||||
fill: None,
|
||||
fill_space: None,
|
||||
external_graphics_state: None,
|
||||
external_graphics_state: ExtGState::default(),
|
||||
stroke: None,
|
||||
stroke_space: None,
|
||||
text_rendering_mode: TextRenderingMode::Fill,
|
||||
@@ -191,12 +191,13 @@ impl Builder<'_, ()> {
|
||||
}
|
||||
|
||||
fn set_external_graphics_state(&mut self, graphics_state: &ExtGState) {
|
||||
let current_state = self.state.external_graphics_state.as_ref();
|
||||
if current_state != Some(graphics_state) {
|
||||
let current_state = &self.state.external_graphics_state;
|
||||
if current_state != graphics_state {
|
||||
let index = self.resources.ext_gs.insert(*graphics_state);
|
||||
let name = eco_format!("Gs{index}");
|
||||
self.content.set_parameters(Name(name.as_bytes()));
|
||||
|
||||
self.state.external_graphics_state = *graphics_state;
|
||||
if graphics_state.uses_opacities() {
|
||||
self.uses_opacities = true;
|
||||
}
|
||||
@@ -204,29 +205,27 @@ impl Builder<'_, ()> {
|
||||
}
|
||||
|
||||
fn set_opacities(&mut self, stroke: Option<&FixedStroke>, fill: Option<&Paint>) {
|
||||
let stroke_opacity = stroke
|
||||
.map(|stroke| {
|
||||
let color = match &stroke.paint {
|
||||
Paint::Solid(color) => *color,
|
||||
Paint::Gradient(_) | Paint::Pattern(_) => return 255,
|
||||
};
|
||||
let get_opacity = |paint: &Paint| {
|
||||
let color = match paint {
|
||||
Paint::Solid(color) => *color,
|
||||
Paint::Gradient(_) | Paint::Pattern(_) => return 255,
|
||||
};
|
||||
|
||||
color.alpha().map_or(255, |v| (v * 255.0).round() as u8)
|
||||
})
|
||||
.unwrap_or(255);
|
||||
let fill_opacity = fill
|
||||
.map(|paint| {
|
||||
let color = match paint {
|
||||
Paint::Solid(color) => *color,
|
||||
Paint::Gradient(_) | Paint::Pattern(_) => return 255,
|
||||
};
|
||||
color.alpha().map_or(255, |v| (v * 255.0).round() as u8)
|
||||
};
|
||||
|
||||
color.alpha().map_or(255, |v| (v * 255.0).round() as u8)
|
||||
})
|
||||
.unwrap_or(255);
|
||||
let stroke_opacity = stroke.map_or(255, |stroke| get_opacity(&stroke.paint));
|
||||
let fill_opacity = fill.map_or(255, get_opacity);
|
||||
self.set_external_graphics_state(&ExtGState { stroke_opacity, fill_opacity });
|
||||
}
|
||||
|
||||
fn reset_opacities(&mut self) {
|
||||
self.set_external_graphics_state(&ExtGState {
|
||||
stroke_opacity: 255,
|
||||
fill_opacity: 255,
|
||||
});
|
||||
}
|
||||
|
||||
pub fn transform(&mut self, transform: Transform) {
|
||||
let Transform { sx, ky, kx, sy, tx, ty } = transform;
|
||||
self.state.transform = self.state.transform.pre_concat(transform);
|
||||
@@ -542,6 +541,8 @@ fn write_color_glyphs(ctx: &mut Builder, pos: Point, text: TextItemView) {
|
||||
|
||||
let mut last_font = None;
|
||||
|
||||
ctx.reset_opacities();
|
||||
|
||||
ctx.content.begin_text();
|
||||
ctx.content.set_text_matrix([1.0, 0.0, 0.0, -1.0, x, y]);
|
||||
// So that the next call to ctx.set_font() will change the font to one that
|
||||
@@ -671,6 +672,8 @@ fn write_image(ctx: &mut Builder, x: f32, y: f32, image: &Image, size: Size) {
|
||||
image
|
||||
});
|
||||
|
||||
ctx.reset_opacities();
|
||||
|
||||
let name = eco_format!("Im{index}");
|
||||
let w = size.x.to_f32();
|
||||
let h = size.y.to_f32();
|
||||
|
@@ -17,6 +17,7 @@ typst-utils = { workspace = true }
|
||||
ecow = { workspace = true }
|
||||
once_cell = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
toml = { workspace = true }
|
||||
unicode-ident = { workspace = true }
|
||||
unicode-math-class = { workspace = true }
|
||||
unicode-script = { workspace = true }
|
||||
|
@@ -91,6 +91,11 @@ impl FileId {
|
||||
Self::new(self.package().cloned(), self.vpath().join(path))
|
||||
}
|
||||
|
||||
/// The same file location, but with a different extension.
|
||||
pub fn with_extension(&self, extension: &str) -> Self {
|
||||
Self::new(self.package().cloned(), self.vpath().with_extension(extension))
|
||||
}
|
||||
|
||||
/// Construct from a raw number.
|
||||
pub(crate) const fn from_raw(v: u16) -> Self {
|
||||
Self(v)
|
||||
|
@@ -650,7 +650,7 @@ impl Lexer<'_> {
|
||||
Some('-') if !s.at(['-', '?']) => {}
|
||||
Some('.') if !s.at("..") => {}
|
||||
Some('h') if !s.at("ttp://") && !s.at("ttps://") => {}
|
||||
Some('@') if !s.at(is_id_start) => {}
|
||||
Some('@') if !s.at(is_valid_in_label_literal) => {}
|
||||
_ => break,
|
||||
}
|
||||
|
||||
|
@@ -1,37 +1,101 @@
|
||||
//! Package manifest parsing.
|
||||
|
||||
use std::collections::BTreeMap;
|
||||
use std::fmt::{self, Debug, Display, Formatter};
|
||||
use std::str::FromStr;
|
||||
|
||||
use ecow::{eco_format, EcoString};
|
||||
use serde::de::IgnoredAny;
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer};
|
||||
use unscanny::Scanner;
|
||||
|
||||
use crate::is_ident;
|
||||
|
||||
/// A type alias for a map of key-value pairs used to collect unknown fields
|
||||
/// where values are completely discarded.
|
||||
pub type UnknownFields = BTreeMap<EcoString, IgnoredAny>;
|
||||
|
||||
/// A parsed package manifest.
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
|
||||
///
|
||||
/// The `unknown_fields` contains fields which were found but not expected.
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
pub struct PackageManifest {
|
||||
/// Details about the package itself.
|
||||
pub package: PackageInfo,
|
||||
/// Details about the template, if the package is one.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub template: Option<TemplateInfo>,
|
||||
/// The tools section for third-party configuration.
|
||||
#[serde(default)]
|
||||
pub tool: ToolInfo,
|
||||
/// All parsed but unknown fields, this can be used for validation.
|
||||
#[serde(flatten, skip_serializing)]
|
||||
pub unknown_fields: UnknownFields,
|
||||
}
|
||||
|
||||
/// The `[tool]` key in the manifest. This field can be used to retrieve
|
||||
/// 3rd-party tool configuration.
|
||||
///
|
||||
// # Examples
|
||||
/// ```
|
||||
/// # use serde::{Deserialize, Serialize};
|
||||
/// # use ecow::EcoString;
|
||||
/// # use typst_syntax::package::PackageManifest;
|
||||
/// #[derive(Debug, PartialEq, Serialize, Deserialize)]
|
||||
/// struct MyTool {
|
||||
/// key: EcoString,
|
||||
/// }
|
||||
///
|
||||
/// let mut manifest: PackageManifest = toml::from_str(r#"
|
||||
/// [package]
|
||||
/// name = "package"
|
||||
/// version = "0.1.0"
|
||||
/// entrypoint = "src/lib.typ"
|
||||
///
|
||||
/// [tool.my-tool]
|
||||
/// key = "value"
|
||||
/// "#)?;
|
||||
///
|
||||
/// let my_tool = manifest
|
||||
/// .tool
|
||||
/// .sections
|
||||
/// .remove("my-tool")
|
||||
/// .ok_or("tool.my-tool section missing")?;
|
||||
/// let my_tool = MyTool::deserialize(my_tool)?;
|
||||
///
|
||||
/// assert_eq!(my_tool, MyTool { key: "value".into() });
|
||||
/// # Ok::<_, Box<dyn std::error::Error>>(())
|
||||
/// ```
|
||||
#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize)]
|
||||
pub struct ToolInfo {
|
||||
/// Any fields parsed in the tool section.
|
||||
#[serde(flatten)]
|
||||
pub sections: BTreeMap<EcoString, toml::Table>,
|
||||
}
|
||||
|
||||
/// The `[template]` key in the manifest.
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
|
||||
///
|
||||
/// The `unknown_fields` contains fields which were found but not expected.
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
pub struct TemplateInfo {
|
||||
/// The path of the starting point within the package.
|
||||
/// The directory within the package that contains the files that should be
|
||||
/// copied into the user's new project directory.
|
||||
pub path: EcoString,
|
||||
/// The path of the entrypoint relative to the starting point's `path`.
|
||||
/// A path relative to the template's path that points to the file serving
|
||||
/// as the compilation target.
|
||||
pub entrypoint: EcoString,
|
||||
/// A path relative to the package's root that points to a PNG or lossless
|
||||
/// WebP thumbnail for the template.
|
||||
pub thumbnail: EcoString,
|
||||
/// All parsed but unknown fields, this can be used for validation.
|
||||
#[serde(flatten, skip_serializing)]
|
||||
pub unknown_fields: UnknownFields,
|
||||
}
|
||||
|
||||
/// The `[package]` key in the manifest.
|
||||
///
|
||||
/// More fields are specified, but they are not relevant to the compiler.
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
|
||||
/// The `unknown_fields` contains fields which were found but not expected.
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
pub struct PackageInfo {
|
||||
/// The name of the package within its namespace.
|
||||
pub name: EcoString,
|
||||
@@ -39,8 +103,42 @@ pub struct PackageInfo {
|
||||
pub version: PackageVersion,
|
||||
/// The path of the entrypoint into the package.
|
||||
pub entrypoint: EcoString,
|
||||
/// A list of the package's authors.
|
||||
#[serde(default, skip_serializing_if = "Vec::is_empty")]
|
||||
pub authors: Vec<EcoString>,
|
||||
/// The package's license.
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub license: Option<EcoString>,
|
||||
/// A short description of the package.
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub description: Option<EcoString>,
|
||||
/// A link to the package's web presence.
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub homepage: Option<EcoString>,
|
||||
/// A link to the repository where this package is developed.
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub repository: Option<EcoString>,
|
||||
/// An array of search keywords for the package.
|
||||
#[serde(default, skip_serializing_if = "Vec::is_empty")]
|
||||
pub keywords: Vec<EcoString>,
|
||||
/// An array with up to three of the predefined categories to help users
|
||||
/// discover the package.
|
||||
#[serde(default, skip_serializing_if = "Vec::is_empty")]
|
||||
pub categories: Vec<EcoString>,
|
||||
/// An array of disciplines defining the target audience for which the
|
||||
/// package is useful.
|
||||
#[serde(default, skip_serializing_if = "Vec::is_empty")]
|
||||
pub disciplines: Vec<EcoString>,
|
||||
/// The minimum required compiler version for the package.
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub compiler: Option<VersionBound>,
|
||||
/// An array of globs specifying files that should not be part of the
|
||||
/// published bundle.
|
||||
#[serde(default, skip_serializing_if = "Vec::is_empty")]
|
||||
pub exclude: Vec<EcoString>,
|
||||
/// All parsed but unknown fields, this can be used for validation.
|
||||
#[serde(flatten, skip_serializing)]
|
||||
pub unknown_fields: UnknownFields,
|
||||
}
|
||||
|
||||
impl PackageManifest {
|
||||
@@ -423,4 +521,97 @@ mod tests {
|
||||
assert!(!v1_1_1.matches_lt(&VersionBound::from_str("1.1").unwrap()));
|
||||
assert!(v1_1_1.matches_lt(&VersionBound::from_str("1.2").unwrap()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn minimal_manifest() {
|
||||
assert_eq!(
|
||||
toml::from_str::<PackageManifest>(
|
||||
r#"
|
||||
[package]
|
||||
name = "package"
|
||||
version = "0.1.0"
|
||||
entrypoint = "src/lib.typ"
|
||||
"#
|
||||
),
|
||||
Ok(PackageManifest {
|
||||
package: PackageInfo {
|
||||
name: "package".into(),
|
||||
version: PackageVersion { major: 0, minor: 1, patch: 0 },
|
||||
entrypoint: "src/lib.typ".into(),
|
||||
authors: vec![],
|
||||
license: None,
|
||||
description: None,
|
||||
homepage: None,
|
||||
repository: None,
|
||||
keywords: vec![],
|
||||
categories: vec![],
|
||||
disciplines: vec![],
|
||||
compiler: None,
|
||||
exclude: vec![],
|
||||
unknown_fields: BTreeMap::new(),
|
||||
},
|
||||
template: None,
|
||||
tool: ToolInfo { sections: BTreeMap::new() },
|
||||
unknown_fields: BTreeMap::new(),
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn tool_section() {
|
||||
// NOTE: tool section must be table of tables, but we can't easily
|
||||
// compare the error structurally
|
||||
assert!(toml::from_str::<PackageManifest>(
|
||||
r#"
|
||||
[package]
|
||||
name = "package"
|
||||
version = "0.1.0"
|
||||
entrypoint = "src/lib.typ"
|
||||
|
||||
[tool]
|
||||
not-table = "str"
|
||||
"#
|
||||
)
|
||||
.is_err());
|
||||
|
||||
#[derive(Debug, PartialEq, Serialize, Deserialize)]
|
||||
struct MyTool {
|
||||
key: EcoString,
|
||||
}
|
||||
|
||||
let mut manifest: PackageManifest = toml::from_str(
|
||||
r#"
|
||||
[package]
|
||||
name = "package"
|
||||
version = "0.1.0"
|
||||
entrypoint = "src/lib.typ"
|
||||
|
||||
[tool.my-tool]
|
||||
key = "value"
|
||||
"#,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let my_tool = manifest.tool.sections.remove("my-tool").unwrap();
|
||||
let my_tool = MyTool::deserialize(my_tool).unwrap();
|
||||
|
||||
assert_eq!(my_tool, MyTool { key: "value".into() });
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn unknown_keys() {
|
||||
let manifest: PackageManifest = toml::from_str(
|
||||
r#"
|
||||
[package]
|
||||
name = "package"
|
||||
version = "0.1.0"
|
||||
entrypoint = "src/lib.typ"
|
||||
|
||||
[unknown]
|
||||
"#,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
assert!(manifest.unknown_fields.contains_key("unknown"));
|
||||
}
|
||||
}
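Since the `unknown_fields` maps above are meant for validation, a consumer could surface the collected keys as warnings. A minimal sketch under that assumption (`warn_unknown` is a hypothetical helper, not part of this crate):

```rust
// Hypothetical consumer of the `unknown_fields` maps collected above.
use typst_syntax::package::PackageManifest;

fn warn_unknown(manifest: &PackageManifest) {
    for key in manifest.unknown_fields.keys() {
        eprintln!("warning: unknown manifest key `{key}`");
    }
    for key in manifest.package.unknown_fields.keys() {
        eprintln!("warning: unknown `[package]` key `{key}`");
    }
}

fn main() {
    // Same shape as the `unknown_keys` test above.
    let manifest: PackageManifest = toml::from_str(
        r#"
        [package]
        name = "package"
        version = "0.1.0"
        entrypoint = "src/lib.typ"

        [unknown]
        "#,
    )
    .unwrap();
    warn_unknown(&manifest); // prints: warning: unknown manifest key `unknown`
}
```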
|
||||
|
@@ -395,11 +395,6 @@ fn math_expr_prec(p: &mut Parser, min_prec: usize, stop: SyntaxKind) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Separate primes and superscripts to different attachments.
|
||||
if primed && p.current() == SyntaxKind::Hat {
|
||||
p.wrap(m, SyntaxKind::MathAttach);
|
||||
}
|
||||
|
||||
let Some((kind, stop, assoc, mut prec)) = math_op(p.current()) else {
|
||||
// No attachments, so we need to wrap primes as attachment.
|
||||
if primed {
|
||||
@ -431,7 +426,7 @@ fn math_expr_prec(p: &mut Parser, min_prec: usize, stop: SyntaxKind) {
|
||||
math_expr_prec(p, prec, stop);
|
||||
math_unparen(p, m2);
|
||||
|
||||
if p.eat_if(SyntaxKind::Underscore) || (!primed && p.eat_if(SyntaxKind::Hat)) {
|
||||
if p.eat_if(SyntaxKind::Underscore) || p.eat_if(SyntaxKind::Hat) {
|
||||
let m3 = p.marker();
|
||||
math_expr_prec(p, prec, SyntaxKind::End);
|
||||
math_unparen(p, m3);
|
||||
|
@@ -85,6 +85,11 @@ impl VirtualPath {
|
||||
Self::new(path)
|
||||
}
|
||||
}
|
||||
|
||||
/// The same path, but with a different extension.
|
||||
pub fn with_extension(&self, extension: &str) -> Self {
|
||||
Self(self.0.with_extension(extension))
|
||||
}
|
||||
}
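A small usage sketch of the new `VirtualPath::with_extension`: it delegates to `PathBuf::with_extension`, so only the file extension changes. The concrete paths are made up, and the equality check assumes `VirtualPath`'s derived `PartialEq`:

```rust
// Illustrative only: derive a `.typ` sibling of a mistyped virtual path.
use typst_syntax::VirtualPath;

fn main() {
    let wrong = VirtualPath::new("chapters/intro.pdf");
    let fixed = wrong.with_extension("typ");
    assert_eq!(fixed, VirtualPath::new("chapters/intro.typ"));
}
```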
|
||||
|
||||
impl Debug for VirtualPath {
|
||||
|
@@ -83,6 +83,15 @@ impl Span {
|
||||
self.0.get() & ((1 << Self::BITS) - 1)
|
||||
}
|
||||
|
||||
/// Return `other` if `self` is detached and `self` otherwise.
|
||||
pub fn or(self, other: Self) -> Self {
|
||||
if self.is_detached() {
|
||||
other
|
||||
} else {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolve a file location relative to this span's source.
|
||||
pub fn resolve_path(self, path: &str) -> Result<FileId, EcoString> {
|
||||
let Some(file) = self.id() else {
|
||||
|
@@ -1,7 +1,10 @@
|
||||
use comemo::{Tracked, TrackedMut};
|
||||
use ecow::{eco_format, EcoVec};
|
||||
use ecow::{eco_format, EcoString, EcoVec};
|
||||
|
||||
use crate::diag::{bail, error, At, HintedStrResult, SourceResult, Trace, Tracepoint};
|
||||
use crate::diag::{
|
||||
bail, error, At, HintedStrResult, HintedString, SourceDiagnostic, SourceResult,
|
||||
Trace, Tracepoint,
|
||||
};
|
||||
use crate::engine::{Engine, Sink, Traced};
|
||||
use crate::eval::{Access, Eval, FlowEvent, Route, Vm};
|
||||
use crate::foundations::{
|
||||
@@ -10,7 +13,7 @@ use crate::foundations::{
|
||||
};
|
||||
use crate::introspection::Introspector;
|
||||
use crate::math::LrElem;
|
||||
use crate::syntax::ast::{self, AstNode};
|
||||
use crate::syntax::ast::{self, AstNode, Ident};
|
||||
use crate::syntax::{Span, Spanned, SyntaxNode};
|
||||
use crate::text::TextElem;
|
||||
use crate::utils::LazyHash;
|
||||
@@ -32,135 +35,25 @@ impl Eval for ast::FuncCall<'_> {
|
||||
}
|
||||
|
||||
// Try to evaluate as a call to an associated function or field.
|
||||
let (callee, mut args) = if let ast::Expr::FieldAccess(access) = callee {
|
||||
let (callee, args) = if let ast::Expr::FieldAccess(access) = callee {
|
||||
let target = access.target();
|
||||
let target_span = target.span();
|
||||
let field = access.field();
|
||||
let field_span = field.span();
|
||||
|
||||
let target = if is_mutating_method(&field) {
|
||||
let mut args = args.eval(vm)?.spanned(span);
|
||||
let target = target.access(vm)?;
|
||||
|
||||
// Only arrays and dictionaries have mutable methods.
|
||||
if matches!(target, Value::Array(_) | Value::Dict(_)) {
|
||||
args.span = span;
|
||||
let point = || Tracepoint::Call(Some(field.get().clone()));
|
||||
return call_method_mut(target, &field, args, span).trace(
|
||||
vm.world(),
|
||||
point,
|
||||
span,
|
||||
);
|
||||
}
|
||||
|
||||
target.clone()
|
||||
} else {
|
||||
access.target().eval(vm)?
|
||||
};
|
||||
|
||||
let mut args = args.eval(vm)?.spanned(span);
|
||||
|
||||
// Handle plugins.
|
||||
if let Value::Plugin(plugin) = &target {
|
||||
let bytes = args.all::<Bytes>()?;
|
||||
args.finish()?;
|
||||
return Ok(plugin.call(&field, bytes).at(span)?.into_value());
|
||||
}
|
||||
|
||||
// Prioritize associated functions on the value's type (i.e.,
|
||||
// methods) over its fields. A function call on a field is only
|
||||
// allowed for functions, types, modules (because they are scopes),
|
||||
// and symbols (because they have modifiers).
|
||||
//
|
||||
// For dictionaries, it is not allowed because it would be ambiguous
|
||||
// (prioritizing associated functions would make an addition of a
|
||||
// new associated function a breaking change and prioritizing fields
|
||||
// would break associated functions for certain dictionaries).
|
||||
if let Some(callee) = target.ty().scope().get(&field) {
|
||||
let this = Arg {
|
||||
span: target_span,
|
||||
name: None,
|
||||
value: Spanned::new(target, target_span),
|
||||
};
|
||||
args.span = span;
|
||||
args.items.insert(0, this);
|
||||
(callee.clone(), args)
|
||||
} else if matches!(
|
||||
target,
|
||||
Value::Symbol(_) | Value::Func(_) | Value::Type(_) | Value::Module(_)
|
||||
) {
|
||||
(target.field(&field).at(field_span)?, args)
|
||||
} else {
|
||||
let mut error = error!(
|
||||
field_span,
|
||||
"type {} has no method `{}`",
|
||||
target.ty(),
|
||||
field.as_str()
|
||||
);
|
||||
|
||||
let mut field_hint = || {
|
||||
if target.field(&field).is_ok() {
|
||||
error.hint(eco_format!(
|
||||
"did you mean to access the field `{}`?",
|
||||
field.as_str()
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
match target {
|
||||
Value::Dict(ref dict) => {
|
||||
if matches!(dict.get(&field), Ok(Value::Func(_))) {
|
||||
error.hint(eco_format!(
|
||||
"to call the function stored in the dictionary, surround \
|
||||
the field access with parentheses, e.g. `(dict.{})(..)`",
|
||||
field.as_str(),
|
||||
));
|
||||
} else {
|
||||
field_hint();
|
||||
}
|
||||
}
|
||||
_ => field_hint(),
|
||||
}
|
||||
|
||||
bail!(error);
|
||||
match eval_field_call(target, field, args, span, vm)? {
|
||||
FieldCall::Normal(callee, args) => (callee, args),
|
||||
FieldCall::Resolved(value) => return Ok(value),
|
||||
}
|
||||
} else {
|
||||
// Function call order: we evaluate the callee before the arguments.
|
||||
(callee.eval(vm)?, args.eval(vm)?.spanned(span))
|
||||
};
|
||||
|
||||
let func_result = callee.clone().cast::<Func>();
|
||||
if in_math && func_result.is_err() {
|
||||
// For non-functions in math, we wrap the arguments in parentheses.
|
||||
let mut body = Content::empty();
|
||||
for (i, arg) in args.all::<Content>()?.into_iter().enumerate() {
|
||||
if i > 0 {
|
||||
body += TextElem::packed(',');
|
||||
}
|
||||
body += arg;
|
||||
}
|
||||
if trailing_comma {
|
||||
body += TextElem::packed(',');
|
||||
}
|
||||
return Ok(Value::Content(
|
||||
callee.display().spanned(callee_span)
|
||||
+ LrElem::new(TextElem::packed('(') + body + TextElem::packed(')'))
|
||||
.pack(),
|
||||
));
|
||||
return wrap_args_in_math(callee, callee_span, args, trailing_comma);
|
||||
}
|
||||
|
||||
let func = func_result
|
||||
.map_err(|mut err| {
|
||||
if let ast::Expr::Ident(ident) = self.callee() {
|
||||
let ident = ident.get();
|
||||
if vm.scopes.check_std_shadowed(ident) {
|
||||
err.hint(eco_format!(
|
||||
"use `std.{}` to access the shadowed standard library function",
|
||||
ident,
|
||||
));
|
||||
}
|
||||
}
|
||||
err
|
||||
})
|
||||
.map_err(|err| hint_if_shadowed_std(vm, &self.callee(), err))
|
||||
.at(callee_span)?;
|
||||
|
||||
let point = || Tracepoint::Call(func.name().map(Into::into));
|
||||
@ -371,6 +264,108 @@ pub(crate) fn call_closure(
    Ok(output)
}

/// This is used only as the return value of `eval_field_call`.
/// - `Normal` means that we have a function to call and the arguments to call it with.
/// - `Resolved` means that we have already resolved the call and have the value.
enum FieldCall {
    Normal(Value, Args),
    Resolved(Value),
}

/// Evaluate a field call's callee and arguments.
///
/// This follows the normal function call order: we evaluate the callee before the
/// arguments.
///
/// Prioritize associated functions on the value's type (e.g., methods) over its fields.
/// A function call on a field is only allowed for functions, types, modules (because
/// they are scopes), and symbols (because they have modifiers or associated functions).
///
/// For dictionaries, it is not allowed because it would be ambiguous - prioritizing
/// associated functions would make an addition of a new associated function a breaking
/// change and prioritizing fields would break associated functions for certain
/// dictionaries.
fn eval_field_call(
    target_expr: ast::Expr,
    field: Ident,
    args: ast::Args,
    span: Span,
    vm: &mut Vm,
) -> SourceResult<FieldCall> {
    // Evaluate the field-call's target and overall arguments.
    let (target, mut args) = if is_mutating_method(&field) {
        // If `field` looks like a mutating method, we evaluate the arguments first,
        // because `target_expr.access(vm)` mutably borrows the `vm`, so that we can't
        // evaluate the arguments after it.
        let args = args.eval(vm)?.spanned(span);
        // However, this difference from the normal call order is not observable because
        // expressions like `(1, arr.len(), 2, 3).push(arr.pop())` evaluate the target to
        // a temporary which we disallow mutation on (returning an error).
        // Theoretically this could be observed if a method matching `is_mutating_method`
        // was added to some type in the future and we didn't update this function.
        match target_expr.access(vm)? {
            // Only arrays and dictionaries have mutable methods.
            target @ (Value::Array(_) | Value::Dict(_)) => {
                let value = call_method_mut(target, &field, args, span);
                let point = || Tracepoint::Call(Some(field.get().clone()));
                return Ok(FieldCall::Resolved(value.trace(vm.world(), point, span)?));
            }
            target => (target.clone(), args),
        }
    } else {
        let target = target_expr.eval(vm)?;
        let args = args.eval(vm)?.spanned(span);
        (target, args)
    };

    if let Value::Plugin(plugin) = &target {
        // Call plugins by converting args to bytes.
        let bytes = args.all::<Bytes>()?;
        args.finish()?;
        let value = plugin.call(&field, bytes).at(span)?.into_value();
        Ok(FieldCall::Resolved(value))
    } else if let Some(callee) = target.ty().scope().get(&field) {
        args.insert(0, target_expr.span(), target);
        Ok(FieldCall::Normal(callee.clone(), args))
    } else if matches!(
        target,
        Value::Symbol(_) | Value::Func(_) | Value::Type(_) | Value::Module(_)
    ) {
        // Certain value types may have their own ways to access method fields.
        // e.g. `$arrow.r(v)$`, `table.cell[..]`
        let value = target.field(&field).at(field.span())?;
        Ok(FieldCall::Normal(value, args))
    } else {
        // Otherwise we cannot call this field.
        bail!(missing_field_call_error(target, field))
    }
}

/// Produce an error when we cannot call the field.
fn missing_field_call_error(target: Value, field: Ident) -> SourceDiagnostic {
    let mut error =
        error!(field.span(), "type {} has no method `{}`", target.ty(), field.as_str());

    match target {
        Value::Dict(ref dict) if matches!(dict.get(&field), Ok(Value::Func(_))) => {
            error.hint(eco_format!(
                "to call the function stored in the dictionary, surround \
                 the field access with parentheses, e.g. `(dict.{})(..)`",
                field.as_str(),
            ));
        }
        _ if target.field(&field).is_ok() => {
            error.hint(eco_format!(
                "did you mean to access the field `{}`?",
                field.as_str(),
            ));
        }
        _ => {}
    }
    error
}

/// Check if the expression is in a math context.
fn in_math(expr: ast::Expr) -> bool {
    match expr {
        ast::Expr::MathIdent(_) => true,
@ -379,6 +374,46 @@ fn in_math(expr: ast::Expr) -> bool {
    }
}

/// For non-functions in math, we wrap the arguments in parentheses.
fn wrap_args_in_math(
    callee: Value,
    callee_span: Span,
    mut args: Args,
    trailing_comma: bool,
) -> SourceResult<Value> {
    let mut body = Content::empty();
    for (i, arg) in args.all::<Content>()?.into_iter().enumerate() {
        if i > 0 {
            body += TextElem::packed(',');
        }
        body += arg;
    }
    if trailing_comma {
        body += TextElem::packed(',');
    }
    Ok(Value::Content(
        callee.display().spanned(callee_span)
            + LrElem::new(TextElem::packed('(') + body + TextElem::packed(')')).pack(),
    ))
}

/// Provide a hint if the callee is a shadowed standard library function.
fn hint_if_shadowed_std(
    vm: &mut Vm,
    callee: &ast::Expr,
    mut err: HintedString,
) -> HintedString {
    if let ast::Expr::Ident(ident) = callee {
        let ident = ident.get();
        if vm.scopes.check_std_shadowed(ident) {
            err.hint(eco_format!(
                "use `std.{ident}` to access the shadowed standard library function",
            ));
        }
    }
    err
}
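The dispatch order documented for `eval_field_call` above (type-bound methods first, plain field access only for scope-like values, and a hard error for dictionaries) can be modelled in isolation. The sketch below uses a made-up `Val` enum and a plain map of type methods; it illustrates only the priority rule, not Typst's real `Value`, `Scope`, or `Args` machinery.

    use std::collections::HashMap;

    /// Hypothetical, simplified stand-in for Typst's `Value`.
    #[derive(Clone)]
    enum Val {
        Func(&'static str),
        Module(HashMap<String, Val>),
        Dict(HashMap<String, Val>),
    }

    /// Toy model of the dispatch order documented for `eval_field_call`.
    fn resolve_field_call(
        target: &Val,
        field: &str,
        type_methods: &HashMap<&str, Val>,
    ) -> Result<Val, String> {
        // 1. Associated functions on the value's type always win.
        if let Some(method) = type_methods.get(field) {
            return Ok(method.clone());
        }
        // 2. Plain fields are only callable on scope-like values.
        if let Val::Module(scope) = target {
            return scope
                .get(field)
                .cloned()
                .ok_or_else(|| format!("module has no field `{field}`"));
        }
        // 3. Dictionaries are rejected to keep `dict.f(..)` unambiguous.
        if let Val::Dict(_) = target {
            return Err(format!(
                "no method `{field}`; call a stored function as `(dict.{field})(..)`"
            ));
        }
        Err(format!("value has no method `{field}`"))
    }

    fn main() {
        let methods = HashMap::from([("len", Val::Func("len"))]);
        let dict = Val::Dict(HashMap::new());
        assert!(resolve_field_call(&dict, "len", &methods).is_ok()); // method wins
        assert!(resolve_field_call(&dict, "stored", &HashMap::new()).is_err());
        let module = Val::Module(HashMap::from([("pi".to_string(), Val::Func("pi"))]));
        assert!(resolve_field_call(&module, "pi", &HashMap::new()).is_ok());
    }
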
|
||||
/// A visitor that determines which variables to capture for a closure.
|
||||
pub struct CapturesVisitor<'a> {
|
||||
external: Option<&'a Scopes<'a>>,
|
||||
@ -410,9 +445,11 @@ impl<'a> CapturesVisitor<'a> {
|
||||
// Identifiers that shouldn't count as captures because they
|
||||
// actually bind a new name are handled below (individually through
|
||||
// the expressions that contain them).
|
||||
Some(ast::Expr::Ident(ident)) => self.capture(&ident, Scopes::get),
|
||||
Some(ast::Expr::Ident(ident)) => {
|
||||
self.capture(ident.get(), ident.span(), Scopes::get)
|
||||
}
|
||||
Some(ast::Expr::MathIdent(ident)) => {
|
||||
self.capture(&ident, Scopes::get_in_math)
|
||||
self.capture(ident.get(), ident.span(), Scopes::get_in_math)
|
||||
}
|
||||
|
||||
// Code and content blocks create a scope.
|
||||
@ -520,13 +557,14 @@ impl<'a> CapturesVisitor<'a> {
|
||||
|
||||
/// Bind a new internal variable.
|
||||
fn bind(&mut self, ident: ast::Ident) {
|
||||
self.internal.top.define(ident.get().clone(), Value::None);
|
||||
self.internal.top.define_ident(ident, Value::None);
|
||||
}
|
||||
|
||||
/// Capture a variable if it isn't internal.
|
||||
fn capture(
|
||||
&mut self,
|
||||
ident: &str,
|
||||
ident: &EcoString,
|
||||
span: Span,
|
||||
getter: impl FnOnce(&'a Scopes<'a>, &str) -> HintedStrResult<&'a Value>,
|
||||
) {
|
||||
if self.internal.get(ident).is_err() {
|
||||
@ -538,7 +576,12 @@ impl<'a> CapturesVisitor<'a> {
|
||||
return;
|
||||
};
|
||||
|
||||
self.captures.define_captured(ident, value.clone(), self.capturer);
|
||||
self.captures.define_captured(
|
||||
ident.clone(),
|
||||
value.clone(),
|
||||
self.capturer,
|
||||
span,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -561,7 +604,7 @@ mod tests {
|
||||
visitor.visit(&root);
|
||||
|
||||
let captures = visitor.finish();
|
||||
let mut names: Vec<_> = captures.iter().map(|(k, _)| k).collect();
|
||||
let mut names: Vec<_> = captures.iter().map(|(k, ..)| k).collect();
|
||||
names.sort();
|
||||
|
||||
assert_eq!(names, result);
|
||||
|
@ -31,7 +31,7 @@ impl Eval for ast::ModuleImport<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(new_name) = &new_name {
|
||||
if let Some(new_name) = new_name {
|
||||
if let ast::Expr::Ident(ident) = self.source() {
|
||||
if ident.as_str() == new_name.as_str() {
|
||||
// Warn on `import x as x`
|
||||
@ -44,7 +44,7 @@ impl Eval for ast::ModuleImport<'_> {
|
||||
}
|
||||
|
||||
// Define renamed module on the scope.
|
||||
vm.scopes.top.define(new_name.as_str(), source.clone());
|
||||
vm.scopes.top.define_ident(new_name, source.clone());
|
||||
}
|
||||
|
||||
let scope = source.scope().unwrap();
|
||||
@ -57,8 +57,8 @@ impl Eval for ast::ModuleImport<'_> {
|
||||
}
|
||||
}
|
||||
Some(ast::Imports::Wildcard) => {
|
||||
for (var, value) in scope.iter() {
|
||||
vm.scopes.top.define(var.clone(), value.clone());
|
||||
for (var, value, span) in scope.iter() {
|
||||
vm.scopes.top.define_spanned(var.clone(), value.clone(), span);
|
||||
}
|
||||
}
|
||||
Some(ast::Imports::Items(items)) => {
|
||||
|
@ -54,7 +54,11 @@ impl Eval for ast::MathAttach<'_> {
|
||||
|
||||
if let Some(expr) = self.top() {
|
||||
elem.push_t(Some(expr.eval_display(vm)?));
|
||||
} else if let Some(primes) = self.primes() {
|
||||
}
|
||||
|
||||
// Always attach primes in scripts style (not limits style),
|
||||
// i.e. at the top-right corner.
|
||||
if let Some(primes) = self.primes() {
|
||||
elem.push_tr(Some(primes.eval(vm)?));
|
||||
}
|
||||
|
||||
|
@ -86,7 +86,7 @@ pub fn eval(
|
||||
.unwrap_or_default()
|
||||
.to_string_lossy();
|
||||
|
||||
Ok(Module::new(name, vm.scopes.top).with_content(output))
|
||||
Ok(Module::new(name, vm.scopes.top).with_content(output).with_file_id(id))
|
||||
}
|
||||
|
||||
/// Evaluate a string as code and return the resulting value.
|
||||
|
@ -47,7 +47,7 @@ impl<'a> Vm<'a> {
|
||||
if self.inspected == Some(var.span()) {
|
||||
self.trace(value.clone());
|
||||
}
|
||||
self.scopes.top.define(var.get().clone(), value);
|
||||
self.scopes.top.define_ident(var, value);
|
||||
}
|
||||
|
||||
/// Trace a value.
|
||||
|
@ -76,6 +76,18 @@ impl Args {
|
||||
self.items.iter().filter(|slot| slot.name.is_none()).count()
|
||||
}
|
||||
|
||||
/// Insert a positional argument at a specific index.
|
||||
pub fn insert(&mut self, index: usize, span: Span, value: Value) {
|
||||
self.items.insert(
|
||||
index,
|
||||
Arg {
|
||||
span: self.span,
|
||||
name: None,
|
||||
value: Spanned::new(value, span),
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
/// Push a positional argument.
|
||||
pub fn push(&mut self, span: Span, value: Value) {
|
||||
self.items.push(Arg {
|
||||
|
@ -259,7 +259,7 @@ impl Array {
|
||||
.ok_or_else(|| out_of_bounds_no_default(index, self.len()))
|
||||
}
|
||||
|
||||
/// Extracts a subslice of the array. Fails with an error if the start or
|
||||
/// Extracts a subslice of the array. Fails with an error if the start or end
|
||||
/// index is out of bounds.
|
||||
#[func]
|
||||
pub fn slice(
|
||||
|
@ -127,7 +127,7 @@ impl Bytes {
|
||||
.ok_or_else(|| out_of_bounds_no_default(index, self.len()))
|
||||
}
|
||||
|
||||
/// Extracts a subslice of the bytes. Fails with an error if the start or
|
||||
/// Extracts a subslice of the bytes. Fails with an error if the start or end
|
||||
/// index is out of bounds.
|
||||
#[func]
|
||||
pub fn slice(
|
||||
|
@ -261,7 +261,7 @@ pub struct ToDict(Dict);
|
||||
|
||||
cast! {
|
||||
ToDict,
|
||||
v: Module => Self(v.scope().iter().map(|(k, v)| (Str::from(k.clone()), v.clone())).collect()),
|
||||
v: Module => Self(v.scope().iter().map(|(k, v, _)| (Str::from(k.clone()), v.clone())).collect()),
|
||||
}
|
||||
|
||||
impl Debug for Dict {
|
||||
|
@ -290,7 +290,7 @@ pub fn eval(
|
||||
let dict = scope;
|
||||
let mut scope = Scope::new();
|
||||
for (key, value) in dict {
|
||||
scope.define(key, value);
|
||||
scope.define_spanned(key, value, span);
|
||||
}
|
||||
crate::eval::eval_string(engine.world, &text, span, mode, scope)
|
||||
}
|
||||
|
@ -5,6 +5,7 @@ use ecow::{eco_format, EcoString};

use crate::diag::StrResult;
use crate::foundations::{repr, ty, Content, Scope, Value};
use crate::syntax::FileId;

/// An evaluated module, either built-in or resulting from a file.
///
@ -43,6 +44,8 @@ struct Repr {
    scope: Scope,
    /// The module's layoutable contents.
    content: Content,
    /// The id of the file which defines the module, if any.
    file_id: Option<FileId>,
}

impl Module {
@ -50,7 +53,7 @@ impl Module {
    pub fn new(name: impl Into<EcoString>, scope: Scope) -> Self {
        Self {
            name: name.into(),
            inner: Arc::new(Repr { scope, content: Content::empty() }),
            inner: Arc::new(Repr { scope, content: Content::empty(), file_id: None }),
        }
    }

@ -72,6 +75,12 @@ impl Module {
        self
    }

    /// Update the module's file id.
    pub fn with_file_id(mut self, file_id: FileId) -> Self {
        Arc::make_mut(&mut self.inner).file_id = Some(file_id);
        self
    }

    /// Get the module's name.
    pub fn name(&self) -> &EcoString {
        &self.name
@ -82,6 +91,13 @@ impl Module {
        &self.inner.scope
    }

    /// Access the module's file id.
    ///
    /// Some modules are not associated with a file, like the built-in modules.
    pub fn file_id(&self) -> Option<FileId> {
        self.inner.file_id
    }

    /// Access the module's scope, mutably.
    pub fn scope_mut(&mut self) -> &mut Scope {
        &mut Arc::make_mut(&mut self.inner).scope
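The `with_file_id` builder added above relies on `Arc::make_mut` to clone the shared `Repr` only when it is actually shared. A standalone sketch of that copy-on-write builder shape, with made-up `MiniModule`/`MiniRepr` types standing in for the real `Module`:

    use std::sync::Arc;

    #[derive(Clone)]
    struct MiniRepr {
        content: String,
        file_id: Option<u64>,
    }

    #[derive(Clone)]
    struct MiniModule {
        name: String,
        inner: Arc<MiniRepr>,
    }

    impl MiniModule {
        fn new(name: impl Into<String>) -> Self {
            Self {
                name: name.into(),
                inner: Arc::new(MiniRepr { content: String::new(), file_id: None }),
            }
        }

        /// Builder-style update: clones the inner repr only if it is shared.
        fn with_file_id(mut self, file_id: u64) -> Self {
            Arc::make_mut(&mut self.inner).file_id = Some(file_id);
            self
        }

        fn file_id(&self) -> Option<u64> {
            self.inner.file_id
        }
    }

    fn main() {
        let module = MiniModule::new("demo").with_file_id(7);
        println!("{} has file id {:?}", module.name, module.file_id());
    }
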
@ -234,12 +234,12 @@ impl Plugin {
        let ty = func.ty(store.as_context());

        // Check function signature.
        if ty.params().iter().any(|&v| v != wasmi::core::ValueType::I32) {
        if ty.params().iter().any(|&v| v != wasmi::core::ValType::I32) {
            bail!(
                "plugin function `{name}` has a parameter that is not a 32-bit integer"
            );
        }
        if ty.results() != [wasmi::core::ValueType::I32] {
        if ty.results() != [wasmi::core::ValType::I32] {
            bail!("plugin function `{name}` does not return exactly one 32-bit integer");
        }

@ -257,14 +257,14 @@ impl Plugin {
        // Collect the lengths of the argument buffers.
        let lengths = args
            .iter()
            .map(|a| wasmi::Value::I32(a.len() as i32))
            .map(|a| wasmi::Val::I32(a.len() as i32))
            .collect::<Vec<_>>();

        // Store the input data.
        store.data_mut().args = args;

        // Call the function.
        let mut code = wasmi::Value::I32(-1);
        let mut code = wasmi::Val::I32(-1);
        func.call(store.as_context_mut(), &lengths, std::slice::from_mut(&mut code))
            .map_err(|err| eco_format!("plugin panicked: {err}"))?;
        if let Some(MemoryError { offset, length, write }) =
@ -281,8 +281,8 @@ impl Plugin {

        // Parse the function's return value.
        match code {
            wasmi::Value::I32(0) => {}
            wasmi::Value::I32(1) => match std::str::from_utf8(&output) {
            wasmi::Val::I32(0) => {}
            wasmi::Val::I32(1) => match std::str::from_utf8(&output) {
                Ok(message) => bail!("plugin errored with: {message}"),
                Err(_) => {
                    bail!("plugin errored, but did not return a valid error message")
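The renames above track wasmi 0.34, where `wasmi::Value` and `wasmi::core::ValueType` became `wasmi::Val` and `wasmi::core::ValType`. A rough sketch of the call shape used here, assuming a `Func` and `Store` have already been created elsewhere; this mirrors only the calls visible in the diff and is not the actual plugin code:

    use wasmi::{AsContext, AsContextMut};

    /// Call a plugin-style function that takes the byte lengths of its
    /// arguments as i32 parameters and returns a single i32 status code.
    fn call_plugin_func(
        store: &mut wasmi::Store<()>,
        func: &wasmi::Func,
        arg_lens: &[usize],
    ) -> Result<i32, wasmi::Error> {
        // Check the signature against the expected shape.
        let ty = func.ty(store.as_context());
        assert!(ty.params().iter().all(|&p| p == wasmi::core::ValType::I32));
        assert!(ty.results() == [wasmi::core::ValType::I32]);

        // Lengths go in as `Val::I32`; the status code comes back the same way.
        let inputs: Vec<wasmi::Val> =
            arg_lens.iter().map(|&len| wasmi::Val::I32(len as i32)).collect();
        let mut code = wasmi::Val::I32(-1);
        func.call(store.as_context_mut(), &inputs, std::slice::from_mut(&mut code))?;

        match code {
            wasmi::Val::I32(value) => Ok(value),
            _ => unreachable!("result type was checked above"),
        }
    }
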
@ -9,6 +9,8 @@ use crate::foundations::{
|
||||
Element, Func, IntoValue, Module, NativeElement, NativeFunc, NativeFuncData,
|
||||
NativeType, Type, Value,
|
||||
};
|
||||
use crate::syntax::ast::{self, AstNode};
|
||||
use crate::syntax::Span;
|
||||
use crate::utils::Static;
|
||||
use crate::Library;
|
||||
|
||||
@ -152,6 +154,23 @@ impl Scope {
|
||||
/// Bind a value to a name.
|
||||
#[track_caller]
|
||||
pub fn define(&mut self, name: impl Into<EcoString>, value: impl IntoValue) {
|
||||
self.define_spanned(name, value, Span::detached())
|
||||
}
|
||||
|
||||
/// Bind a value to a name defined by an identifier.
|
||||
#[track_caller]
|
||||
pub fn define_ident(&mut self, ident: ast::Ident, value: impl IntoValue) {
|
||||
self.define_spanned(ident.get().clone(), value, ident.span())
|
||||
}
|
||||
|
||||
/// Bind a value to a name.
|
||||
#[track_caller]
|
||||
pub fn define_spanned(
|
||||
&mut self,
|
||||
name: impl Into<EcoString>,
|
||||
value: impl IntoValue,
|
||||
span: Span,
|
||||
) {
|
||||
let name = name.into();
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
@ -159,8 +178,24 @@ impl Scope {
|
||||
panic!("duplicate definition: {name}");
|
||||
}
|
||||
|
||||
self.map
|
||||
.insert(name, Slot::new(value.into_value(), Kind::Normal, self.category));
|
||||
self.map.insert(
|
||||
name,
|
||||
Slot::new(value.into_value(), span, Kind::Normal, self.category),
|
||||
);
|
||||
}
|
||||
|
||||
/// Define a captured, immutable binding.
|
||||
pub fn define_captured(
|
||||
&mut self,
|
||||
name: EcoString,
|
||||
value: Value,
|
||||
capturer: Capturer,
|
||||
span: Span,
|
||||
) {
|
||||
self.map.insert(
|
||||
name,
|
||||
Slot::new(value.into_value(), span, Kind::Captured(capturer), self.category),
|
||||
);
|
||||
}
|
||||
|
||||
/// Define a native function through a Rust type that shadows the function.
|
||||
@ -191,19 +226,6 @@ impl Scope {
|
||||
self.define(module.name().clone(), module);
|
||||
}
|
||||
|
||||
/// Define a captured, immutable binding.
|
||||
pub fn define_captured(
|
||||
&mut self,
|
||||
var: impl Into<EcoString>,
|
||||
value: impl IntoValue,
|
||||
capturer: Capturer,
|
||||
) {
|
||||
self.map.insert(
|
||||
var.into(),
|
||||
Slot::new(value.into_value(), Kind::Captured(capturer), self.category),
|
||||
);
|
||||
}
|
||||
|
||||
/// Try to access a variable immutably.
|
||||
pub fn get(&self, var: &str) -> Option<&Value> {
|
||||
self.map.get(var).map(Slot::read)
|
||||
@ -217,14 +239,19 @@ impl Scope {
|
||||
.map(|res| res.map_err(HintedString::from))
|
||||
}
|
||||
|
||||
/// Get the span of a definition.
|
||||
pub fn get_span(&self, var: &str) -> Option<Span> {
|
||||
Some(self.map.get(var)?.span)
|
||||
}
|
||||
|
||||
/// Get the category of a definition.
|
||||
pub fn get_category(&self, var: &str) -> Option<Category> {
|
||||
self.map.get(var)?.category
|
||||
}
|
||||
|
||||
/// Iterate over all definitions.
|
||||
pub fn iter(&self) -> impl Iterator<Item = (&EcoString, &Value)> {
|
||||
self.map.iter().map(|(k, v)| (k, v.read()))
|
||||
pub fn iter(&self) -> impl Iterator<Item = (&EcoString, &Value, Span)> {
|
||||
self.map.iter().map(|(k, v)| (k, v.read(), v.span))
|
||||
}
|
||||
}
|
||||
|
||||
@ -264,6 +291,8 @@ struct Slot {
|
||||
value: Value,
|
||||
/// The kind of slot, determines how the value can be accessed.
|
||||
kind: Kind,
|
||||
/// A span associated with the stored value.
|
||||
span: Span,
|
||||
/// The category of the slot.
|
||||
category: Option<Category>,
|
||||
}
|
||||
@ -288,8 +317,8 @@ pub enum Capturer {
|
||||
|
||||
impl Slot {
|
||||
/// Create a new slot.
|
||||
fn new(value: Value, kind: Kind, category: Option<Category>) -> Self {
|
||||
Self { value, kind, category }
|
||||
fn new(value: Value, span: Span, kind: Kind, category: Option<Category>) -> Self {
|
||||
Self { value, span, kind, category }
|
||||
}
|
||||
|
||||
/// Read the value.
|
||||
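Storing a `Span` in each `Slot` (and threading it through `define_spanned`, `define_ident`, and `get_span`) is what lets later diagnostics point back at a binding's definition site. A toy model of that bookkeeping with made-up `MiniScope`/`MiniSpan` types rather than the real `Scope`:

    use std::collections::HashMap;

    /// Stand-in for `Span`: just a byte offset into some source.
    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    struct MiniSpan(usize);

    struct MiniSlot {
        value: String,
        span: MiniSpan,
    }

    #[derive(Default)]
    struct MiniScope {
        map: HashMap<String, MiniSlot>,
    }

    impl MiniScope {
        /// Bind a value together with the span where it was defined.
        fn define_spanned(&mut self, name: &str, value: &str, span: MiniSpan) {
            self.map.insert(name.into(), MiniSlot { value: value.into(), span });
        }

        /// Look up the definition site of a binding, if it exists.
        fn get_span(&self, name: &str) -> Option<MiniSpan> {
            Some(self.map.get(name)?.span)
        }
    }

    fn main() {
        let mut scope = MiniScope::default();
        scope.define_spanned("x", "1pt", MiniSpan(42));
        println!("x = {}", scope.map["x"].value);
        assert_eq!(scope.get_span("x"), Some(MiniSpan(42)));
        assert_eq!(scope.get_span("y"), None);
    }
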
|
@ -116,6 +116,6 @@ impl Tag {
|
||||
|
||||
impl Debug for Tag {
|
||||
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||
write!(f, "Tag({:?})", self.elem)
|
||||
write!(f, "Tag({:?})", self.elem.elem().name())
|
||||
}
|
||||
}
|
||||
|
@ -47,51 +47,7 @@ impl Packed<FlowElem> {
|
||||
styles: StyleChain,
|
||||
regions: Regions,
|
||||
) -> SourceResult<Fragment> {
|
||||
if !regions.size.x.is_finite() && regions.expand.x {
|
||||
bail!(self.span(), "cannot expand into infinite width");
|
||||
}
|
||||
if !regions.size.y.is_finite() && regions.expand.y {
|
||||
bail!(self.span(), "cannot expand into infinite height");
|
||||
}
|
||||
|
||||
// Check whether we have just a single multiple-layoutable element. In
|
||||
// that case, we do not set `expand.y` to `false`, but rather keep it at
|
||||
// its original value (since that element can take the full space).
|
||||
//
|
||||
// Consider the following code: `block(height: 5cm, pad(10pt, align(bottom, ..)))`
|
||||
// Thanks to the code below, the expansion will be passed all the way
|
||||
// through the block & pad and reach the innermost flow, so that things
|
||||
// are properly bottom-aligned.
|
||||
let mut alone = false;
|
||||
if let [child] = self.children().elements() {
|
||||
alone = child.is::<BlockElem>();
|
||||
}
|
||||
|
||||
let mut layouter = FlowLayouter::new(locator, styles, regions, alone);
|
||||
for (child, styles) in self.children().chain(&styles) {
|
||||
if let Some(elem) = child.to_packed::<TagElem>() {
|
||||
layouter.layout_tag(elem);
|
||||
} else if child.is::<FlushElem>() {
|
||||
layouter.flush(engine)?;
|
||||
} else if let Some(elem) = child.to_packed::<VElem>() {
|
||||
layouter.layout_spacing(engine, elem, styles)?;
|
||||
} else if let Some(elem) = child.to_packed::<ParElem>() {
|
||||
layouter.layout_par(engine, elem, styles)?;
|
||||
} else if let Some(elem) = child.to_packed::<BlockElem>() {
|
||||
layouter.layout_block(engine, elem, styles)?;
|
||||
} else if let Some(placed) = child.to_packed::<PlaceElem>() {
|
||||
layouter.layout_placed(engine, placed, styles)?;
|
||||
} else if child.is::<ColbreakElem>() {
|
||||
if !layouter.regions.backlog.is_empty() || layouter.regions.last.is_some()
|
||||
{
|
||||
layouter.finish_region(engine, true)?;
|
||||
}
|
||||
} else {
|
||||
bail!(child.span(), "unexpected flow child");
|
||||
}
|
||||
}
|
||||
|
||||
layouter.finish(engine)
|
||||
FlowLayouter::new(engine, self, locator, &styles, regions).layout()
|
||||
}
|
||||
}
|
||||
|
||||
@ -103,13 +59,17 @@ impl Debug for FlowElem {
|
||||
}
|
||||
|
||||
/// Performs flow layout.
|
||||
struct FlowLayouter<'a> {
|
||||
struct FlowLayouter<'a, 'e> {
|
||||
/// The engine.
|
||||
engine: &'a mut Engine<'e>,
|
||||
/// The children that will be arranged into a flow.
|
||||
flow: &'a Packed<FlowElem>,
|
||||
/// Whether this is the root flow.
|
||||
root: bool,
|
||||
/// Provides unique locations to the flow's children.
|
||||
locator: SplitLocator<'a>,
|
||||
/// The shared styles.
|
||||
styles: StyleChain<'a>,
|
||||
styles: &'a StyleChain<'a>,
|
||||
/// The regions to layout children into.
|
||||
regions: Regions<'a>,
|
||||
/// Whether the flow should expand to fill the region.
|
||||
@ -124,7 +84,7 @@ struct FlowLayouter<'a> {
|
||||
/// Spacing and layouted blocks for the current region.
|
||||
items: Vec<FlowItem>,
|
||||
/// A queue of tags that will be attached to the next frame.
|
||||
pending_tags: Vec<Tag>,
|
||||
pending_tags: Vec<&'a Tag>,
|
||||
/// A queue of floating elements.
|
||||
pending_floats: Vec<FlowItem>,
|
||||
/// Whether we have any footnotes in the current region.
|
||||
@ -157,18 +117,27 @@ enum FlowItem {
|
||||
align: Axes<FixedAlignment>,
|
||||
/// Whether the frame sticks to the item after it (for orphan prevention).
|
||||
sticky: bool,
|
||||
/// Whether the frame is movable; that is, kept together with its footnotes.
|
||||
/// Whether the frame is movable; that is, kept together with its
|
||||
/// footnotes.
|
||||
///
|
||||
/// This is true for frames created by paragraphs and [`LayoutSingle`] elements.
|
||||
/// This is true for frames created by paragraphs and
|
||||
/// [`BlockElem::single_layouter`] elements.
|
||||
movable: bool,
|
||||
},
|
||||
/// An absolutely placed frame.
|
||||
Placed {
|
||||
/// The layouted content.
|
||||
frame: Frame,
|
||||
/// Where to place the content horizontally.
|
||||
x_align: FixedAlignment,
|
||||
/// Where to place the content vertically.
|
||||
y_align: Smart<Option<FixedAlignment>>,
|
||||
/// A translation to apply to the content.
|
||||
delta: Axes<Rel<Abs>>,
|
||||
/// Whether the content floats --- i.e. collides with in-flow content.
|
||||
float: bool,
|
||||
/// The amount of space that needs to be kept between the placed content
|
||||
/// and in-flow content. Only relevant if `float` is `true`.
|
||||
clearance: Abs,
|
||||
},
|
||||
/// A footnote frame (can also be the separator).
|
||||
@ -193,24 +162,41 @@ impl FlowItem {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> FlowLayouter<'a> {
|
||||
impl<'a, 'e> FlowLayouter<'a, 'e> {
|
||||
/// Create a new flow layouter.
|
||||
fn new(
|
||||
engine: &'a mut Engine<'e>,
|
||||
flow: &'a Packed<FlowElem>,
|
||||
locator: Locator<'a>,
|
||||
styles: StyleChain<'a>,
|
||||
styles: &'a StyleChain<'a>,
|
||||
mut regions: Regions<'a>,
|
||||
alone: bool,
|
||||
) -> Self {
|
||||
let expand = regions.expand;
|
||||
let root = std::mem::replace(&mut regions.root, false);
|
||||
// Check whether we have just a single multiple-layoutable element. In
|
||||
// that case, we do not set `expand.y` to `false`, but rather keep it at
|
||||
// its original value (since that element can take the full space).
|
||||
//
|
||||
// Consider the following code: `block(height: 5cm, pad(10pt,
|
||||
// align(bottom, ..)))`. Thanks to the code below, the expansion will be
|
||||
// passed all the way through the block & pad and reach the innermost
|
||||
// flow, so that things are properly bottom-aligned.
|
||||
let mut alone = false;
|
||||
if let [child] = flow.children.elements() {
|
||||
alone = child.is::<BlockElem>();
|
||||
}
|
||||
|
||||
// Disable vertical expansion when there are multiple or not directly
|
||||
// layoutable children.
|
||||
let expand = regions.expand;
|
||||
if !alone {
|
||||
regions.expand.y = false;
|
||||
}
|
||||
|
||||
// The children aren't root.
|
||||
let root = std::mem::replace(&mut regions.root, false);
|
||||
|
||||
Self {
|
||||
engine,
|
||||
flow,
|
||||
root,
|
||||
locator: locator.split(),
|
||||
styles,
|
||||
@ -223,52 +209,84 @@ impl<'a> FlowLayouter<'a> {
|
||||
pending_floats: vec![],
|
||||
has_footnotes: false,
|
||||
footnote_config: FootnoteConfig {
|
||||
separator: FootnoteEntry::separator_in(styles),
|
||||
clearance: FootnoteEntry::clearance_in(styles),
|
||||
gap: FootnoteEntry::gap_in(styles),
|
||||
separator: FootnoteEntry::separator_in(*styles),
|
||||
clearance: FootnoteEntry::clearance_in(*styles),
|
||||
gap: FootnoteEntry::gap_in(*styles),
|
||||
},
|
||||
finished: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
/// Layout the flow.
|
||||
fn layout(mut self) -> SourceResult<Fragment> {
|
||||
for (child, styles) in self.flow.children.chain(self.styles) {
|
||||
if let Some(elem) = child.to_packed::<TagElem>() {
|
||||
self.handle_tag(elem);
|
||||
} else if let Some(elem) = child.to_packed::<VElem>() {
|
||||
self.handle_v(elem, styles)?;
|
||||
} else if let Some(elem) = child.to_packed::<ColbreakElem>() {
|
||||
self.handle_colbreak(elem)?;
|
||||
} else if let Some(elem) = child.to_packed::<ParElem>() {
|
||||
self.handle_par(elem, styles)?;
|
||||
} else if let Some(elem) = child.to_packed::<BlockElem>() {
|
||||
self.handle_block(elem, styles)?;
|
||||
} else if let Some(elem) = child.to_packed::<PlaceElem>() {
|
||||
self.handle_place(elem, styles)?;
|
||||
} else if let Some(elem) = child.to_packed::<FlushElem>() {
|
||||
self.handle_flush(elem)?;
|
||||
} else {
|
||||
bail!(child.span(), "unexpected flow child");
|
||||
}
|
||||
}
|
||||
|
||||
self.finish()
|
||||
}
|
||||
|
||||
/// Place explicit metadata into the flow.
|
||||
fn layout_tag(&mut self, elem: &Packed<TagElem>) {
|
||||
self.pending_tags.push(elem.tag.clone());
|
||||
fn handle_tag(&mut self, elem: &'a Packed<TagElem>) {
|
||||
self.pending_tags.push(&elem.tag);
|
||||
}
|
||||
|
||||
/// Layout vertical spacing.
|
||||
fn layout_spacing(
|
||||
&mut self,
|
||||
engine: &mut Engine,
|
||||
v: &Packed<VElem>,
|
||||
styles: StyleChain,
|
||||
) -> SourceResult<()> {
|
||||
self.layout_item(
|
||||
engine,
|
||||
match v.amount() {
|
||||
Spacing::Rel(rel) => FlowItem::Absolute(
|
||||
rel.resolve(styles).relative_to(self.initial.y),
|
||||
v.weakness(styles) > 0,
|
||||
),
|
||||
Spacing::Fr(fr) => FlowItem::Fractional(*fr),
|
||||
},
|
||||
)
|
||||
fn handle_v(&mut self, v: &'a Packed<VElem>, styles: StyleChain) -> SourceResult<()> {
|
||||
self.handle_item(match v.amount {
|
||||
Spacing::Rel(rel) => FlowItem::Absolute(
|
||||
// Resolve the spacing relative to the current base height.
|
||||
rel.resolve(styles).relative_to(self.initial.y),
|
||||
v.weakness(styles) > 0,
|
||||
),
|
||||
Spacing::Fr(fr) => FlowItem::Fractional(fr),
|
||||
})
|
||||
}
|
||||
|
||||
/// Layout a column break.
|
||||
fn handle_colbreak(&mut self, _: &'a Packed<ColbreakElem>) -> SourceResult<()> {
|
||||
// If there is still an available region, skip to it.
|
||||
// TODO: Turn this into a region abstraction.
|
||||
if !self.regions.backlog.is_empty() || self.regions.last.is_some() {
|
||||
self.finish_region(true)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Layout a paragraph.
|
||||
fn layout_par(
|
||||
fn handle_par(
|
||||
&mut self,
|
||||
engine: &mut Engine,
|
||||
par: &Packed<ParElem>,
|
||||
par: &'a Packed<ParElem>,
|
||||
styles: StyleChain,
|
||||
) -> SourceResult<()> {
|
||||
// Fetch properties.
|
||||
let align = AlignElem::alignment_in(styles).resolve(styles);
|
||||
let leading = ParElem::leading_in(styles);
|
||||
|
||||
// Layout the paragraph into lines. This only depends on the base size,
|
||||
// not on the Y position.
|
||||
let consecutive = self.last_was_par;
|
||||
let locator = self.locator.next(&par.span());
|
||||
let lines = par
|
||||
.layout(
|
||||
engine,
|
||||
self.locator.next(&par.span()),
|
||||
self.engine,
|
||||
locator,
|
||||
styles,
|
||||
consecutive,
|
||||
self.regions.base(),
|
||||
@ -280,39 +298,26 @@ impl<'a> FlowLayouter<'a> {
|
||||
// previous sticky frame to the next region (if available)
|
||||
if let Some(first) = lines.first() {
|
||||
while !self.regions.size.y.fits(first.height()) && !self.regions.in_last() {
|
||||
let mut sticky = self.items.len();
|
||||
for (i, item) in self.items.iter().enumerate().rev() {
|
||||
match *item {
|
||||
FlowItem::Absolute(_, _) => {}
|
||||
FlowItem::Frame { sticky: true, .. } => sticky = i,
|
||||
_ => break,
|
||||
}
|
||||
}
|
||||
|
||||
let carry: Vec<_> = self.items.drain(sticky..).collect();
|
||||
self.finish_region(engine, false)?;
|
||||
let in_last = self.regions.in_last();
|
||||
|
||||
for item in carry {
|
||||
self.layout_item(engine, item)?;
|
||||
}
|
||||
|
||||
let in_last = self.finish_region_with_migration()?;
|
||||
if in_last {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Layout the lines.
|
||||
for (i, mut frame) in lines.into_iter().enumerate() {
|
||||
if i > 0 {
|
||||
self.layout_item(engine, FlowItem::Absolute(leading, true))?;
|
||||
self.handle_item(FlowItem::Absolute(leading, true))?;
|
||||
}
|
||||
|
||||
self.drain_tag(&mut frame);
|
||||
self.layout_item(
|
||||
engine,
|
||||
FlowItem::Frame { frame, align, sticky: false, movable: true },
|
||||
)?;
|
||||
self.handle_item(FlowItem::Frame {
|
||||
frame,
|
||||
align,
|
||||
sticky: false,
|
||||
movable: true,
|
||||
})?;
|
||||
}
|
||||
|
||||
self.last_was_par = true;
|
||||
@ -320,56 +325,54 @@ impl<'a> FlowLayouter<'a> {
|
||||
}
|
||||
|
||||
/// Layout into multiple regions.
|
||||
fn layout_block(
|
||||
fn handle_block(
|
||||
&mut self,
|
||||
engine: &mut Engine,
|
||||
block: &'a Packed<BlockElem>,
|
||||
styles: StyleChain<'a>,
|
||||
) -> SourceResult<()> {
|
||||
// Temporarily delegate rootness to the columns.
|
||||
// Fetch properties.
|
||||
let sticky = block.sticky(styles);
|
||||
let align = AlignElem::alignment_in(styles).resolve(styles);
|
||||
|
||||
// If the block is "rootable" it may host footnotes. In that case, we
|
||||
// defer rootness to it temporarily. We disable our own rootness to
|
||||
// prevent duplicate footnotes.
|
||||
let is_root = self.root;
|
||||
if is_root && block.rootable(styles) {
|
||||
self.root = false;
|
||||
self.regions.root = true;
|
||||
}
|
||||
|
||||
// Skip directly if region is already full.
|
||||
if self.regions.is_full() {
|
||||
// Skip directly if region is already full.
|
||||
self.finish_region(engine, false)?;
|
||||
self.finish_region(false)?;
|
||||
}
|
||||
|
||||
// Layout the block itself.
|
||||
let sticky = block.sticky(styles);
|
||||
let fragment = block.layout(
|
||||
engine,
|
||||
self.engine,
|
||||
self.locator.next(&block.span()),
|
||||
styles,
|
||||
self.regions,
|
||||
)?;
|
||||
|
||||
// How to align the block.
|
||||
let align = AlignElem::alignment_in(styles).resolve(styles);
|
||||
|
||||
let mut notes = Vec::new();
|
||||
for (i, mut frame) in fragment.into_iter().enumerate() {
|
||||
// Find footnotes in the frame.
|
||||
if self.root {
|
||||
find_footnotes(&mut notes, &frame);
|
||||
collect_footnotes(&mut notes, &frame);
|
||||
}
|
||||
|
||||
if i > 0 {
|
||||
self.finish_region(engine, false)?;
|
||||
self.finish_region(false)?;
|
||||
}
|
||||
|
||||
self.drain_tag(&mut frame);
|
||||
frame.post_process(styles);
|
||||
self.layout_item(
|
||||
engine,
|
||||
FlowItem::Frame { frame, align, sticky, movable: false },
|
||||
)?;
|
||||
self.handle_item(FlowItem::Frame { frame, align, sticky, movable: false })?;
|
||||
}
|
||||
|
||||
self.try_handle_footnotes(engine, notes)?;
|
||||
self.try_handle_footnotes(notes)?;
|
||||
|
||||
self.root = is_root;
|
||||
self.regions.root = false;
|
||||
@ -379,50 +382,56 @@ impl<'a> FlowLayouter<'a> {
|
||||
}
|
||||
|
||||
/// Layout a placed element.
|
||||
fn layout_placed(
|
||||
fn handle_place(
|
||||
&mut self,
|
||||
engine: &mut Engine,
|
||||
placed: &Packed<PlaceElem>,
|
||||
placed: &'a Packed<PlaceElem>,
|
||||
styles: StyleChain,
|
||||
) -> SourceResult<()> {
|
||||
// Fetch properties.
|
||||
let float = placed.float(styles);
|
||||
let clearance = placed.clearance(styles);
|
||||
let alignment = placed.alignment(styles);
|
||||
let delta = Axes::new(placed.dx(styles), placed.dy(styles)).resolve(styles);
|
||||
|
||||
let x_align = alignment.map_or(FixedAlignment::Center, |align| {
|
||||
align.x().unwrap_or_default().resolve(styles)
|
||||
});
|
||||
let y_align = alignment.map(|align| align.y().map(|y| y.resolve(styles)));
|
||||
|
||||
let mut frame = placed
|
||||
.layout(
|
||||
engine,
|
||||
self.engine,
|
||||
self.locator.next(&placed.span()),
|
||||
styles,
|
||||
self.regions.base(),
|
||||
)?
|
||||
.into_frame();
|
||||
|
||||
frame.post_process(styles);
|
||||
let item = FlowItem::Placed { frame, x_align, y_align, delta, float, clearance };
|
||||
self.layout_item(engine, item)
|
||||
|
||||
self.handle_item(FlowItem::Placed {
|
||||
frame,
|
||||
x_align,
|
||||
y_align,
|
||||
delta,
|
||||
float,
|
||||
clearance,
|
||||
})
|
||||
}
|
||||
|
||||
/// Attach currently pending metadata to the frame.
|
||||
fn drain_tag(&mut self, frame: &mut Frame) {
|
||||
if !self.pending_tags.is_empty() && !frame.is_empty() {
|
||||
frame.prepend_multiple(
|
||||
self.pending_tags
|
||||
.drain(..)
|
||||
.map(|tag| (Point::zero(), FrameItem::Tag(tag))),
|
||||
);
|
||||
/// Lays out all floating elements before continuing with other content.
|
||||
fn handle_flush(&mut self, _: &'a Packed<FlushElem>) -> SourceResult<()> {
|
||||
for item in std::mem::take(&mut self.pending_floats) {
|
||||
self.handle_item(item)?;
|
||||
}
|
||||
while !self.pending_floats.is_empty() {
|
||||
self.finish_region(false)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Layout a finished frame.
|
||||
fn layout_item(
|
||||
&mut self,
|
||||
engine: &mut Engine,
|
||||
mut item: FlowItem,
|
||||
) -> SourceResult<()> {
|
||||
fn handle_item(&mut self, mut item: FlowItem) -> SourceResult<()> {
|
||||
match item {
|
||||
FlowItem::Absolute(v, weak) => {
|
||||
if weak
|
||||
@ -439,24 +448,24 @@ impl<'a> FlowLayouter<'a> {
|
||||
FlowItem::Frame { ref frame, movable, .. } => {
|
||||
let height = frame.height();
|
||||
while !self.regions.size.y.fits(height) && !self.regions.in_last() {
|
||||
self.finish_region(engine, false)?;
|
||||
self.finish_region(false)?;
|
||||
}
|
||||
|
||||
let in_last = self.regions.in_last();
|
||||
self.regions.size.y -= height;
|
||||
if self.root && movable {
|
||||
let mut notes = Vec::new();
|
||||
find_footnotes(&mut notes, frame);
|
||||
collect_footnotes(&mut notes, frame);
|
||||
self.items.push(item);
|
||||
|
||||
// When we are already in_last, we can directly force the
|
||||
// footnotes.
|
||||
if !self.handle_footnotes(engine, &mut notes, true, in_last)? {
|
||||
if !self.handle_footnotes(&mut notes, true, in_last)? {
|
||||
let item = self.items.pop();
|
||||
self.finish_region(engine, false)?;
|
||||
self.finish_region(false)?;
|
||||
self.items.extend(item);
|
||||
self.regions.size.y -= height;
|
||||
self.handle_footnotes(engine, &mut notes, true, true)?;
|
||||
self.handle_footnotes(&mut notes, true, true)?;
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
@ -504,8 +513,8 @@ impl<'a> FlowLayouter<'a> {
|
||||
// Find footnotes in the frame.
|
||||
if self.root {
|
||||
let mut notes = vec![];
|
||||
find_footnotes(&mut notes, frame);
|
||||
self.try_handle_footnotes(engine, notes)?;
|
||||
collect_footnotes(&mut notes, frame);
|
||||
self.try_handle_footnotes(notes)?;
|
||||
}
|
||||
}
|
||||
FlowItem::Footnote(_) => {}
|
||||
@ -515,12 +524,49 @@ impl<'a> FlowLayouter<'a> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Attach currently pending metadata to the frame.
|
||||
fn drain_tag(&mut self, frame: &mut Frame) {
|
||||
if !self.pending_tags.is_empty() && !frame.is_empty() {
|
||||
frame.prepend_multiple(
|
||||
self.pending_tags
|
||||
.drain(..)
|
||||
.map(|tag| (Point::zero(), FrameItem::Tag(tag.clone()))),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/// Finish the region, migrating all sticky items to the next one.
|
||||
///
|
||||
/// Returns whether we migrated into a last region.
|
||||
fn finish_region_with_migration(&mut self) -> SourceResult<bool> {
|
||||
// Find the suffix of sticky items.
|
||||
let mut sticky = self.items.len();
|
||||
for (i, item) in self.items.iter().enumerate().rev() {
|
||||
match *item {
|
||||
FlowItem::Absolute(_, _) => {}
|
||||
FlowItem::Frame { sticky: true, .. } => sticky = i,
|
||||
_ => break,
|
||||
}
|
||||
}
|
||||
|
||||
let carry: Vec<_> = self.items.drain(sticky..).collect();
|
||||
self.finish_region(false)?;
|
||||
|
||||
let in_last = self.regions.in_last();
|
||||
for item in carry {
|
||||
self.handle_item(item)?;
|
||||
}
|
||||
|
||||
Ok(in_last)
|
||||
}
|
||||
|
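The `finish_region_with_migration` helper above carries a trailing run of sticky frames (and the spacing between or after them) over to the next region for orphan prevention. The suffix search can be sketched on its own with a hypothetical, simplified item type:

    /// Hypothetical, simplified flow item for illustration only.
    #[derive(Debug)]
    enum Item {
        Spacing,
        Frame { sticky: bool },
    }

    /// Split off the trailing run of sticky frames, mirroring the suffix
    /// search in `finish_region_with_migration`.
    fn split_sticky_suffix(items: &mut Vec<Item>) -> Vec<Item> {
        let mut sticky = items.len();
        for (i, item) in items.iter().enumerate().rev() {
            match item {
                Item::Spacing => {}
                Item::Frame { sticky: true } => sticky = i,
                _ => break,
            }
        }
        items.drain(sticky..).collect()
    }

    fn main() {
        let mut items = vec![
            Item::Frame { sticky: false },
            Item::Frame { sticky: true },
            Item::Spacing,
            Item::Frame { sticky: true },
        ];
        let carry = split_sticky_suffix(&mut items);
        assert_eq!(items.len(), 1);
        assert_eq!(carry.len(), 3);
    }
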
||||
/// Finish the frame for one region.
|
||||
///
|
||||
/// Set `force` to `true` to allow creating a frame for out-of-flow elements
|
||||
/// only (this is used to force the creation of a frame in case the
|
||||
/// remaining elements are all out-of-flow).
|
||||
fn finish_region(&mut self, engine: &mut Engine, force: bool) -> SourceResult<()> {
|
||||
fn finish_region(&mut self, force: bool) -> SourceResult<()> {
|
||||
// Early return if we don't have any relevant items.
|
||||
if !force
|
||||
&& !self.items.is_empty()
|
||||
&& self.items.iter().all(FlowItem::is_out_of_flow)
|
||||
@ -585,6 +631,13 @@ impl<'a> FlowLayouter<'a> {
|
||||
size.y = self.initial.y;
|
||||
}
|
||||
|
||||
if !self.regions.size.x.is_finite() && self.expand.x {
|
||||
bail!(self.flow.span(), "cannot expand into infinite width");
|
||||
}
|
||||
if !self.regions.size.y.is_finite() && self.expand.y {
|
||||
bail!(self.flow.span(), "cannot expand into infinite height");
|
||||
}
|
||||
|
||||
let mut output = Frame::soft(size);
|
||||
let mut ruler = FixedAlignment::Start;
|
||||
let mut float_top_offset = Abs::zero();
|
||||
@ -653,7 +706,9 @@ impl<'a> FlowLayouter<'a> {
|
||||
if force && !self.pending_tags.is_empty() {
|
||||
let pos = Point::with_y(offset);
|
||||
output.push_multiple(
|
||||
self.pending_tags.drain(..).map(|tag| (pos, FrameItem::Tag(tag))),
|
||||
self.pending_tags
|
||||
.drain(..)
|
||||
.map(|tag| (pos, FrameItem::Tag(tag.clone()))),
|
||||
);
|
||||
}
|
||||
|
||||
@ -665,62 +720,42 @@ impl<'a> FlowLayouter<'a> {
|
||||
|
||||
// Try to place floats into the next region.
|
||||
for item in std::mem::take(&mut self.pending_floats) {
|
||||
self.layout_item(engine, item)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Lays out all floating elements before continuing with other content.
|
||||
fn flush(&mut self, engine: &mut Engine) -> SourceResult<()> {
|
||||
for item in std::mem::take(&mut self.pending_floats) {
|
||||
self.layout_item(engine, item)?;
|
||||
}
|
||||
while !self.pending_floats.is_empty() {
|
||||
self.finish_region(engine, false)?;
|
||||
self.handle_item(item)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Finish layouting and return the resulting fragment.
|
||||
fn finish(mut self, engine: &mut Engine) -> SourceResult<Fragment> {
|
||||
fn finish(mut self) -> SourceResult<Fragment> {
|
||||
if self.expand.y {
|
||||
while !self.regions.backlog.is_empty() {
|
||||
self.finish_region(engine, true)?;
|
||||
self.finish_region(true)?;
|
||||
}
|
||||
}
|
||||
|
||||
self.finish_region(engine, true)?;
|
||||
self.finish_region(true)?;
|
||||
while !self.items.is_empty() {
|
||||
self.finish_region(engine, true)?;
|
||||
self.finish_region(true)?;
|
||||
}
|
||||
|
||||
Ok(Fragment::frames(self.finished))
|
||||
}
|
||||
}
|
||||
|
||||
impl FlowLayouter<'_> {
|
||||
/// Tries to process all footnotes in the frame, placing them
|
||||
/// in the next region if they could not be placed in the current
|
||||
/// one.
|
||||
fn try_handle_footnotes(
|
||||
&mut self,
|
||||
engine: &mut Engine,
|
||||
mut notes: Vec<Packed<FootnoteElem>>,
|
||||
) -> SourceResult<()> {
|
||||
// When we are already in_last, we can directly force the
|
||||
// footnotes.
|
||||
if self.root
|
||||
&& !self.handle_footnotes(
|
||||
engine,
|
||||
&mut notes,
|
||||
false,
|
||||
self.regions.in_last(),
|
||||
)?
|
||||
&& !self.handle_footnotes(&mut notes, false, self.regions.in_last())?
|
||||
{
|
||||
self.finish_region(engine, false)?;
|
||||
self.handle_footnotes(engine, &mut notes, false, true)?;
|
||||
self.finish_region(false)?;
|
||||
self.handle_footnotes(&mut notes, false, true)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@ -731,7 +766,6 @@ impl FlowLayouter<'_> {
|
||||
/// regions.
|
||||
fn handle_footnotes(
|
||||
&mut self,
|
||||
engine: &mut Engine,
|
||||
notes: &mut Vec<Packed<FootnoteElem>>,
|
||||
movable: bool,
|
||||
force: bool,
|
||||
@ -750,16 +784,16 @@ impl FlowLayouter<'_> {
|
||||
}
|
||||
|
||||
if !self.has_footnotes {
|
||||
self.layout_footnote_separator(engine)?;
|
||||
self.layout_footnote_separator()?;
|
||||
}
|
||||
|
||||
self.regions.size.y -= self.footnote_config.gap;
|
||||
let frames = FootnoteEntry::new(notes[k].clone())
|
||||
.pack()
|
||||
.layout(
|
||||
engine,
|
||||
self.engine,
|
||||
Locator::synthesize(notes[k].location().unwrap()),
|
||||
self.styles,
|
||||
*self.styles,
|
||||
self.regions.with_root(false),
|
||||
)?
|
||||
.into_frames();
|
||||
@ -780,10 +814,10 @@ impl FlowLayouter<'_> {
|
||||
|
||||
let prev = notes.len();
|
||||
for (i, frame) in frames.into_iter().enumerate() {
|
||||
find_footnotes(notes, &frame);
|
||||
collect_footnotes(notes, &frame);
|
||||
if i > 0 {
|
||||
self.finish_region(engine, false)?;
|
||||
self.layout_footnote_separator(engine)?;
|
||||
self.finish_region(false)?;
|
||||
self.layout_footnote_separator()?;
|
||||
self.regions.size.y -= self.footnote_config.gap;
|
||||
}
|
||||
self.regions.size.y -= frame.height();
|
||||
@ -804,14 +838,14 @@ impl FlowLayouter<'_> {
|
||||
}
|
||||
|
||||
/// Layout and save the footnote separator, typically a line.
|
||||
fn layout_footnote_separator(&mut self, engine: &mut Engine) -> SourceResult<()> {
|
||||
fn layout_footnote_separator(&mut self) -> SourceResult<()> {
|
||||
let expand = Axes::new(self.regions.expand.x, false);
|
||||
let pod = Regions::one(self.regions.base(), expand);
|
||||
let separator = &self.footnote_config.separator;
|
||||
|
||||
// FIXME: Shouldn't use `root()` here.
|
||||
let mut frame = separator
|
||||
.layout(engine, Locator::root(), self.styles, pod)?
|
||||
.layout(self.engine, Locator::root(), *self.styles, pod)?
|
||||
.into_frame();
|
||||
frame.size_mut().y += self.footnote_config.clearance;
|
||||
frame.translate(Point::with_y(self.footnote_config.clearance));
|
||||
@ -824,11 +858,11 @@ impl FlowLayouter<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Finds all footnotes in the frame.
|
||||
fn find_footnotes(notes: &mut Vec<Packed<FootnoteElem>>, frame: &Frame) {
|
||||
/// Collect all footnotes in a frame.
|
||||
fn collect_footnotes(notes: &mut Vec<Packed<FootnoteElem>>, frame: &Frame) {
|
||||
for (_, item) in frame.items() {
|
||||
match item {
|
||||
FrameItem::Group(group) => find_footnotes(notes, &group.frame),
|
||||
FrameItem::Group(group) => collect_footnotes(notes, &group.frame),
|
||||
FrameItem::Tag(tag)
|
||||
if !notes.iter().any(|note| note.location() == tag.elem.location()) =>
|
||||
{
|
||||
|
@ -79,7 +79,7 @@ impl<'a> Item<'a> {
|
||||
}
|
||||
|
||||
/// The natural layouted width of the item.
|
||||
pub fn width(&self) -> Abs {
|
||||
pub fn natural_width(&self) -> Abs {
|
||||
match self {
|
||||
Self::Text(shaped) => shaped.width,
|
||||
Self::Absolute(v, _) => *v,
|
||||
@ -201,7 +201,7 @@ pub fn collect<'a>(
|
||||
);
|
||||
let peeked = iter.peek().and_then(|(child, _)| {
|
||||
if let Some(elem) = child.to_packed::<TextElem>() {
|
||||
elem.text().chars().next()
|
||||
elem.text().chars().find(|c| !is_default_ignorable(*c))
|
||||
} else if child.is::<SmartQuoteElem>() {
|
||||
Some('"')
|
||||
} else if child.is::<SpaceElem>()
|
||||
@ -302,7 +302,7 @@ impl<'a> Collector<'a> {
|
||||
}
|
||||
|
||||
fn push_segment(&mut self, segment: Segment<'a>, is_quote: bool) {
|
||||
if let Some(last) = self.full.chars().last() {
|
||||
if let Some(last) = self.full.chars().rev().find(|c| !is_default_ignorable(*c)) {
|
||||
self.quoter.last(last, is_quote);
|
||||
}
|
||||
|
||||
|
@ -1,11 +1,18 @@
|
||||
use unicode_bidi::BidiInfo;
|
||||
use std::fmt::{self, Debug, Formatter};
|
||||
use std::ops::{Deref, DerefMut};
|
||||
|
||||
use super::*;
|
||||
use crate::engine::Engine;
|
||||
use crate::layout::{Abs, Em, Fr, Frame, FrameItem, Point};
|
||||
use crate::text::TextElem;
|
||||
use crate::layout::{Abs, Dir, Em, Fr, Frame, FrameItem, Point};
|
||||
use crate::text::{Lang, TextElem};
|
||||
use crate::utils::Numeric;
|
||||
|
||||
const SHY: char = '\u{ad}';
|
||||
const HYPHEN: char = '-';
|
||||
const EN_DASH: char = '–';
|
||||
const EM_DASH: char = '—';
|
||||
const LINE_SEPARATOR: char = '\u{2028}'; // We use LS to distinguish justified breaks.
|
||||
|
||||
/// A layouted line, consisting of a sequence of layouted paragraph items that
|
||||
/// are mostly borrowed from the preparation phase. This type enables you to
|
||||
/// measure the size of a line in a range before committing to building the
|
||||
@ -16,20 +23,9 @@ use crate::utils::Numeric;
|
||||
/// line, respectively. But even those can partially reuse previous results when
|
||||
/// the break index is safe-to-break per rustybuzz.
|
||||
pub struct Line<'a> {
|
||||
/// Bidi information about the paragraph.
|
||||
pub bidi: &'a BidiInfo<'a>,
|
||||
/// The trimmed range the line spans in the paragraph.
|
||||
pub trimmed: Range,
|
||||
/// The untrimmed end where the line ends.
|
||||
pub end: usize,
|
||||
/// A reshaped text item if the line sliced up a text item at the start.
|
||||
pub first: Option<Item<'a>>,
|
||||
/// Inner items which don't need to be reprocessed.
|
||||
pub inner: &'a [Item<'a>],
|
||||
/// A reshaped text item if the line sliced up a text item at the end. If
|
||||
/// there is only one text item, this takes precedence over `first`.
|
||||
pub last: Option<Item<'a>>,
|
||||
/// The width of the line.
|
||||
/// The items the line is made of.
|
||||
pub items: Items<'a>,
|
||||
/// The exact natural width of the line.
|
||||
pub width: Abs,
|
||||
/// Whether the line should be justified.
|
||||
pub justify: bool,
|
||||
@ -39,45 +35,27 @@ pub struct Line<'a> {
|
||||
}
|
||||
|
||||
impl<'a> Line<'a> {
|
||||
/// Iterate over the line's items.
|
||||
pub fn items(&self) -> impl Iterator<Item = &Item<'a>> {
|
||||
self.first.iter().chain(self.inner).chain(&self.last)
|
||||
}
|
||||
|
||||
/// Return items that intersect the given `text_range`.
|
||||
pub fn slice(&self, text_range: Range) -> impl Iterator<Item = &Item<'a>> {
|
||||
let mut cursor = self.trimmed.start;
|
||||
let mut start = 0;
|
||||
let mut end = 0;
|
||||
|
||||
for (i, item) in self.items().enumerate() {
|
||||
if cursor <= text_range.start {
|
||||
start = i;
|
||||
}
|
||||
|
||||
let len = item.textual_len();
|
||||
if cursor < text_range.end || cursor + len <= text_range.end {
|
||||
end = i + 1;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
|
||||
cursor += len;
|
||||
/// Create an empty line.
|
||||
pub fn empty() -> Self {
|
||||
Self {
|
||||
items: Items::new(),
|
||||
width: Abs::zero(),
|
||||
justify: false,
|
||||
dash: None,
|
||||
}
|
||||
|
||||
self.items().skip(start).take(end - start)
|
||||
}
|
||||
|
||||
/// How many glyphs are in the text where we can insert additional
|
||||
/// space when encountering underfull lines.
|
||||
pub fn justifiables(&self) -> usize {
|
||||
let mut count = 0;
|
||||
for shaped in self.items().filter_map(Item::text) {
|
||||
for shaped in self.items.iter().filter_map(Item::text) {
|
||||
count += shaped.justifiables();
|
||||
}
|
||||
|
||||
// CJK character at line end should not be adjusted.
|
||||
if self
|
||||
.items()
|
||||
.items
|
||||
.last()
|
||||
.and_then(Item::text)
|
||||
.map(|s| s.cjk_justifiable_at_last())
|
||||
@ -89,19 +67,37 @@ impl<'a> Line<'a> {
|
||||
count
|
||||
}
|
||||
|
||||
/// How much can the line stretch
|
||||
/// How much the line can stretch.
|
||||
pub fn stretchability(&self) -> Abs {
|
||||
self.items().filter_map(Item::text).map(|s| s.stretchability()).sum()
|
||||
self.items
|
||||
.iter()
|
||||
.filter_map(Item::text)
|
||||
.map(|s| s.stretchability())
|
||||
.sum()
|
||||
}
|
||||
|
||||
/// How much can the line shrink
|
||||
/// How much the line can shrink.
|
||||
pub fn shrinkability(&self) -> Abs {
|
||||
self.items().filter_map(Item::text).map(|s| s.shrinkability()).sum()
|
||||
self.items
|
||||
.iter()
|
||||
.filter_map(Item::text)
|
||||
.map(|s| s.shrinkability())
|
||||
.sum()
|
||||
}
|
||||
|
||||
/// Whether the line has items with negative width.
|
||||
pub fn has_negative_width_items(&self) -> bool {
|
||||
self.items.iter().any(|item| match item {
|
||||
Item::Absolute(amount, _) => *amount < Abs::zero(),
|
||||
Item::Frame(frame, _) => frame.width() < Abs::zero(),
|
||||
_ => false,
|
||||
})
|
||||
}
|
||||
|
||||
/// The sum of fractions in the line.
|
||||
pub fn fr(&self) -> Fr {
|
||||
self.items()
|
||||
self.items
|
||||
.iter()
|
||||
.filter_map(|item| match item {
|
||||
Item::Fractional(fr, _) => Some(*fr),
|
||||
_ => None,
|
||||
@ -113,232 +109,299 @@ impl<'a> Line<'a> {
|
||||
/// A dash at the end of a line.
|
||||
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
|
||||
pub enum Dash {
|
||||
/// A hyphen added to break a word.
|
||||
SoftHyphen,
|
||||
/// Regular hyphen, present in a compound word, e.g. beija-flor.
|
||||
HardHyphen,
|
||||
/// An em dash.
|
||||
Long,
|
||||
/// An en dash.
|
||||
Short,
|
||||
/// A soft hyphen added to break a word.
|
||||
Soft,
|
||||
/// A regular hyphen, present in a compound word, e.g. beija-flor.
|
||||
Hard,
|
||||
/// Another kind of dash. Only relevant for cost computation.
|
||||
Other,
|
||||
}
|
||||
|
||||
/// Create a line which spans the given range.
|
||||
pub fn line<'a>(
|
||||
engine: &Engine,
|
||||
p: &'a Preparation,
|
||||
mut range: Range,
|
||||
range: Range,
|
||||
breakpoint: Breakpoint,
|
||||
prepend_hyphen: bool,
|
||||
pred: Option<&Line>,
|
||||
) -> Line<'a> {
|
||||
let end = range.end;
|
||||
let mut justify =
|
||||
p.justify && end < p.bidi.text.len() && breakpoint != Breakpoint::Mandatory;
|
||||
// The line's full text.
|
||||
let full = &p.text[range.clone()];
|
||||
|
||||
// Whether the line is justified.
|
||||
let justify = full.ends_with(LINE_SEPARATOR)
|
||||
|| (p.justify && breakpoint != Breakpoint::Mandatory);
|
||||
|
||||
// Process dashes.
|
||||
let dash = if breakpoint == Breakpoint::Hyphen || full.ends_with(SHY) {
|
||||
Some(Dash::Soft)
|
||||
} else if full.ends_with(HYPHEN) {
|
||||
Some(Dash::Hard)
|
||||
} else if full.ends_with([EN_DASH, EM_DASH]) {
|
||||
Some(Dash::Other)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Trim the line at the end, if necessary for this breakpoint.
|
||||
let trim = range.start + breakpoint.trim(full).len();
|
||||
|
||||
// Collect the items for the line.
|
||||
let mut items = collect_items(engine, p, range, trim);
|
||||
|
||||
// Add a hyphen at the line start, if a previous dash should be repeated.
|
||||
if pred.map_or(false, |pred| should_repeat_hyphen(pred, full)) {
|
||||
if let Some(shaped) = items.first_text_mut() {
|
||||
shaped.prepend_hyphen(engine, p.fallback);
|
||||
}
|
||||
}
|
||||
|
||||
// Add a hyphen at the line end, if we ended on a soft hyphen.
|
||||
if dash == Some(Dash::Soft) {
|
||||
if let Some(shaped) = items.last_text_mut() {
|
||||
shaped.push_hyphen(engine, p.fallback);
|
||||
}
|
||||
}
|
||||
|
||||
// Deal with CJ characters at line boundaries.
|
||||
adjust_cj_at_line_boundaries(p, full, &mut items);
|
||||
|
||||
// Compute the line's width.
|
||||
let width = items.iter().map(Item::natural_width).sum();
|
||||
|
||||
Line { items, width, justify, dash }
|
||||
}
|
||||
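The `dash` classification in `line` above looks only at how the line's text ends. A small sketch of that precedence (a soft hyphen from the breakpoint or a trailing U+00AD, then a hard hyphen, then en/em dashes); the `hyphenated` flag is a stand-in for `Breakpoint::Hyphen`:

    const SHY: char = '\u{ad}';
    const HYPHEN: char = '-';
    const EN_DASH: char = '–';
    const EM_DASH: char = '—';

    #[derive(Debug, PartialEq)]
    enum Dash { Soft, Hard, Other }

    /// Classify the dash (if any) that a line of text ends with.
    fn classify_dash(full: &str, hyphenated: bool) -> Option<Dash> {
        if hyphenated || full.ends_with(SHY) {
            Some(Dash::Soft)
        } else if full.ends_with(HYPHEN) {
            Some(Dash::Hard)
        } else if full.ends_with([EN_DASH, EM_DASH]) {
            Some(Dash::Other)
        } else {
            None
        }
    }

    fn main() {
        assert_eq!(classify_dash("beija-flor", false), Some(Dash::Hard));
        assert_eq!(classify_dash("ready", false), None);
    }
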
|
||||
/// Collects / reshapes all items for the line with the given `range`.
|
||||
///
|
||||
/// The `trim` defines an end position to which text items are trimmed. For
|
||||
/// example, the `range` may span "hello\n", but the `trim` specifies that the
|
||||
/// linebreak is trimmed.
|
||||
///
|
||||
/// We do not factor the `trim` directly into the `range` because we still want
|
||||
/// to keep non-text items after the trim (e.g. tags).
|
||||
fn collect_items<'a>(
|
||||
engine: &Engine,
|
||||
p: &'a Preparation,
|
||||
range: Range,
|
||||
trim: usize,
|
||||
) -> Items<'a> {
|
||||
let mut items = Items::new();
|
||||
let mut fallback = None;
|
||||
|
||||
// Collect the items for each consecutively ordered run.
|
||||
reorder(p, range.clone(), |subrange, rtl| {
|
||||
let from = items.len();
|
||||
collect_range(engine, p, subrange, trim, &mut items, &mut fallback);
|
||||
if rtl {
|
||||
items.reorder(from);
|
||||
}
|
||||
});
|
||||
|
||||
// Trim weak spacing at the start of the line.
|
||||
let prefix = items
|
||||
.iter()
|
||||
.take_while(|item| matches!(item, Item::Absolute(_, true)))
|
||||
.count();
|
||||
if prefix > 0 {
|
||||
items.drain(..prefix);
|
||||
}
|
||||
|
||||
// Trim weak spacing at the end of the line.
|
||||
while matches!(items.last(), Some(Item::Absolute(_, true))) {
|
||||
items.pop();
|
||||
}
|
||||
|
||||
// Add fallback text to expand the line height, if necessary.
|
||||
if !items.iter().any(|item| matches!(item, Item::Text(_))) {
|
||||
if let Some(fallback) = fallback {
|
||||
items.push(fallback);
|
||||
}
|
||||
}
|
||||
|
||||
items
|
||||
}
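`collect_items` drops weak spacing at both ends of the line before falling back to an empty text item. A minimal, std-only sketch of that trim-at-both-ends pattern, with a plain predicate in place of the real `Item::Absolute(_, true)` check:

```rust
/// Remove elements matching `is_weak` from the front and back of `items`,
/// leaving interior elements untouched.
fn trim_weak<T>(items: &mut Vec<T>, is_weak: impl Fn(&T) -> bool) {
    // Trim the front: count the weak prefix, then drain it in one go.
    let prefix = items.iter().take_while(|item| is_weak(*item)).count();
    if prefix > 0 {
        items.drain(..prefix);
    }
    // Trim the back: pop as long as the last element is weak.
    while items.last().map_or(false, |item| is_weak(item)) {
        items.pop();
    }
}

fn main() {
    // `0` stands in for weak spacing, anything else for real content.
    let mut items = vec![0, 0, 7, 3, 0, 5, 0];
    trim_weak(&mut items, |&x| x == 0);
    assert_eq!(items, vec![7, 3, 0, 5]);
}
```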
|
||||
/// Calls `f` for the BiDi-reordered ranges of a line.
fn reorder<F>(p: &Preparation, range: Range, mut f: F)
|
||||
where
|
||||
F: FnMut(Range, bool),
|
||||
{
|
||||
// If there is nothing bidirectional going on, skip reordering.
|
||||
let Some(bidi) = &p.bidi else {
|
||||
f(range, p.dir == Dir::RTL);
|
||||
return;
|
||||
};
|
||||
|
||||
// The bidi crate panics for empty lines.
|
||||
if range.is_empty() {
|
||||
return Line {
|
||||
bidi: &p.bidi,
|
||||
end,
|
||||
trimmed: range,
|
||||
first: None,
|
||||
inner: &[],
|
||||
last: None,
|
||||
width: Abs::zero(),
|
||||
justify,
|
||||
dash: None,
|
||||
f(range, p.dir == Dir::RTL);
|
||||
return;
|
||||
}
|
||||
|
||||
// Find the paragraph that contains the line.
|
||||
let para = bidi
|
||||
.paragraphs
|
||||
.iter()
|
||||
.find(|para| para.range.contains(&range.start))
|
||||
.unwrap();
|
||||
|
||||
// Compute the reordered ranges in visual order (left to right).
|
||||
let (levels, runs) = bidi.visual_runs(para, range.clone());
|
||||
|
||||
// Call `f` for each run.
|
||||
for run in runs {
|
||||
let rtl = levels[run.start].is_rtl();
|
||||
f(run, rtl)
|
||||
}
|
||||
}
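`reorder` asks the BiDi data for the line's visual runs and flags the right-to-left ones so the caller can reverse them. The sketch below is a deliberate simplification meant only to illustrate the idea: it reverses maximal odd-level runs and ignores nested embedding levels, which the `unicode-bidi` crate handles properly.

```rust
/// Reverse every maximal run of items whose BiDi embedding level is odd
/// (i.e. right-to-left), leaving even-level runs in logical order.
///
/// A simplified stand-in for the reordering performed by `unicode-bidi`.
fn reorder_runs<T: Clone>(items: &[T], levels: &[u8]) -> Vec<T> {
    assert_eq!(items.len(), levels.len());
    let mut out = Vec::with_capacity(items.len());
    let mut i = 0;
    while i < items.len() {
        // Find the end of the run with the same direction.
        let rtl = levels[i] % 2 == 1;
        let mut j = i;
        while j < items.len() && (levels[j] % 2 == 1) == rtl {
            j += 1;
        }
        if rtl {
            out.extend(items[i..j].iter().rev().cloned());
        } else {
            out.extend(items[i..j].iter().cloned());
        }
        i = j;
    }
    out
}

fn main() {
    let items = vec!["a", "b", "X", "Y", "Z", "c"];
    let levels = vec![0, 0, 1, 1, 1, 0];
    assert_eq!(reorder_runs(&items, &levels), vec!["a", "b", "Z", "Y", "X", "c"]);
}
```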
|
||||
/// Collects / reshapes all items for the given `subrange` with continuous
/// direction.
fn collect_range<'a>(
|
||||
engine: &Engine,
|
||||
p: &'a Preparation,
|
||||
range: Range,
|
||||
trim: usize,
|
||||
items: &mut Items<'a>,
|
||||
fallback: &mut Option<ItemEntry<'a>>,
|
||||
) {
|
||||
for (subrange, item) in p.slice(range.clone()) {
|
||||
// All non-text items are just kept, they can't be split.
|
||||
let Item::Text(shaped) = item else {
|
||||
items.push(item);
|
||||
continue;
|
||||
};
|
||||
}
|
||||
|
||||
// Slice out the relevant items.
|
||||
let (mut expanded, mut inner) = p.slice(range.clone());
|
||||
let mut width = Abs::zero();
|
||||
// The intersection range of the item, the subrange, and the line's
|
||||
// trimming.
|
||||
let sliced =
|
||||
range.start.max(subrange.start)..range.end.min(subrange.end).min(trim);
|
||||
|
||||
// Weak space (`Absolute(_, true)`) is removed at the end of the line
|
||||
while let Some((Item::Absolute(_, true), before)) = inner.split_last() {
|
||||
inner = before;
|
||||
range.end -= 1;
|
||||
expanded.end -= 1;
|
||||
}
|
||||
// Weak space (`Absolute(_, true)`) is removed at the beginning of the line
|
||||
while let Some((Item::Absolute(_, true), after)) = inner.split_first() {
|
||||
inner = after;
|
||||
range.start += 1;
|
||||
expanded.end += 1;
|
||||
}
|
||||
// Whether the item is split by the line.
|
||||
let split = subrange.start < sliced.start || sliced.end < subrange.end;
|
||||
|
||||
// Reshape the last item if it's split in half or hyphenated.
|
||||
let mut last = None;
|
||||
let mut dash = None;
|
||||
if let Some((Item::Text(shaped), before)) = inner.split_last() {
|
||||
// Compute the range we want to shape, trimming whitespace at the
|
||||
// end of the line.
|
||||
let base = expanded.end - shaped.text.len();
|
||||
let start = range.start.max(base);
|
||||
let text = &p.bidi.text[start..range.end];
|
||||
// U+200B ZERO WIDTH SPACE is used to provide a line break opportunity,
|
||||
// we want to trim it too.
|
||||
let trimmed = text.trim_end().trim_end_matches('\u{200B}');
|
||||
range.end = start + trimmed.len();
|
||||
|
||||
// Deal with hyphens, dashes and justification.
|
||||
let shy = trimmed.ends_with('\u{ad}');
|
||||
let hyphen = breakpoint == Breakpoint::Hyphen;
|
||||
dash = if hyphen || shy {
|
||||
Some(Dash::SoftHyphen)
|
||||
} else if trimmed.ends_with('-') {
|
||||
Some(Dash::HardHyphen)
|
||||
} else if trimmed.ends_with('–') {
|
||||
Some(Dash::Short)
|
||||
} else if trimmed.ends_with('—') {
|
||||
Some(Dash::Long)
|
||||
if sliced.is_empty() {
|
||||
// When there is no text, still keep this as a fallback item, which
|
||||
// we can use to force a non-zero line-height when the line doesn't
|
||||
// contain any other text.
|
||||
*fallback = Some(ItemEntry::from(Item::Text(shaped.empty())));
|
||||
} else if split {
|
||||
// When the item is split in half, reshape it.
|
||||
let reshaped = shaped.reshape(engine, sliced);
|
||||
items.push(Item::Text(reshaped));
|
||||
} else {
|
||||
None
|
||||
};
|
||||
justify |= text.ends_with('\u{2028}');
|
||||
// When the item is fully contained, just keep it.
|
||||
items.push(item);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Deal with CJK punctuation at line ends.
|
||||
let gb_style = cjk_punct_style(shaped.lang, shaped.region);
|
||||
let maybe_adjust_last_glyph = trimmed.ends_with(END_PUNCT_PAT)
|
||||
|| (p.cjk_latin_spacing && trimmed.ends_with(is_of_cj_script));
|
||||
/// Add spacing around punctuation marks for CJ glyphs at line boundaries.
|
||||
///
|
||||
/// See Requirements for Chinese Text Layout, Section 3.1.6.3 Compression of
|
||||
/// punctuation marks at line start or line end.
|
||||
fn adjust_cj_at_line_boundaries(p: &Preparation, text: &str, items: &mut Items) {
|
||||
if text.starts_with(BEGIN_PUNCT_PAT)
|
||||
|| (p.cjk_latin_spacing && text.starts_with(is_of_cj_script))
|
||||
{
|
||||
adjust_cj_at_line_start(p, items);
|
||||
}
|
||||
|
||||
// Usually, we don't want to shape an empty string because:
// - We don't want the height of trimmed whitespace in a different font
// to be considered for the line height.
// - Even if it's in the same font, it's unnecessary.
if text.ends_with(END_PUNCT_PAT)
|
||||
|| (p.cjk_latin_spacing && text.ends_with(is_of_cj_script))
|
||||
{
|
||||
adjust_cj_at_line_end(p, items);
|
||||
}
|
||||
}
|
||||
|
||||
/// Add spacing around punctuation marks for CJ glyphs at the line start.
|
||||
fn adjust_cj_at_line_start(p: &Preparation, items: &mut Items) {
|
||||
let Some(shaped) = items.first_text_mut() else { return };
|
||||
let Some(glyph) = shaped.glyphs.first() else { return };
|
||||
|
||||
if glyph.is_cjk_right_aligned_punctuation() {
|
||||
// If the first glyph is a CJK punctuation, we want to
|
||||
// shrink it.
|
||||
let glyph = shaped.glyphs.to_mut().first_mut().unwrap();
|
||||
let shrink = glyph.shrinkability().0;
|
||||
glyph.shrink_left(shrink);
|
||||
shaped.width -= shrink.at(shaped.size);
|
||||
} else if p.cjk_latin_spacing && glyph.is_cj_script() && glyph.x_offset > Em::zero() {
|
||||
// If the first glyph is a CJK character adjusted by
|
||||
// [`add_cjk_latin_spacing`], restore the original width.
|
||||
let glyph = shaped.glyphs.to_mut().first_mut().unwrap();
|
||||
let shrink = glyph.x_offset;
|
||||
glyph.x_advance -= shrink;
|
||||
glyph.x_offset = Em::zero();
|
||||
glyph.adjustability.shrinkability.0 = Em::zero();
|
||||
shaped.width -= shrink.at(shaped.size);
|
||||
}
|
||||
}
|
||||
|
||||
/// Add spacing around punctuation marks for CJ glyphs at the line end.
|
||||
fn adjust_cj_at_line_end(p: &Preparation, items: &mut Items) {
|
||||
let Some(shaped) = items.last_text_mut() else { return };
|
||||
let Some(glyph) = shaped.glyphs.last() else { return };
|
||||
|
||||
// Deal with CJK punctuation at line ends.
|
||||
let style = cjk_punct_style(shaped.lang, shaped.region);
|
||||
|
||||
if glyph.is_cjk_left_aligned_punctuation(style) {
|
||||
// If the last glyph is a CJK punctuation, we want to
|
||||
// shrink it.
|
||||
let shrink = glyph.shrinkability().1;
|
||||
let punct = shaped.glyphs.to_mut().last_mut().unwrap();
|
||||
punct.shrink_right(shrink);
|
||||
shaped.width -= shrink.at(shaped.size);
|
||||
} else if p.cjk_latin_spacing
|
||||
&& glyph.is_cj_script()
|
||||
&& (glyph.x_advance - glyph.x_offset) > Em::one()
|
||||
{
|
||||
// If the last glyph is a CJK character adjusted by
|
||||
// [`add_cjk_latin_spacing`], restore the original width.
|
||||
let shrink = glyph.x_advance - glyph.x_offset - Em::one();
|
||||
let glyph = shaped.glyphs.to_mut().last_mut().unwrap();
|
||||
glyph.x_advance -= shrink;
|
||||
glyph.adjustability.shrinkability.1 = Em::zero();
|
||||
shaped.width -= shrink.at(shaped.size);
|
||||
}
|
||||
}
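Both boundary adjustments above shave width off a single edge glyph, either by its recorded shrinkability or by undoing CJK-Latin spacing. A toy sketch of the line-end case with a hypothetical `Glyph` measured in ems (the real `ShapedGlyph` carries much more state):

```rust
/// A drastically simplified glyph: its advance width and how much of that
/// advance may be shrunk away at its right edge, both in ems.
struct Glyph {
    x_advance: f64,
    shrink_right: f64,
}

/// Shrink the trailing glyph of a line if it allows it, returning how much
/// width (in ems) was removed. Mirrors the shape of the line-end adjustment.
fn shrink_line_end(glyphs: &mut [Glyph]) -> f64 {
    let Some(last) = glyphs.last_mut() else { return 0.0 };
    let shrink = last.shrink_right;
    last.x_advance -= shrink;
    last.shrink_right = 0.0;
    shrink
}

fn main() {
    // A full-width ideographic full stop typically reserves trailing space
    // that can be compressed when it lands at the end of a line.
    let mut glyphs = vec![
        Glyph { x_advance: 1.0, shrink_right: 0.0 },
        Glyph { x_advance: 1.0, shrink_right: 0.5 },
    ];
    assert_eq!(shrink_line_end(&mut glyphs), 0.5);
    assert_eq!(glyphs[1].x_advance, 0.5);
}
```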
|
||||
/// Whether a hyphen should be inserted at the start of the next line.
|
||||
fn should_repeat_hyphen(pred_line: &Line, text: &str) -> bool {
|
||||
// If the predecessor line does not end with a `Dash::Hard`, we shall
|
||||
// not place a hyphen at the start of the next line.
|
||||
if pred_line.dash != Some(Dash::Hard) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// The hyphen should repeat only in the languages that require that feature.
|
||||
// For more information see the discussion at https://github.com/typst/typst/issues/3235
|
||||
let Some(Item::Text(shaped)) = pred_line.items.last() else { return false };
|
||||
|
||||
match shaped.lang {
|
||||
// - Lower Sorbian: see https://dolnoserbski.de/ortografija/psawidla/K3
|
||||
// - Czech: see https://prirucka.ujc.cas.cz/?id=164
|
||||
// - Croatian: see http://pravopis.hr/pravilo/spojnica/68/
|
||||
// - Polish: see https://www.ortograf.pl/zasady-pisowni/lacznik-zasady-pisowni
|
||||
// - Portuguese: see https://www2.senado.leg.br/bdsf/bitstream/handle/id/508145/000997415.pdf (Base XX)
|
||||
// - Slovak: see https://www.zones.sk/studentske-prace/gramatika/10620-pravopis-rozdelovanie-slov/
|
||||
Lang::LOWER_SORBIAN
|
||||
| Lang::CZECH
|
||||
| Lang::CROATIAN
|
||||
| Lang::POLISH
|
||||
| Lang::PORTUGUESE
|
||||
| Lang::SLOVAK => true,
|
||||
|
||||
// In Spanish the hyphen is required only if the word next to hyphen is
|
||||
// not capitalized. Otherwise, the hyphen must not be repeated.
|
||||
//
|
||||
// There is one exception though. When the whole line is empty, we need
|
||||
// the shaped empty string to make the line the appropriate height. That
|
||||
// is the case exactly if the string is empty and there are no other
|
||||
// items in the line.
|
||||
if hyphen
|
||||
|| start + shaped.text.len() > range.end
|
||||
|| maybe_adjust_last_glyph
|
||||
|| prepend_hyphen
|
||||
{
|
||||
if hyphen || start < range.end || before.is_empty() {
|
||||
let mut reshaped = shaped.reshape(engine, &p.spans, start..range.end);
|
||||
if hyphen || shy {
|
||||
reshaped.push_hyphen(engine, p.fallback);
|
||||
}
|
||||
// See § 4.1.1.1.2.e on the "Ortografía de la lengua española"
|
||||
// https://www.rae.es/ortografía/como-signo-de-división-de-palabras-a-final-de-línea
|
||||
Lang::SPANISH => text.chars().next().map_or(false, |c| !c.is_uppercase()),
|
||||
|
||||
if let Some(last_glyph) = reshaped.glyphs.last() {
|
||||
if last_glyph.is_cjk_left_aligned_punctuation(gb_style) {
|
||||
// If the last glyph is a CJK punctuation, we want to
|
||||
// shrink it. See Requirements for Chinese Text Layout,
|
||||
// Section 3.1.6.3 Compression of punctuation marks at
|
||||
// line start or line end
|
||||
let shrink_amount = last_glyph.shrinkability().1;
|
||||
let punct = reshaped.glyphs.to_mut().last_mut().unwrap();
|
||||
punct.shrink_right(shrink_amount);
|
||||
reshaped.width -= shrink_amount.at(reshaped.size);
|
||||
} else if p.cjk_latin_spacing
|
||||
&& last_glyph.is_cj_script()
|
||||
&& (last_glyph.x_advance - last_glyph.x_offset) > Em::one()
|
||||
{
|
||||
// If the last glyph is a CJK character adjusted by
|
||||
// [`add_cjk_latin_spacing`], restore the original
|
||||
// width.
|
||||
let shrink_amount =
|
||||
last_glyph.x_advance - last_glyph.x_offset - Em::one();
|
||||
let glyph = reshaped.glyphs.to_mut().last_mut().unwrap();
|
||||
glyph.x_advance -= shrink_amount;
|
||||
glyph.adjustability.shrinkability.1 = Em::zero();
|
||||
reshaped.width -= shrink_amount.at(reshaped.size);
|
||||
}
|
||||
}
|
||||
|
||||
width += reshaped.width;
|
||||
last = Some(Item::Text(reshaped));
|
||||
}
|
||||
|
||||
inner = before;
|
||||
}
|
||||
}
|
||||
|
||||
// Deal with CJ characters at line starts.
|
||||
let text = &p.bidi.text[range.start..end];
|
||||
let maybe_adjust_first_glyph = text.starts_with(BEGIN_PUNCT_PAT)
|
||||
|| (p.cjk_latin_spacing && text.starts_with(is_of_cj_script));
|
||||
|
||||
// Reshape the start item if it's split in half.
|
||||
let mut first = None;
|
||||
if let Some((Item::Text(shaped), after)) = inner.split_first() {
|
||||
// Compute the range we want to shape.
|
||||
let base = expanded.start;
|
||||
let end = range.end.min(base + shaped.text.len());
|
||||
|
||||
// Reshape if necessary.
|
||||
if range.start + shaped.text.len() > end
|
||||
|| maybe_adjust_first_glyph
|
||||
|| prepend_hyphen
|
||||
{
|
||||
// If the range is empty, we don't want to push an empty text item.
|
||||
if range.start < end {
|
||||
let reshaped = shaped.reshape(engine, &p.spans, range.start..end);
|
||||
width += reshaped.width;
|
||||
first = Some(Item::Text(reshaped));
|
||||
}
|
||||
|
||||
inner = after;
|
||||
}
|
||||
}
|
||||
|
||||
if prepend_hyphen {
|
||||
let reshaped = first.as_mut().or(last.as_mut()).and_then(Item::text_mut);
|
||||
if let Some(reshaped) = reshaped {
|
||||
let width_before = reshaped.width;
|
||||
reshaped.prepend_hyphen(engine, p.fallback);
|
||||
width += reshaped.width - width_before;
|
||||
}
|
||||
}
|
||||
|
||||
if maybe_adjust_first_glyph {
|
||||
let reshaped = first.as_mut().or(last.as_mut()).and_then(Item::text_mut);
|
||||
if let Some(reshaped) = reshaped {
|
||||
if let Some(first_glyph) = reshaped.glyphs.first() {
|
||||
if first_glyph.is_cjk_right_aligned_punctuation() {
|
||||
// If the first glyph is a CJK punctuation, we want to
|
||||
// shrink it.
|
||||
let shrink_amount = first_glyph.shrinkability().0;
|
||||
let glyph = reshaped.glyphs.to_mut().first_mut().unwrap();
|
||||
glyph.shrink_left(shrink_amount);
|
||||
let amount_abs = shrink_amount.at(reshaped.size);
|
||||
reshaped.width -= amount_abs;
|
||||
width -= amount_abs;
|
||||
} else if p.cjk_latin_spacing
|
||||
&& first_glyph.is_cj_script()
|
||||
&& first_glyph.x_offset > Em::zero()
|
||||
{
|
||||
// If the first glyph is a CJK character adjusted by
|
||||
// [`add_cjk_latin_spacing`], restore the original width.
|
||||
let shrink_amount = first_glyph.x_offset;
|
||||
let glyph = reshaped.glyphs.to_mut().first_mut().unwrap();
|
||||
glyph.x_advance -= shrink_amount;
|
||||
glyph.x_offset = Em::zero();
|
||||
glyph.adjustability.shrinkability.0 = Em::zero();
|
||||
let amount_abs = shrink_amount.at(reshaped.size);
|
||||
reshaped.width -= amount_abs;
|
||||
width -= amount_abs;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Measure the inner items.
|
||||
for item in inner {
|
||||
width += item.width();
|
||||
}
|
||||
|
||||
Line {
|
||||
bidi: &p.bidi,
|
||||
trimmed: range,
|
||||
end,
|
||||
first,
|
||||
inner,
|
||||
last,
|
||||
width,
|
||||
justify,
|
||||
dash,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
@ -354,18 +417,19 @@ pub fn commit(
|
||||
let mut remaining = width - line.width - p.hang;
|
||||
let mut offset = Abs::zero();
|
||||
|
||||
// Reorder the line from logical to visual order.
|
||||
let (reordered, starts_rtl) = reorder(line);
|
||||
if !starts_rtl {
|
||||
// We always build the line from left to right. In an LTR paragraph, we must
|
||||
// thus add the hanging indent to the offset. When the paragraph is RTL, the
|
||||
// hanging indent arises naturally due to the line width.
|
||||
if p.dir == Dir::LTR {
|
||||
offset += p.hang;
|
||||
}
|
||||
|
||||
// Handle hanging punctuation to the left.
|
||||
if let Some(Item::Text(text)) = reordered.first() {
|
||||
if let Some(Item::Text(text)) = line.items.first() {
|
||||
if let Some(glyph) = text.glyphs.first() {
|
||||
if !text.dir.is_positive()
|
||||
&& TextElem::overhang_in(text.styles)
|
||||
&& (reordered.len() > 1 || text.glyphs.len() > 1)
|
||||
&& (line.items.len() > 1 || text.glyphs.len() > 1)
|
||||
{
|
||||
let amount = overhang(glyph.c) * glyph.x_advance.at(text.size);
|
||||
offset -= amount;
|
||||
@ -375,11 +439,11 @@ pub fn commit(
|
||||
}
|
||||
|
||||
// Handle hanging punctuation to the right.
|
||||
if let Some(Item::Text(text)) = reordered.last() {
|
||||
if let Some(Item::Text(text)) = line.items.last() {
|
||||
if let Some(glyph) = text.glyphs.last() {
|
||||
if text.dir.is_positive()
|
||||
&& TextElem::overhang_in(text.styles)
|
||||
&& (reordered.len() > 1 || text.glyphs.len() > 1)
|
||||
&& (line.items.len() > 1 || text.glyphs.len() > 1)
|
||||
{
|
||||
let amount = overhang(glyph.c) * glyph.x_advance.at(text.size);
|
||||
remaining += amount;
|
||||
@ -397,16 +461,16 @@ pub fn commit(
|
||||
let mut extra_justification = Abs::zero();
|
||||
|
||||
let shrinkability = line.shrinkability();
|
||||
let stretch = line.stretchability();
|
||||
let stretchability = line.stretchability();
|
||||
if remaining < Abs::zero() && shrinkability > Abs::zero() && shrink {
|
||||
// Attempt to reduce the length of the line, using shrinkability.
|
||||
justification_ratio = (remaining / shrinkability).max(-1.0);
|
||||
remaining = (remaining + shrinkability).min(Abs::zero());
|
||||
} else if line.justify && fr.is_zero() {
|
||||
// Attempt to increase the length of the line, using stretchability.
|
||||
if stretch > Abs::zero() {
|
||||
justification_ratio = (remaining / stretch).min(1.0);
|
||||
remaining = (remaining - stretch).max(Abs::zero());
|
||||
if stretchability > Abs::zero() {
|
||||
justification_ratio = (remaining / stretchability).min(1.0);
|
||||
remaining = (remaining - stretchability).max(Abs::zero());
|
||||
}
|
||||
|
||||
let justifiables = line.justifiables();
|
||||
@ -422,7 +486,7 @@ pub fn commit(
|
||||
|
||||
// Build the frames and determine the height and baseline.
|
||||
let mut frames = vec![];
|
||||
for item in reordered {
|
||||
for item in line.items.iter() {
|
||||
let mut push = |offset: &mut Abs, frame: Frame| {
|
||||
let width = frame.width();
|
||||
top.set_max(frame.baseline());
|
||||
@ -449,8 +513,12 @@ pub fn commit(
|
||||
}
|
||||
}
|
||||
Item::Text(shaped) => {
|
||||
let mut frame =
|
||||
shaped.build(engine, justification_ratio, extra_justification);
|
||||
let mut frame = shaped.build(
|
||||
engine,
|
||||
&p.spans,
|
||||
justification_ratio,
|
||||
extra_justification,
|
||||
);
|
||||
frame.post_process(shaped.styles);
|
||||
push(&mut offset, frame);
|
||||
}
|
||||
@ -488,46 +556,6 @@ pub fn commit(
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
/// Return a line's items in visual order.
|
||||
fn reorder<'a>(line: &'a Line<'a>) -> (Vec<&Item<'a>>, bool) {
|
||||
let mut reordered = vec![];
|
||||
|
||||
// The bidi crate doesn't like empty lines.
|
||||
if line.trimmed.is_empty() {
|
||||
return (line.slice(line.trimmed.clone()).collect(), false);
|
||||
}
|
||||
|
||||
// Find the paragraph that contains the line.
|
||||
let para = line
|
||||
.bidi
|
||||
.paragraphs
|
||||
.iter()
|
||||
.find(|para| para.range.contains(&line.trimmed.start))
|
||||
.unwrap();
|
||||
|
||||
// Compute the reordered ranges in visual order (left to right).
|
||||
let (levels, runs) = line.bidi.visual_runs(para, line.trimmed.clone());
|
||||
let starts_rtl = levels.first().is_some_and(|level| level.is_rtl());
|
||||
|
||||
// Collect the reordered items.
|
||||
for run in runs {
|
||||
// Skip reset L1 runs because handling them would require reshaping
|
||||
// again in some cases.
|
||||
if line.bidi.levels[run.start] != levels[run.start] {
|
||||
continue;
|
||||
}
|
||||
|
||||
let prev = reordered.len();
|
||||
reordered.extend(line.slice(run.clone()));
|
||||
|
||||
if levels[run.start].is_rtl() {
|
||||
reordered[prev..].reverse();
|
||||
}
|
||||
}
|
||||
|
||||
(reordered, starts_rtl)
|
||||
}
|
||||
|
||||
/// How much a character should hang into the end margin.
|
||||
///
|
||||
/// For more discussion, see:
|
||||
@ -548,3 +576,119 @@ fn overhang(c: char) -> f64 {
|
||||
_ => 0.0,
|
||||
}
|
||||
}
|
||||
|
||||
/// A collection of owned or borrowed paragraph items.
|
||||
pub struct Items<'a>(Vec<ItemEntry<'a>>);
|
||||
|
||||
impl<'a> Items<'a> {
|
||||
/// Create empty items.
|
||||
pub fn new() -> Self {
|
||||
Self(vec![])
|
||||
}
|
||||
|
||||
/// Push a new item.
|
||||
pub fn push(&mut self, entry: impl Into<ItemEntry<'a>>) {
|
||||
self.0.push(entry.into());
|
||||
}
|
||||
|
||||
/// Iterate over the items
|
||||
pub fn iter(&self) -> impl Iterator<Item = &Item<'a>> {
|
||||
self.0.iter().map(|item| &**item)
|
||||
}
|
||||
|
||||
/// Access the first item.
|
||||
pub fn first(&self) -> Option<&Item<'a>> {
|
||||
self.0.first().map(|item| &**item)
|
||||
}
|
||||
|
||||
/// Access the last item.
|
||||
pub fn last(&self) -> Option<&Item<'a>> {
|
||||
self.0.last().map(|item| &**item)
|
||||
}
|
||||
|
||||
/// Access the first item mutably, if it is text.
|
||||
pub fn first_text_mut(&mut self) -> Option<&mut ShapedText<'a>> {
|
||||
self.0.first_mut()?.text_mut()
|
||||
}
|
||||
|
||||
/// Access the last item mutably, if it is text.
|
||||
pub fn last_text_mut(&mut self) -> Option<&mut ShapedText<'a>> {
|
||||
self.0.last_mut()?.text_mut()
|
||||
}
|
||||
|
||||
/// Reorder the items starting at the given index to RTL.
|
||||
pub fn reorder(&mut self, from: usize) {
|
||||
self.0[from..].reverse()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> FromIterator<ItemEntry<'a>> for Items<'a> {
|
||||
fn from_iter<I: IntoIterator<Item = ItemEntry<'a>>>(iter: I) -> Self {
|
||||
Self(iter.into_iter().collect())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Deref for Items<'a> {
|
||||
type Target = Vec<ItemEntry<'a>>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> DerefMut for Items<'a> {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
|
||||
/// A reference to or a boxed item.
|
||||
pub enum ItemEntry<'a> {
|
||||
Ref(&'a Item<'a>),
|
||||
Box(Box<Item<'a>>),
|
||||
}
|
||||
|
||||
impl<'a> ItemEntry<'a> {
|
||||
fn text_mut(&mut self) -> Option<&mut ShapedText<'a>> {
|
||||
match self {
|
||||
Self::Ref(item) => {
|
||||
let text = item.text()?;
|
||||
*self = Self::Box(Box::new(Item::Text(text.clone())));
|
||||
match self {
|
||||
Self::Box(item) => item.text_mut(),
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
Self::Box(item) => item.text_mut(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Deref for ItemEntry<'a> {
|
||||
type Target = Item<'a>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
match self {
|
||||
Self::Ref(item) => item,
|
||||
Self::Box(item) => item,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for ItemEntry<'_> {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
|
||||
(**self).fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a Item<'a>> for ItemEntry<'a> {
|
||||
fn from(item: &'a Item<'a>) -> Self {
|
||||
Self::Ref(item)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<Item<'a>> for ItemEntry<'a> {
|
||||
fn from(item: Item<'a>) -> Self {
|
||||
Self::Box(Box::new(item))
|
||||
}
|
||||
}
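`ItemEntry` lets a line either borrow an item from the preparation or own a modified copy, promoting a borrowed entry to an owned one on first mutation. A self-contained sketch of that borrow-or-own, clone-on-write pattern over a plain `String` payload (illustrative names, not the Typst types):

```rust
use std::ops::Deref;

/// Either a shared reference to a value or an owned, modified copy of it.
enum Entry<'a> {
    Ref(&'a String),
    Owned(Box<String>),
}

impl<'a> Entry<'a> {
    /// Get mutable access, cloning the borrowed value on first mutation.
    fn make_mut(&mut self) -> &mut String {
        if let Entry::Ref(shared) = *self {
            *self = Entry::Owned(Box::new(shared.clone()));
        }
        match self {
            Entry::Owned(owned) => &mut **owned,
            Entry::Ref(_) => unreachable!("promoted to Owned above"),
        }
    }
}

impl<'a> Deref for Entry<'a> {
    type Target = String;

    fn deref(&self) -> &String {
        match self {
            Entry::Ref(shared) => shared,
            Entry::Owned(owned) => owned,
        }
    }
}

fn main() {
    let original = String::from("shaped");
    let mut entry = Entry::Ref(&original);
    entry.make_mut().push_str("-reshaped");
    assert_eq!(entry.as_str(), "shaped-reshaped");
    assert_eq!(original, "shaped"); // the shared value is untouched
}
```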
|
@ -9,8 +9,8 @@ use comemo::{Track, Tracked, TrackedMut};
|
||||
|
||||
use self::collect::{collect, Item, Segment, SpanMapper};
|
||||
use self::finalize::finalize;
|
||||
use self::line::{commit, line, Dash, Line};
|
||||
use self::linebreak::{linebreak, Breakpoint};
|
||||
use self::line::{commit, line, Line};
|
||||
use self::linebreak::{is_default_ignorable, linebreak, Breakpoint};
|
||||
use self::prepare::{prepare, Preparation};
|
||||
use self::shaping::{
|
||||
cjk_punct_style, is_of_cj_script, shape_range, ShapedGlyph, ShapedText,
|
||||
|
@ -13,16 +13,24 @@ use crate::text::{Costs, Lang, TextElem};
|
||||
/// Only when a line break falls onto a text index that is not safe-to-break per
|
||||
/// rustybuzz, we have to reshape that portion.
|
||||
pub struct Preparation<'a> {
|
||||
/// The paragraph's full text.
|
||||
pub text: &'a str,
|
||||
/// Bidirectional text embedding levels for the paragraph.
|
||||
pub bidi: BidiInfo<'a>,
|
||||
///
/// This is `None` if the paragraph is BiDi-uniform (all in the base direction).
pub bidi: Option<BidiInfo<'a>>,
|
||||
/// Text runs, spacing and layouted elements.
|
||||
pub items: Vec<Item<'a>>,
|
||||
pub items: Vec<(Range, Item<'a>)>,
|
||||
/// Maps from byte indices to item indices.
|
||||
pub indices: Vec<usize>,
|
||||
/// The span mapper.
|
||||
pub spans: SpanMapper,
|
||||
/// Whether to hyphenate if it's the same for all children.
|
||||
pub hyphenate: Option<bool>,
|
||||
/// Costs for various layout decisions.
|
||||
pub costs: Costs,
|
||||
/// The dominant direction.
|
||||
pub dir: Dir,
|
||||
/// The text language if it's the same for all children.
|
||||
pub lang: Option<Lang>,
|
||||
/// The paragraph's resolved horizontal alignment.
|
||||
@ -44,46 +52,18 @@ pub struct Preparation<'a> {
|
||||
}
|
||||
|
||||
impl<'a> Preparation<'a> {
|
||||
/// Find the item that contains the given `text_offset`.
|
||||
pub fn find(&self, text_offset: usize) -> Option<&Item<'a>> {
|
||||
let mut cursor = 0;
|
||||
for item in &self.items {
|
||||
let end = cursor + item.textual_len();
|
||||
if (cursor..end).contains(&text_offset) {
|
||||
return Some(item);
|
||||
}
|
||||
cursor = end;
|
||||
}
|
||||
None
|
||||
/// Get the item that contains the given `text_offset`.
|
||||
pub fn get(&self, offset: usize) -> &(Range, Item<'a>) {
|
||||
let idx = self.indices.get(offset).copied().unwrap_or(0);
|
||||
&self.items[idx]
|
||||
}
|
||||
|
||||
/// Return the items that intersect the given `text_range`.
|
||||
///
|
||||
/// Returns the expanded range around the items and the items.
|
||||
pub fn slice(&self, text_range: Range) -> (Range, &[Item<'a>]) {
|
||||
let mut cursor = 0;
|
||||
let mut start = 0;
|
||||
let mut end = 0;
|
||||
let mut expanded = text_range.clone();
|
||||
|
||||
for (i, item) in self.items.iter().enumerate() {
|
||||
if cursor <= text_range.start {
|
||||
start = i;
|
||||
expanded.start = cursor;
|
||||
}
|
||||
|
||||
let len = item.textual_len();
|
||||
if cursor < text_range.end || cursor + len <= text_range.end {
|
||||
end = i + 1;
|
||||
expanded.end = cursor + len;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
|
||||
cursor += len;
|
||||
}
|
||||
|
||||
(expanded, &self.items[start..end])
|
||||
/// Iterate over the items that intersect the given `sliced` range.
|
||||
pub fn slice(&self, sliced: Range) -> impl Iterator<Item = &(Range, Item<'a>)> {
|
||||
let start = self.indices.get(sliced.start).copied().unwrap_or(0);
|
||||
self.items[start..].iter().take_while(move |(range, _)| {
|
||||
range.start < sliced.end || range.end <= sliced.end
|
||||
})
|
||||
}
|
||||
}
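The reworked `Preparation` stores `(Range, Item)` pairs plus an `indices` vector that maps every byte of the paragraph to the index of the item covering it, turning `get` and `slice` into cheap lookups instead of linear scans. A small sketch of the same indexing scheme over arbitrary payloads (hypothetical types, not the real `Item`):

```rust
use std::ops::Range;

/// Items annotated with the byte range of the full text they cover, plus a
/// byte-to-item-index map built once up front.
struct Indexed<T> {
    items: Vec<(Range<usize>, T)>,
    indices: Vec<usize>,
}

impl<T> Indexed<T> {
    fn new(items: Vec<(Range<usize>, T)>) -> Self {
        // For every byte covered by item `i`, record `i`.
        let mut indices = Vec::new();
        for (i, (range, _)) in items.iter().enumerate() {
            indices.extend(range.clone().map(|_| i));
        }
        Self { items, indices }
    }

    /// The item containing the given byte offset.
    fn get(&self, offset: usize) -> &(Range<usize>, T) {
        let idx = self.indices.get(offset).copied().unwrap_or(0);
        &self.items[idx]
    }

    /// All items whose range intersects the given byte range.
    fn slice(&self, sliced: Range<usize>) -> impl Iterator<Item = &(Range<usize>, T)> + '_ {
        let start = self.indices.get(sliced.start).copied().unwrap_or(0);
        self.items[start..]
            .iter()
            .take_while(move |(range, _)| range.start < sliced.end)
    }
}

fn main() {
    let indexed = Indexed::new(vec![(0..5, "hello"), (5..6, " "), (6..11, "world")]);
    assert_eq!(indexed.get(7).1, "world");
    let hit: Vec<_> = indexed.slice(3..7).map(|(_, s)| *s).collect();
    assert_eq!(hit, ["hello", " ", "world"]);
}
```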
|
||||
@ -99,42 +79,57 @@ pub fn prepare<'a>(
|
||||
spans: SpanMapper,
|
||||
styles: StyleChain<'a>,
|
||||
) -> SourceResult<Preparation<'a>> {
|
||||
let bidi = BidiInfo::new(
|
||||
text,
|
||||
match TextElem::dir_in(styles) {
|
||||
Dir::LTR => Some(BidiLevel::ltr()),
|
||||
Dir::RTL => Some(BidiLevel::rtl()),
|
||||
_ => None,
|
||||
},
|
||||
);
|
||||
let dir = TextElem::dir_in(styles);
|
||||
let default_level = match dir {
|
||||
Dir::RTL => BidiLevel::rtl(),
|
||||
_ => BidiLevel::ltr(),
|
||||
};
|
||||
|
||||
let bidi = BidiInfo::new(text, Some(default_level));
|
||||
let is_bidi = bidi
|
||||
.levels
|
||||
.iter()
|
||||
.any(|level| level.is_ltr() != default_level.is_ltr());
|
||||
|
||||
let mut cursor = 0;
|
||||
let mut items = Vec::with_capacity(segments.len());
|
||||
|
||||
// Shape the text to finalize the items.
|
||||
for segment in segments {
|
||||
let end = cursor + segment.textual_len();
|
||||
let len = segment.textual_len();
|
||||
let end = cursor + len;
|
||||
let range = cursor..end;
|
||||
|
||||
match segment {
|
||||
Segment::Text(_, styles) => {
|
||||
shape_range(&mut items, engine, &bidi, cursor..end, &spans, styles);
|
||||
shape_range(&mut items, engine, text, &bidi, range, styles);
|
||||
}
|
||||
Segment::Item(item) => items.push(item),
|
||||
Segment::Item(item) => items.push((range, item)),
|
||||
}
|
||||
|
||||
cursor = end;
|
||||
}
|
||||
|
||||
// Build the mapping from byte to item indices.
|
||||
let mut indices = Vec::with_capacity(text.len());
|
||||
for (i, (range, _)) in items.iter().enumerate() {
|
||||
indices.extend(range.clone().map(|_| i));
|
||||
}
|
||||
|
||||
let cjk_latin_spacing = TextElem::cjk_latin_spacing_in(styles).is_auto();
|
||||
if cjk_latin_spacing {
|
||||
add_cjk_latin_spacing(&mut items);
|
||||
}
|
||||
|
||||
Ok(Preparation {
|
||||
bidi,
|
||||
text,
|
||||
bidi: is_bidi.then_some(bidi),
|
||||
items,
|
||||
indices,
|
||||
spans,
|
||||
hyphenate: children.shared_get(styles, TextElem::hyphenate_in),
|
||||
costs: TextElem::costs_in(styles),
|
||||
dir,
|
||||
lang: children.shared_get(styles, TextElem::lang_in),
|
||||
align: AlignElem::alignment_in(styles).resolve(styles).x,
|
||||
justify: ParElem::justify_in(styles),
|
||||
@ -150,10 +145,14 @@ pub fn prepare<'a>(
|
||||
/// Add some spacing between Han characters and western characters. See
|
||||
/// Requirements for Chinese Text Layout, Section 3.2.2 Mixed Text Composition
|
||||
/// in Horizontal Written Mode
|
||||
fn add_cjk_latin_spacing(items: &mut [Item]) {
|
||||
let mut items = items.iter_mut().filter(|x| !matches!(x, Item::Tag(_))).peekable();
|
||||
fn add_cjk_latin_spacing(items: &mut [(Range, Item)]) {
|
||||
let mut items = items
|
||||
.iter_mut()
|
||||
.filter(|(_, x)| !matches!(x, Item::Tag(_)))
|
||||
.peekable();
|
||||
|
||||
let mut prev: Option<&ShapedGlyph> = None;
|
||||
while let Some(item) = items.next() {
|
||||
while let Some((_, item)) = items.next() {
|
||||
let Some(text) = item.text_mut() else {
|
||||
prev = None;
|
||||
continue;
|
||||
@ -168,7 +167,7 @@ fn add_cjk_latin_spacing(items: &mut [Item]) {
|
||||
let next = glyphs.peek().map(|n| n as _).or_else(|| {
|
||||
items
|
||||
.peek()
|
||||
.and_then(|i| i.text())
|
||||
.and_then(|(_, i)| i.text())
|
||||
.and_then(|shaped| shaped.glyphs.first())
|
||||
});
|
||||
|
||||
|
@ -14,7 +14,6 @@ use super::{Item, Range, SpanMapper};
|
||||
use crate::engine::Engine;
|
||||
use crate::foundations::{Smart, StyleChain};
|
||||
use crate::layout::{Abs, Dir, Em, Frame, FrameItem, Point, Size};
|
||||
use crate::syntax::Span;
|
||||
use crate::text::{
|
||||
decorate, families, features, variant, Font, FontVariant, Glyph, Lang, Region,
|
||||
TextElem, TextItem,
|
||||
@ -27,6 +26,7 @@ use crate::World;
|
||||
/// This type contains owned or borrowed shaped text runs, which can be
|
||||
/// measured, used to reshape substrings more quickly and converted into a
|
||||
/// frame.
|
||||
#[derive(Clone)]
|
||||
pub struct ShapedText<'a> {
|
||||
/// The start of the text in the full paragraph.
|
||||
pub base: usize,
|
||||
@ -80,8 +80,6 @@ pub struct ShapedGlyph {
|
||||
pub safe_to_break: bool,
|
||||
/// The first char in this glyph's cluster.
|
||||
pub c: char,
|
||||
/// The source code location of the glyph and its byte offset within it.
|
||||
pub span: (Span, u16),
|
||||
/// Whether this glyph is justifiable for CJK scripts.
|
||||
pub is_justifiable: bool,
|
||||
/// The script of the glyph.
|
||||
@ -214,6 +212,7 @@ impl<'a> ShapedText<'a> {
|
||||
pub fn build(
|
||||
&self,
|
||||
engine: &Engine,
|
||||
spans: &SpanMapper,
|
||||
justification_ratio: f64,
|
||||
extra_justification: Abs,
|
||||
) -> Frame {
|
||||
@ -268,7 +267,7 @@ impl<'a> ShapedText<'a> {
|
||||
// We may not be able to reach the offset completely if
|
||||
// it exceeds u16, but better to have a roughly correct
|
||||
// span offset than nothing.
|
||||
let mut span = shaped.span;
|
||||
let mut span = spans.span_at(shaped.range.start);
|
||||
span.1 = span.1.saturating_add(span_offset.saturating_as());
|
||||
|
||||
// |<---- a Glyph ---->|
|
||||
@ -331,7 +330,7 @@ impl<'a> ShapedText<'a> {
|
||||
}
|
||||
|
||||
/// Measure the top and bottom extent of this text.
|
||||
fn measure(&self, engine: &Engine) -> (Abs, Abs) {
|
||||
pub fn measure(&self, engine: &Engine) -> (Abs, Abs) {
|
||||
let mut top = Abs::zero();
|
||||
let mut bottom = Abs::zero();
|
||||
|
||||
@ -409,12 +408,7 @@ impl<'a> ShapedText<'a> {
|
||||
/// shaping process if possible.
|
||||
///
|
||||
/// The text `range` is relative to the whole paragraph.
|
||||
pub fn reshape(
|
||||
&'a self,
|
||||
engine: &Engine,
|
||||
spans: &SpanMapper,
|
||||
text_range: Range,
|
||||
) -> ShapedText<'a> {
|
||||
pub fn reshape(&'a self, engine: &Engine, text_range: Range) -> ShapedText<'a> {
|
||||
let text = &self.text[text_range.start - self.base..text_range.end - self.base];
|
||||
if let Some(glyphs) = self.slice_safe_to_break(text_range.clone()) {
|
||||
#[cfg(debug_assertions)]
|
||||
@ -436,7 +430,6 @@ impl<'a> ShapedText<'a> {
|
||||
engine,
|
||||
text_range.start,
|
||||
text,
|
||||
spans,
|
||||
self.styles,
|
||||
self.dir,
|
||||
self.lang,
|
||||
@ -445,6 +438,16 @@ impl<'a> ShapedText<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Derive an empty text run with the same properties as this one.
|
||||
pub fn empty(&self) -> Self {
|
||||
Self {
|
||||
text: "",
|
||||
width: Abs::zero(),
|
||||
glyphs: Cow::Borrowed(&[]),
|
||||
..*self
|
||||
}
|
||||
}
|
||||
|
||||
/// Push a hyphen to end of the text.
|
||||
pub fn push_hyphen(&mut self, engine: &Engine, fallback: bool) {
|
||||
self.insert_hyphen(engine, fallback, Side::Right)
|
||||
@ -493,7 +496,6 @@ impl<'a> ShapedText<'a> {
|
||||
range,
|
||||
safe_to_break: true,
|
||||
c: '-',
|
||||
span: (Span::detached(), 0),
|
||||
is_justifiable: false,
|
||||
script: Script::Common,
|
||||
};
|
||||
@ -592,11 +594,11 @@ impl Debug for ShapedText<'_> {
|
||||
/// Group a range of text by BiDi level and script, shape the runs and generate
|
||||
/// items for them.
|
||||
pub fn shape_range<'a>(
|
||||
items: &mut Vec<Item<'a>>,
|
||||
items: &mut Vec<(Range, Item<'a>)>,
|
||||
engine: &Engine,
|
||||
text: &'a str,
|
||||
bidi: &BidiInfo<'a>,
|
||||
range: Range,
|
||||
spans: &SpanMapper,
|
||||
styles: StyleChain<'a>,
|
||||
) {
|
||||
let script = TextElem::script_in(styles);
|
||||
@ -604,17 +606,9 @@ pub fn shape_range<'a>(
|
||||
let region = TextElem::region_in(styles);
|
||||
let mut process = |range: Range, level: BidiLevel| {
|
||||
let dir = if level.is_ltr() { Dir::LTR } else { Dir::RTL };
|
||||
let shaped = shape(
|
||||
engine,
|
||||
range.start,
|
||||
&bidi.text[range],
|
||||
spans,
|
||||
styles,
|
||||
dir,
|
||||
lang,
|
||||
region,
|
||||
);
|
||||
items.push(Item::Text(shaped));
|
||||
let shaped =
|
||||
shape(engine, range.start, &text[range.clone()], styles, dir, lang, region);
|
||||
items.push((range, Item::Text(shaped)));
|
||||
};
|
||||
|
||||
let mut prev_level = BidiLevel::ltr();
|
||||
@ -625,14 +619,14 @@ pub fn shape_range<'a>(
|
||||
// set (rather than inferred from the glyphs), we keep the script at an
|
||||
// unchanging `Script::Unknown` so that only level changes cause breaks.
|
||||
for i in range.clone() {
|
||||
if !bidi.text.is_char_boundary(i) {
|
||||
if !text.is_char_boundary(i) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let level = bidi.levels[i];
|
||||
let curr_script = match script {
|
||||
Smart::Auto => {
|
||||
bidi.text[i..].chars().next().map_or(Script::Unknown, |c| c.script())
|
||||
text[i..].chars().next().map_or(Script::Unknown, |c| c.script())
|
||||
}
|
||||
Smart::Custom(_) => Script::Unknown,
|
||||
};
|
||||
@ -668,7 +662,6 @@ fn shape<'a>(
|
||||
engine: &Engine,
|
||||
base: usize,
|
||||
text: &'a str,
|
||||
spans: &SpanMapper,
|
||||
styles: StyleChain<'a>,
|
||||
dir: Dir,
|
||||
lang: Lang,
|
||||
@ -677,7 +670,6 @@ fn shape<'a>(
|
||||
let size = TextElem::size_in(styles);
|
||||
let mut ctx = ShapingContext {
|
||||
engine,
|
||||
spans,
|
||||
size,
|
||||
glyphs: vec![],
|
||||
used: vec![],
|
||||
@ -717,7 +709,6 @@ fn shape<'a>(
|
||||
/// Holds shaping results and metadata common to all shaped segments.
|
||||
struct ShapingContext<'a, 'v> {
|
||||
engine: &'a Engine<'v>,
|
||||
spans: &'a SpanMapper,
|
||||
glyphs: Vec<ShapedGlyph>,
|
||||
used: Vec<Font>,
|
||||
styles: StyleChain<'a>,
|
||||
@ -830,7 +821,6 @@ fn shape_segment<'a>(
|
||||
range: start..end,
|
||||
safe_to_break: !info.unsafe_to_break(),
|
||||
c,
|
||||
span: ctx.spans.span_at(start),
|
||||
is_justifiable: is_justifiable(
|
||||
c,
|
||||
script,
|
||||
@ -921,7 +911,6 @@ fn shape_tofus(ctx: &mut ShapingContext, base: usize, text: &str, font: Font) {
|
||||
range: start..end,
|
||||
safe_to_break: true,
|
||||
c,
|
||||
span: ctx.spans.span_at(start),
|
||||
is_justifiable: is_justifiable(
|
||||
c,
|
||||
script,
|
||||
|
@ -106,7 +106,6 @@ pub fn define(global: &mut Scope) {
|
||||
global.define_elem::<ColumnsElem>();
|
||||
global.define_elem::<ColbreakElem>();
|
||||
global.define_elem::<PlaceElem>();
|
||||
global.define_elem::<FlushElem>();
|
||||
global.define_elem::<AlignElem>();
|
||||
global.define_elem::<PadElem>();
|
||||
global.define_elem::<RepeatElem>();
|
||||
|
@ -46,7 +46,7 @@ pub struct PlaceElem {
|
||||
/// Floating elements are positioned at the top or bottom of the page,
|
||||
/// displacing in-flow content. They are always placed in the in-flow
|
||||
/// order relative to each other, as well as before any content following
|
||||
/// a later [`flush`] element.
|
||||
/// a later [`place.flush`] element.
|
||||
///
|
||||
/// ```example
|
||||
/// #set page(height: 150pt)
|
||||
|
@ -60,10 +60,12 @@ use std::collections::HashSet;
|
||||
use std::ops::{Deref, Range};
|
||||
|
||||
use comemo::{Track, Tracked, Validate};
|
||||
use ecow::{EcoString, EcoVec};
|
||||
use ecow::{eco_format, eco_vec, EcoString, EcoVec};
|
||||
use typst_timing::{timed, TimingScope};
|
||||
|
||||
use crate::diag::{warning, FileResult, SourceDiagnostic, SourceResult, Warned};
|
||||
use crate::diag::{
|
||||
warning, FileError, FileResult, SourceDiagnostic, SourceResult, Warned,
|
||||
};
|
||||
use crate::engine::{Engine, Route, Sink, Traced};
|
||||
use crate::foundations::{
|
||||
Array, Bytes, Datetime, Dict, Module, Scope, StyleChain, Styles, Value,
|
||||
@ -110,13 +112,19 @@ fn compile_inner(
|
||||
let library = world.library();
|
||||
let styles = StyleChain::new(&library.styles);
|
||||
|
||||
// Fetch the main source file once.
|
||||
let main = world.main();
|
||||
let main = world
|
||||
.source(main)
|
||||
.map_err(|err| hint_invalid_main_file(world, err, main))?;
|
||||
|
||||
// First evaluate the main source file into a module.
|
||||
let content = crate::eval::eval(
|
||||
world,
|
||||
traced,
|
||||
sink.track_mut(),
|
||||
Route::default().track(),
|
||||
&world.main(),
|
||||
&main,
|
||||
)?
|
||||
.content();
|
||||
|
||||
@ -205,8 +213,8 @@ pub trait World: Send + Sync {
|
||||
/// Metadata about all known fonts.
|
||||
fn book(&self) -> &LazyHash<FontBook>;
|
||||
|
||||
/// Access the main source file.
|
||||
fn main(&self) -> Source;
|
||||
/// Get the file id of the main source file.
|
||||
fn main(&self) -> FileId;
|
||||
|
||||
/// Try to access the specified source file.
|
||||
fn source(&self, id: FileId) -> FileResult<Source>;
|
||||
@ -248,7 +256,7 @@ macro_rules! delegate_for_ptr {
|
||||
self.deref().book()
|
||||
}
|
||||
|
||||
fn main(&self) -> Source {
|
||||
fn main(&self) -> FileId {
|
||||
self.deref().main()
|
||||
}
|
||||
|
||||
@ -404,3 +412,48 @@ fn prelude(global: &mut Scope) {
|
||||
global.define("horizon", Alignment::HORIZON);
|
||||
global.define("bottom", Alignment::BOTTOM);
|
||||
}
|
||||
|
||||
/// Adds useful hints when the main source file couldn't be read
|
||||
/// and returns the final diagnostic.
|
||||
fn hint_invalid_main_file(
|
||||
world: Tracked<dyn World + '_>,
|
||||
file_error: FileError,
|
||||
input: FileId,
|
||||
) -> EcoVec<SourceDiagnostic> {
|
||||
let is_utf8_error = matches!(file_error, FileError::InvalidUtf8);
|
||||
let mut diagnostic =
|
||||
SourceDiagnostic::error(Span::detached(), EcoString::from(file_error));
|
||||
|
||||
// Attempt to provide helpful hints for UTF-8 errors. Perhaps the user
|
||||
// mistyped the filename. For example, they could have written "file.pdf"
|
||||
// instead of "file.typ".
|
||||
if is_utf8_error {
|
||||
let path = input.vpath();
|
||||
let extension = path.as_rootless_path().extension();
|
||||
if extension.is_some_and(|extension| extension == "typ") {
|
||||
// No hints if the file is already a .typ file.
|
||||
// The file is indeed just invalid.
|
||||
return eco_vec![diagnostic];
|
||||
}
|
||||
|
||||
match extension {
|
||||
Some(extension) => {
|
||||
diagnostic.hint(eco_format!(
|
||||
"a file with the `.{}` extension is not usually a Typst file",
|
||||
extension.to_string_lossy()
|
||||
));
|
||||
}
|
||||
|
||||
None => {
|
||||
diagnostic
|
||||
.hint("a file without an extension is not usually a Typst file");
|
||||
}
|
||||
};
|
||||
|
||||
if world.source(input.with_extension("typ")).is_ok() {
|
||||
diagnostic.hint("check if you meant to use the `.typ` extension instead");
|
||||
}
|
||||
}
|
||||
|
||||
eco_vec![diagnostic]
|
||||
}
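The hint logic reduces to inspecting the main file's extension once UTF-8 decoding fails. A reduced sketch of just that decision, returning plain strings instead of `SourceDiagnostic` hints; note that the real code only suggests the `.typ` spelling when such a sibling file actually exists in the world:

```rust
use std::path::Path;

/// Suggest hints for a main file that failed to decode as UTF-8, based on
/// its extension. Returns an empty list for a genuine `.typ` file.
fn utf8_error_hints(path: &Path) -> Vec<String> {
    let extension = path.extension();
    if extension.is_some_and(|ext| ext == "typ") {
        // Already a .typ file; the file really is invalid, so no hints.
        return Vec::new();
    }

    let mut hints = Vec::new();
    match extension {
        Some(ext) => hints.push(format!(
            "a file with the `.{}` extension is not usually a Typst file",
            ext.to_string_lossy()
        )),
        None => hints.push(
            "a file without an extension is not usually a Typst file".into(),
        ),
    }
    // The real code only adds this hint if a sibling `.typ` file exists.
    hints.push("check if you meant to use the `.typ` extension instead".into());
    hints
}

fn main() {
    assert_eq!(utf8_error_hints(Path::new("thesis.typ")), Vec::<String>::new());
    assert_eq!(utf8_error_hints(Path::new("thesis.pdf")).len(), 2);
}
```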
|
@ -52,31 +52,47 @@ pub struct AttachElem {
|
||||
impl LayoutMath for Packed<AttachElem> {
|
||||
#[typst_macros::time(name = "math.attach", span = self.span())]
|
||||
fn layout_math(&self, ctx: &mut MathContext, styles: StyleChain) -> SourceResult<()> {
|
||||
type GetAttachment = fn(&AttachElem, styles: StyleChain) -> Option<Content>;
|
||||
|
||||
let layout_attachment =
|
||||
|ctx: &mut MathContext, styles: StyleChain, getter: GetAttachment| {
|
||||
getter(self, styles)
|
||||
.map(|elem| ctx.layout_into_fragment(&elem, styles))
|
||||
.transpose()
|
||||
};
|
||||
|
||||
let base = ctx.layout_into_fragment(self.base(), styles)?;
|
||||
|
||||
let sup_style = style_for_superscript(styles);
|
||||
let tl = layout_attachment(ctx, styles.chain(&sup_style), AttachElem::tl)?;
|
||||
let tr = layout_attachment(ctx, styles.chain(&sup_style), AttachElem::tr)?;
|
||||
let t = layout_attachment(ctx, styles.chain(&sup_style), AttachElem::t)?;
|
||||
let sup_style_chain = styles.chain(&sup_style);
|
||||
let tl = self.tl(sup_style_chain);
|
||||
let tr = self.tr(sup_style_chain);
|
||||
let primed = tr.as_ref().is_some_and(|content| content.is::<PrimesElem>());
|
||||
let t = self.t(sup_style_chain);
|
||||
|
||||
let sub_style = style_for_subscript(styles);
|
||||
let bl = layout_attachment(ctx, styles.chain(&sub_style), AttachElem::bl)?;
|
||||
let br = layout_attachment(ctx, styles.chain(&sub_style), AttachElem::br)?;
|
||||
let b = layout_attachment(ctx, styles.chain(&sub_style), AttachElem::b)?;
|
||||
let sub_style_chain = styles.chain(&sub_style);
|
||||
let bl = self.bl(sub_style_chain);
|
||||
let br = self.br(sub_style_chain);
|
||||
let b = self.b(sub_style_chain);
|
||||
|
||||
let limits = base.limits().active(styles);
|
||||
let (t, tr) = if limits || tr.is_some() { (t, tr) } else { (None, t) };
|
||||
let (t, tr) = match (t, tr) {
|
||||
(Some(t), Some(tr)) if primed && !limits => (None, Some(tr + t)),
|
||||
(Some(t), None) if !limits => (None, Some(t)),
|
||||
(t, tr) => (t, tr),
|
||||
};
|
||||
let (b, br) = if limits || br.is_some() { (b, br) } else { (None, b) };
|
||||
layout_attachments(ctx, styles, base, [tl, t, tr, bl, b, br])
|
||||
|
||||
macro_rules! layout {
|
||||
($content:ident, $style_chain:ident) => {
|
||||
$content
|
||||
.map(|elem| ctx.layout_into_fragment(&elem, $style_chain))
|
||||
.transpose()
|
||||
};
|
||||
}
|
||||
|
||||
let fragments = [
|
||||
layout!(tl, sup_style_chain)?,
|
||||
layout!(t, sup_style_chain)?,
|
||||
layout!(tr, sup_style_chain)?,
|
||||
layout!(bl, sub_style_chain)?,
|
||||
layout!(b, sub_style_chain)?,
|
||||
layout!(br, sub_style_chain)?,
|
||||
];
|
||||
|
||||
layout_attachments(ctx, styles, base, fragments)
|
||||
}
|
||||
}
|
||||
|
||||
@ -127,7 +143,7 @@ impl LayoutMath for Packed<PrimesElem> {
|
||||
prime.clone(),
|
||||
)
|
||||
}
|
||||
ctx.push(FrameFragment::new(ctx, styles, frame));
|
||||
ctx.push(FrameFragment::new(ctx, styles, frame).with_text_like(true));
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
@ -245,8 +261,11 @@ fn layout_attachments(
|
||||
base: MathFragment,
|
||||
[tl, t, tr, bl, b, br]: [Option<MathFragment>; 6],
|
||||
) -> SourceResult<()> {
|
||||
let (shift_up, shift_down) =
|
||||
compute_shifts_up_and_down(ctx, styles, &base, [&tl, &tr, &bl, &br]);
|
||||
let (shift_up, shift_down) = if [&tl, &tr, &bl, &br].iter().all(|e| e.is_none()) {
|
||||
(Abs::zero(), Abs::zero())
|
||||
} else {
|
||||
compute_shifts_up_and_down(ctx, styles, &base, [&tl, &tr, &bl, &br])
|
||||
};
|
||||
|
||||
let sup_delta = Abs::zero();
|
||||
let sub_delta = -base.italics_correction();
|
||||
@ -271,7 +290,11 @@ fn layout_attachments(
|
||||
let post_width_max =
|
||||
(sup_delta + measure!(tr, width)).max(sub_delta + measure!(br, width));
|
||||
|
||||
let (center_frame, base_offset) = attach_top_and_bottom(ctx, styles, base, t, b);
|
||||
let (center_frame, base_offset) = if t.is_none() && b.is_none() {
|
||||
(base.into_frame(), Abs::zero())
|
||||
} else {
|
||||
attach_top_and_bottom(ctx, styles, base, t, b)
|
||||
};
|
||||
if [&tl, &bl, &tr, &br].iter().all(|&e| e.is_none()) {
|
||||
ctx.push(FrameFragment::new(ctx, styles, center_frame).with_class(base_class));
|
||||
return Ok(());
|
||||
@ -327,7 +350,7 @@ fn layout_attachments(
|
||||
}
|
||||
|
||||
fn attach_top_and_bottom(
|
||||
ctx: &mut MathContext,
|
||||
ctx: &MathContext,
|
||||
styles: StyleChain,
|
||||
base: MathFragment,
|
||||
t: Option<MathFragment>,
|
||||
|
@ -65,13 +65,13 @@ pub struct CancelElem {
|
||||
|
||||
/// How much to rotate the cancel line.
|
||||
///
|
||||
/// - If given an angle, the line is rotated by that angle clockwise with
|
||||
/// respect to the y-axis.
|
||||
/// - If `{auto}`, the line assumes the default angle; that is, along the
|
||||
/// diagonal line of the content box.
|
||||
/// - If given an angle, the line is rotated by that angle clockwise w.r.t
|
||||
/// the y-axis.
|
||||
/// - If given a function `angle => angle`, the line is rotated by the angle
|
||||
/// returned by that function. The function receives the default angle as
|
||||
/// its input.
|
||||
/// rising diagonal of the content box.
|
||||
/// - If given a function `angle => angle`, the line is rotated, with
|
||||
/// respect to the y-axis, by the angle returned by that function. The
|
||||
/// function receives the default angle as its input.
|
||||
///
|
||||
/// ```example
|
||||
/// >>> #set page(width: 140pt)
|
||||
|
@ -71,6 +71,13 @@ impl MathFragment {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_ignorant(&self) -> bool {
|
||||
match self {
|
||||
Self::Frame(fragment) => fragment.ignorant,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn class(&self) -> MathClass {
|
||||
match self {
|
||||
Self::Glyph(glyph) => glyph.class,
|
||||
@ -120,17 +127,18 @@ impl MathFragment {
|
||||
}
|
||||
|
||||
pub fn is_spaced(&self) -> bool {
|
||||
self.class() == MathClass::Fence
|
||||
|| match self {
|
||||
MathFragment::Frame(frame) => {
|
||||
frame.spaced
|
||||
&& matches!(
|
||||
frame.class,
|
||||
MathClass::Normal | MathClass::Alphabetic
|
||||
)
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
if self.class() == MathClass::Fence {
|
||||
return true;
|
||||
}
|
||||
|
||||
matches!(
|
||||
self,
|
||||
MathFragment::Frame(FrameFragment {
|
||||
spaced: true,
|
||||
class: MathClass::Normal | MathClass::Alphabetic,
|
||||
..
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
pub fn is_text_like(&self) -> bool {
|
||||
@ -440,6 +448,7 @@ pub struct FrameFragment {
|
||||
pub italics_correction: Abs,
|
||||
pub accent_attach: Abs,
|
||||
pub text_like: bool,
|
||||
pub ignorant: bool,
|
||||
}
|
||||
|
||||
impl FrameFragment {
|
||||
@ -458,6 +467,7 @@ impl FrameFragment {
|
||||
italics_correction: Abs::zero(),
|
||||
accent_attach,
|
||||
text_like: false,
|
||||
ignorant: false,
|
||||
}
|
||||
}
|
||||
|
||||
@ -488,6 +498,10 @@ impl FrameFragment {
|
||||
pub fn with_text_like(self, text_like: bool) -> Self {
|
||||
Self { text_like, ..self }
|
||||
}
|
||||
|
||||
pub fn with_ignorant(self, ignorant: bool) -> Self {
|
||||
Self { ignorant, ..self }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
|
@ -49,6 +49,7 @@ use crate::foundations::{
|
||||
};
|
||||
use crate::introspection::TagElem;
|
||||
use crate::layout::{BoxElem, Frame, FrameItem, HElem, Point, Size, Spacing, VAlignment};
|
||||
use crate::realize::Behaviour;
|
||||
use crate::realize::{process, BehavedBuilder};
|
||||
use crate::text::{LinebreakElem, SpaceElem, TextElem};
|
||||
|
||||
@ -299,7 +300,7 @@ impl LayoutMath for Content {
|
||||
if let Some(elem) = self.to_packed::<TagElem>() {
|
||||
let mut frame = Frame::soft(Size::zero());
|
||||
frame.push(Point::zero(), FrameItem::Tag(elem.tag.clone()));
|
||||
ctx.push(FrameFragment::new(ctx, styles, frame));
|
||||
ctx.push(FrameFragment::new(ctx, styles, frame).with_ignorant(true));
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
@ -312,7 +313,15 @@ impl LayoutMath for Content {
|
||||
let axis = scaled!(ctx, styles, axis_height);
|
||||
frame.set_baseline(frame.height() / 2.0 + axis);
|
||||
}
|
||||
ctx.push(FrameFragment::new(ctx, styles, frame).with_spaced(true));
|
||||
|
||||
ctx.push(
|
||||
FrameFragment::new(ctx, styles, frame)
|
||||
.with_spaced(true)
|
||||
.with_ignorant(matches!(
|
||||
self.behaviour(),
|
||||
Behaviour::Invisible | Behaviour::Ignorant
|
||||
)),
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -77,14 +77,17 @@ impl MathRun {
|
||||
fragment.set_class(MathClass::Binary);
|
||||
}
|
||||
|
||||
// Insert spacing between the last and this item.
|
||||
if let Some(i) = last {
|
||||
if let Some(s) = spacing(&resolved[i], space.take(), &fragment) {
|
||||
resolved.insert(i + 1, s);
|
||||
// Insert spacing between the last and this non-ignorant item.
|
||||
if !fragment.is_ignorant() {
|
||||
if let Some(i) = last {
|
||||
if let Some(s) = spacing(&resolved[i], space.take(), &fragment) {
|
||||
resolved.insert(i + 1, s);
|
||||
}
|
||||
}
|
||||
|
||||
last = Some(resolved.len());
|
||||
}
|
||||
|
||||
last = Some(resolved.len());
|
||||
resolved.push(fragment);
|
||||
}
|
||||
|
||||
@ -156,10 +159,19 @@ impl MathRun {
|
||||
|
||||
pub fn into_fragment(self, ctx: &MathContext, styles: StyleChain) -> MathFragment {
|
||||
if self.0.len() == 1 {
|
||||
self.0.into_iter().next().unwrap()
|
||||
} else {
|
||||
FrameFragment::new(ctx, styles, self.into_frame(ctx, styles)).into()
|
||||
return self.0.into_iter().next().unwrap();
|
||||
}
|
||||
|
||||
// Fragments without a math_size are ignored: the notion of size does not
// apply to them, so their text-likeness is meaningless.
let text_like = self
|
||||
.iter()
|
||||
.filter(|e| e.math_size().is_some())
|
||||
.all(|e| e.is_text_like());
|
||||
|
||||
FrameFragment::new(ctx, styles, self.into_frame(ctx, styles))
|
||||
.with_text_like(text_like)
|
||||
.into()
|
||||
}
|
||||
|
||||
/// Returns a builder that lays out the [`MathFragment`]s into a possibly
|
||||
|
@ -93,6 +93,15 @@ impl FootnoteElem {
|
||||
Self::new(FootnoteBody::Reference(label))
|
||||
}
|
||||
|
||||
/// Creates a new footnote referencing the footnote with the specified label,
|
||||
/// with the other fields from the current footnote cloned.
|
||||
pub fn into_ref(&self, label: Label) -> Self {
|
||||
Self {
|
||||
body: FootnoteBody::Reference(label),
|
||||
..self.clone()
|
||||
}
|
||||
}
|
||||
|
||||
/// Tests if this footnote is a reference to another footnote.
|
||||
pub fn is_ref(&self) -> bool {
|
||||
matches!(self.body(), FootnoteBody::Reference(_))
|
||||
|
@ -483,7 +483,7 @@ impl OutlineEntry {
|
||||
|
||||
impl Show for Packed<OutlineEntry> {
|
||||
#[typst_macros::time(name = "outline.entry", span = self.span())]
|
||||
fn show(&self, _: &mut Engine, _: StyleChain) -> SourceResult<Content> {
|
||||
fn show(&self, _: &mut Engine, styles: StyleChain) -> SourceResult<Content> {
|
||||
let mut seq = vec![];
|
||||
let elem = self.element();
|
||||
|
||||
@ -500,7 +500,11 @@ impl Show for Packed<OutlineEntry> {
|
||||
};
|
||||
|
||||
// The body text remains overridable.
|
||||
seq.push(self.body().clone().linked(Destination::Location(location)));
|
||||
crate::text::isolate(
|
||||
self.body().clone().linked(Destination::Location(location)),
|
||||
styles,
|
||||
&mut seq,
|
||||
);
|
||||
|
||||
// Add filler symbols between the section name and page number.
|
||||
if let Some(filler) = self.fill() {
|
||||
|
@ -18,9 +18,9 @@ use crate::realize::StyleVec;
|
||||
///
|
||||
/// # Example
|
||||
/// ```example
|
||||
/// #show par: set block(spacing: 0.65em)
|
||||
/// #set par(
|
||||
/// first-line-indent: 1em,
|
||||
/// spacing: 0.65em,
|
||||
/// justify: true,
|
||||
/// )
|
||||
///
|
||||
@ -115,8 +115,7 @@ pub struct ParElem {
|
||||
/// By typographic convention, paragraph breaks are indicated either by some
|
||||
/// space between paragraphs or by indented first lines. Consider reducing
|
||||
/// the [paragraph spacing]($block.spacing) to the [`leading`]($par.leading)
|
||||
/// when using this property (e.g. using
|
||||
/// `[#show par: set block(spacing: 0.65em)]`).
|
||||
/// when using this property (e.g. using `[#set par(spacing: 0.65em)]`).
|
||||
#[ghost]
|
||||
pub first_line_indent: Length,
|
||||
|
||||
|
@ -177,8 +177,8 @@ impl Show for Packed<RefElem> {
|
||||
|
||||
let elem = elem.at(span)?;
|
||||
|
||||
if elem.func() == FootnoteElem::elem() {
|
||||
return Ok(FootnoteElem::with_label(target).pack().spanned(span));
|
||||
if let Some(footnote) = elem.to_packed::<FootnoteElem>() {
|
||||
return Ok(footnote.into_ref(target).pack().spanned(span));
|
||||
}
|
||||
|
||||
let elem = elem.clone();
|
||||
|
@ -480,7 +480,6 @@ pub(crate) const SYM: &[(&str, Symbol)] = symbols! {
|
||||
checkmark: ['✓', light: '🗸', heavy: '✔'],
|
||||
crossmark: ['✗', heavy: '✘'],
|
||||
floral: ['❦', l: '☙', r: '❧'],
|
||||
notes: [up: '🎜', down: '🎝'],
|
||||
refmark: '※',
|
||||
servicemark: '℠',
|
||||
maltese: '✠',
|
||||
@ -495,6 +494,51 @@ pub(crate) const SYM: &[(&str, Symbol)] = symbols! {
|
||||
spade.stroked: '♤',
|
||||
],
|
||||
|
||||
// Music.
|
||||
note: [
|
||||
up: '🎜',
|
||||
down: '🎝',
|
||||
whole: '𝅝',
|
||||
half: '𝅗𝅥',
|
||||
quarter: '𝅘𝅥',
|
||||
quarter.alt: '♩',
|
||||
eighth: '𝅘𝅥𝅮',
|
||||
eighth.alt: '♪',
|
||||
eighth.beamed: '♫',
|
||||
sixteenth: '𝅘𝅥𝅯',
|
||||
sixteenth.beamed: '♬',
|
||||
grace: '𝆕',
|
||||
grace.slash: '𝆔',
|
||||
],
|
||||
rest: [
|
||||
whole: '𝄻',
|
||||
multiple: '𝄺',
|
||||
multiple.measure: '𝄩',
|
||||
half: '𝄼',
|
||||
quarter: '𝄽',
|
||||
eighth: '𝄾',
|
||||
sixteenth: '𝄿',
|
||||
],
|
||||
natural: [
|
||||
'♮',
|
||||
t: '𝄮',
|
||||
b: '𝄯',
|
||||
],
|
||||
flat: [
|
||||
'♭',
|
||||
t: '𝄬',
|
||||
b: '𝄭',
|
||||
double: '𝄫',
|
||||
quarter: '𝄳',
|
||||
],
|
||||
sharp: [
|
||||
'♯',
|
||||
t: '𝄰',
|
||||
b: '𝄱',
|
||||
double: '𝄪',
|
||||
quarter: '𝄲',
|
||||
],
|
||||
|
||||
// Shapes.
|
||||
bullet: '•',
|
||||
circle: [
|
||||
|
@ -48,6 +48,16 @@ static EXCEPTION_MAP: phf::Map<&'static str, Exception> = phf::phf_map! {
    // See https://corefonts.sourceforge.net/.
    "Arial-Black" => Exception::new()
        .weight(900),
    // Archivo Narrow is different from Archivo and Archivo Black. Since Archivo Black seems
    // identical to Archivo weight 900, only differentiate between Archivo and Archivo Narrow.
    "ArchivoNarrow-Regular" => Exception::new()
        .family("Archivo Narrow"),
    "ArchivoNarrow-Italic" => Exception::new()
        .family("Archivo Narrow"),
    "ArchivoNarrow-Bold" => Exception::new()
        .family("Archivo Narrow"),
    "ArchivoNarrow-BoldItalic" => Exception::new()
        .family("Archivo Narrow"),
    // Fandol fonts designed for Chinese typesetting.
    // See https://ctan.org/tex-archive/fonts/fandol/.
    "FandolHei-Bold" => Exception::new()

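The `EXCEPTION_MAP` hunk adds per-PostScript-name metadata overrides so that all four Archivo Narrow styles report the family "Archivo Narrow". A rough sketch of the same lookup with a plain `match` instead of the compile-time `phf` map; the `Exception` struct here is a simplified stand-in, not the crate's builder.

```rust
/// Simplified stand-in for the crate's `Exception` builder: only the fields
/// this sketch needs (family override and weight override).
#[derive(Default, Debug)]
struct Exception {
    family: Option<&'static str>,
    weight: Option<u16>,
}

/// Look up metadata overrides by PostScript name. The real code uses a
/// compile-time `phf` map; a `match` shows the same idea without the dependency.
fn exception(postscript_name: &str) -> Option<Exception> {
    Some(match postscript_name {
        "Arial-Black" => Exception { weight: Some(900), ..Default::default() },
        // All Archivo Narrow styles report the family "Archivo Narrow" so they
        // are not conflated with Archivo / Archivo Black.
        "ArchivoNarrow-Regular"
        | "ArchivoNarrow-Italic"
        | "ArchivoNarrow-Bold"
        | "ArchivoNarrow-BoldItalic" => {
            Exception { family: Some("Archivo Narrow"), ..Default::default() }
        }
        _ => return None,
    })
}

fn main() {
    println!("{:?}", exception("ArchivoNarrow-Bold"));
    println!("{:?}", exception("Helvetica"));
}
```
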
@ -1300,3 +1300,13 @@ cast! {
        ret
    },
}

/// Pushes `text` wrapped in LRE/RLE + PDF to `out`.
pub(crate) fn isolate(text: Content, styles: StyleChain, out: &mut Vec<Content>) {
    out.push(TextElem::packed(match TextElem::dir_in(styles) {
        Dir::RTL => "\u{202B}",
        _ => "\u{202A}",
    }));
    out.push(text);
    out.push(TextElem::packed("\u{202C}"));
}

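The new `isolate` helper brackets content in a directional embedding: RLE (U+202B) for right-to-left contexts, LRE (U+202A) otherwise, always closed by PDF (U+202C). A string-only sketch of the same wrapping, with no Typst types involved:

```rust
/// Text direction, standing in for Typst's `Dir` in this sketch.
enum Dir {
    Ltr,
    Rtl,
}

/// Wrap `text` in a directional embedding: LRE (U+202A) or RLE (U+202B),
/// closed by PDF (U+202C), mirroring what the diff's `isolate` helper does
/// with `Content` pieces.
fn isolate(text: &str, dir: Dir) -> String {
    let open = match dir {
        Dir::Rtl => '\u{202B}', // RIGHT-TO-LEFT EMBEDDING
        Dir::Ltr => '\u{202A}', // LEFT-TO-RIGHT EMBEDDING
    };
    let mut out = String::new();
    out.push(open);
    out.push_str(text);
    out.push('\u{202C}'); // POP DIRECTIONAL FORMATTING
    out
}

fn main() {
    let wrapped = isolate("Chapter 1", Dir::Rtl);
    assert!(wrapped.starts_with('\u{202B}') && wrapped.ends_with('\u{202C}'));
    println!("{}", wrapped.escape_unicode());
}
```
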
@ -123,7 +123,7 @@ impl SmartQuoter {

    /// Process the last seen character.
    pub fn last(&mut self, c: char, is_quote: bool) {
        self.expect_opening = is_ignorable(c) || is_opening_bracket(c);
        self.expect_opening = is_exterior_to_quote(c) || is_opening_bracket(c);
        self.last_num = c.is_numeric();
        if !is_quote {
            self.prev_quote_type = None;

@ -150,7 +150,7 @@ impl SmartQuoter {
            self.prev_quote_type = Some(double);
            quotes.open(double)
        } else if self.quote_depth > 0
            && (peeked.is_ascii_punctuation() || is_ignorable(peeked))
            && (peeked.is_ascii_punctuation() || is_exterior_to_quote(peeked))
        {
            self.quote_depth -= 1;
            quotes.close(double)

@ -168,7 +168,7 @@ impl Default for SmartQuoter {
    }
}

fn is_ignorable(c: char) -> bool {
fn is_exterior_to_quote(c: char) -> bool {
    c.is_whitespace() || is_newline(c)
}

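The rename from `is_ignorable` to `is_exterior_to_quote` spells out what the predicate is for: a whitespace or newline character means the quoter should expect the next quote to open rather than close. A standalone sketch of the predicate; the newline set below is an assumption for the sketch, while the real code reuses a shared `is_newline` helper.

```rust
/// Characters that count as line breaks here; this list is an assumption for
/// the sketch, the real code delegates to a shared `is_newline` helper.
fn is_newline(c: char) -> bool {
    matches!(c, '\n' | '\r' | '\u{0B}' | '\u{0C}' | '\u{85}' | '\u{2028}' | '\u{2029}')
}

/// A character "exterior to a quote" is one that can sit before an opening
/// quote or after a closing one: whitespace or a line break.
fn is_exterior_to_quote(c: char) -> bool {
    c.is_whitespace() || is_newline(c)
}

fn main() {
    // After a space we expect the next quote to open; after a letter we don't.
    assert!(is_exterior_to_quote(' '));
    assert!(!is_exterior_to_quote('a'));
    println!("ok");
}
```
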
@ -196,7 +196,7 @@ fn layout_image(
        format,
        elem.alt(styles),
        engine.world,
        &families(styles).map(|s| s.into()).collect::<Vec<_>>(),
        &families(styles).collect::<Vec<_>>(),
    )
    .at(span)?;

@ -360,7 +360,7 @@ impl Image {
        format: ImageFormat,
        alt: Option<EcoString>,
        world: Tracked<dyn World + '_>,
        families: &[String],
        families: &[&str],
    ) -> StrResult<Image> {
        let kind = match format {
            ImageFormat::Raster(format) => {

@ -30,11 +30,11 @@ impl RasterImage {
    /// Decode a raster image.
    #[comemo::memoize]
    pub fn new(data: Bytes, format: RasterFormat) -> StrResult<RasterImage> {
        fn decode_with<'a, T: ImageDecoder<'a>>(
        fn decode_with<T: ImageDecoder>(
            decoder: ImageResult<T>,
        ) -> ImageResult<(image::DynamicImage, Option<Vec<u8>>)> {
            let mut decoder = decoder?;
            let icc = decoder.icc_profile().filter(|icc| !icc.is_empty());
            let icc = decoder.icc_profile().ok().flatten().filter(|icc| !icc.is_empty());
            decoder.set_limits(Limits::default())?;
            let dynamic = image::DynamicImage::from_decoder(decoder)?;
            Ok((dynamic, icc))

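In the newer `image` API targeted by this hunk, `icc_profile()` returns a `Result` around an `Option`, hence the added `.ok().flatten()`. A generic sketch of that flattening pattern with plain standard-library types; the `icc_profile` function below is illustrative, not the image crate's.

```rust
/// Illustrative stand-in for a decoder call that, like `icc_profile()` in
/// newer `image` versions, may fail *and* may legitimately return nothing.
fn icc_profile(raw: &[u8]) -> Result<Option<Vec<u8>>, &'static str> {
    if raw.is_empty() {
        return Err("no data");
    }
    // Pretend a leading zero means "no embedded profile".
    Ok(if raw[0] == 0 { None } else { Some(raw.to_vec()) })
}

fn main() {
    // `.ok().flatten()` collapses Result<Option<T>, E> into Option<T>,
    // treating both "error" and "absent" as "no profile"; the extra
    // `filter` drops empty profiles, as in the diff.
    let icc = icc_profile(&[1, 2, 3]).ok().flatten().filter(|icc| !icc.is_empty());
    assert!(icc.is_some());

    let none = icc_profile(&[]).ok().flatten();
    assert!(none.is_none());
    println!("ok");
}
```
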
@ -40,7 +40,7 @@ impl SvgImage {
    pub fn with_fonts(
        data: Bytes,
        world: Tracked<dyn World + '_>,
        families: &[String],
        families: &[&str],
    ) -> StrResult<SvgImage> {
        let book = world.book();
        let resolver = Mutex::new(FontResolver::new(world, book, families));

@ -142,7 +142,7 @@ struct FontResolver<'a> {
    /// The world we use to load fonts.
    world: Tracked<'a, dyn World + 'a>,
    /// The active list of font families at the location of the SVG.
    families: &'a [String],
    families: &'a [&'a str],
    /// A mapping from Typst font indices to fontdb IDs.
    to_id: HashMap<usize, Option<fontdb::ID>>,
    /// The reverse mapping.

@ -156,7 +156,7 @@ impl<'a> FontResolver<'a> {
    fn new(
        world: Tracked<'a, dyn World + 'a>,
        book: &'a FontBook,
        families: &'a [String],
        families: &'a [&'a str],
    ) -> Self {
        Self {
            book,

@ -191,11 +191,11 @@ impl FontResolver<'_> {
        font.families()
            .iter()
            .filter_map(|family| match family {
                usvg::FontFamily::Named(named) => Some(named),
                usvg::FontFamily::Named(named) => Some(named.as_str()),
                // We don't support generic families at the moment.
                _ => None,
            })
            .chain(self.families)
            .chain(self.families.iter().copied())
            .filter_map(|named| self.book.select(&named.to_lowercase(), variant))
            .find_map(|index| self.get_or_load(index, db))
    }

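These `FontResolver` hunks switch the configured families from `&[String]` to `&[&str]` and chain them after the names the SVG itself requests via `.iter().copied()`. A small stand-alone sketch of that candidate-building chain; the font names are made up for the example.

```rust
fn main() {
    // Family names the SVG itself asks for (generic families already filtered
    // out); the names here are made up for the example.
    let svg_families: Vec<Option<&str>> =
        vec![Some("Noto Serif"), None, Some("Liberation Sans")];
    // The document's active font list, now borrowed as plain &str slices.
    let configured: &[&str] = &["Libertinus Serif", "New Computer Modern"];

    // Same shape as the diff: SVG-requested names first, then the configured
    // families, copied out of the slice so every item is a &str.
    let candidates: Vec<String> = svg_families
        .iter()
        .copied()
        .flatten()
        .chain(configured.iter().copied())
        .map(str::to_lowercase)
        .collect();

    assert_eq!(candidates.first().map(String::as_str), Some("noto serif"));
    println!("{candidates:?}");
}
```
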
@ -593,10 +593,9 @@ The example below

```typ
#set page(margin: 1.75in)
#set par(leading: 0.55em, first-line-indent: 1.8em, justify: true)
#set par(leading: 0.55em, spacing: 0.55em, first-line-indent: 1.8em, justify: true)
#set text(font: "New Computer Modern")
#show raw: set text(font: "New Computer Modern Mono")
#show par: set block(spacing: 0.55em)
#show heading: set block(above: 1.4em, below: 1em)
```

@ -120,7 +120,7 @@ a table listing all syntax that is available in code mode:
| Named function | `{let f(x) = 2 * x}` | [Function]($function) |
| Set rule | `{set text(14pt)}` | [Styling]($styling/#set-rules) |
| Set-if rule | `{set text(..) if .. }` | [Styling]($styling/#set-rules) |
| Show-set rule | `{show par: set block(..)}` | [Styling]($styling/#show-rules) |
| Show-set rule | `{show heading: set block(..)}` | [Styling]($styling/#show-rules) |
| Show rule with function | `{show raw: it => {..}}` | [Styling]($styling/#show-rules) |
| Show-everything rule | `{show: columns.with(2)}` | [Styling]($styling/#show-rules) |
| Context expression | `{context text.lang}` | [Context]($context) |

@ -6,7 +6,7 @@ use heck::{ToKebabCase, ToTitleCase};
use pulldown_cmark as md;
use serde::{Deserialize, Serialize};
use typed_arena::Arena;
use typst::diag::{FileResult, StrResult};
use typst::diag::{FileError, FileResult, StrResult};
use typst::foundations::{Bytes, Datetime};
use typst::layout::{Abs, Point, Size};
use typst::syntax::{FileId, Source, VirtualPath};

@ -463,12 +463,16 @@ impl World for DocWorld {
        &FONTS.0
    }

    fn main(&self) -> Source {
        self.0.clone()
    fn main(&self) -> FileId {
        self.0.id()
    }

    fn source(&self, _: FileId) -> FileResult<Source> {
        Ok(self.0.clone())
    fn source(&self, id: FileId) -> FileResult<Source> {
        if id == self.0.id() {
            Ok(self.0.clone())
        } else {
            Err(FileError::NotFound(id.vpath().as_rootless_path().into()))
        }
    }

    fn file(&self, id: FileId) -> FileResult<Bytes> {

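After this change, `DocWorld` reports its main file by `FileId` and only serves the single source it owns, returning `FileError::NotFound` for anything else. A self-contained sketch of that single-file-provider shape with minimal stand-in types (not Typst's `World` trait):

```rust
/// Minimal stand-ins for `FileId`, `Source`, and the error type; the real
/// types live in `typst::syntax` and `typst::diag`.
type FileId = u32;

#[derive(Clone, Debug)]
struct Source {
    id: FileId,
    text: String,
}

#[derive(Debug)]
enum FileError {
    NotFound(FileId),
}

/// A provider that owns exactly one source file, like `DocWorld`/`FuzzWorld`
/// after the change: `main()` hands out an id, `source()` checks it.
struct SingleFileWorld {
    main: Source,
}

impl SingleFileWorld {
    fn main(&self) -> FileId {
        self.main.id
    }

    fn source(&self, id: FileId) -> Result<Source, FileError> {
        if id == self.main.id {
            Ok(self.main.clone())
        } else {
            Err(FileError::NotFound(id))
        }
    }
}

fn main() {
    let world = SingleFileWorld {
        main: Source { id: 7, text: "= Hello".into() },
    };
    let id = world.main();
    assert!(world.source(id).is_ok());
    assert!(world.source(99).is_err());
    println!("ok");
}
```
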
@ -44,8 +44,8 @@ static GROUPS: Lazy<Vec<GroupData>> = Lazy::new(|| {
            .module()
            .scope()
            .iter()
            .filter(|(_, v)| matches!(v, Value::Func(_)))
            .map(|(k, _)| k.clone())
            .filter(|(_, v, _)| matches!(v, Value::Func(_)))
            .map(|(k, _, _)| k.clone())
            .collect();
        }
    }

@ -249,7 +249,7 @@ fn category_page(resolver: &dyn Resolver, category: Category) -> PageModel {

    // Add values and types.
    let scope = module.scope();
    for (name, value) in scope.iter() {
    for (name, value, _) in scope.iter() {
        if scope.get_category(name) != Some(category) {
            continue;
        }

@ -463,7 +463,7 @@ fn casts(
fn scope_models(resolver: &dyn Resolver, name: &str, scope: &Scope) -> Vec<FuncModel> {
    scope
        .iter()
        .filter_map(|(_, value)| {
        .filter_map(|(_, value, _)| {
            let Value::Func(func) = value else { return None };
            Some(func_model(resolver, func, &[name], true))
        })

@ -649,7 +649,7 @@ fn symbols_page(resolver: &dyn Resolver, parent: &str, group: &GroupData) -> Pag
/// Produce a symbol list's model.
fn symbols_model(resolver: &dyn Resolver, group: &GroupData) -> SymbolsModel {
    let mut list = vec![];
    for (name, value) in group.module().scope().iter() {
    for (name, value, _) in group.module().scope().iter() {
        let Value::Symbol(symbol) = value else { continue };
        let complete = |variant: &str| {
            if variant.is_empty() {

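These call sites adapt to `Scope::iter` now yielding three-element tuples, so the extra component is ignored with `_`. A toy sketch of filtering such triples for functions, using the same `matches!` and `let`-`else` shapes as the diff; the `Value` enum below is a stand-in, and the third tuple slot is just a unit here.

```rust
/// Toy value type standing in for `typst::foundations::Value`.
#[derive(Clone, Debug)]
enum Value {
    Func(&'static str),
    Int(i64),
}

fn main() {
    // (name, value, extra) triples, mirroring the new three-element iterator;
    // the third slot (a unit here) stands for whatever extra data the scope
    // now carries per binding.
    let scope = vec![
        ("double", Value::Func("double"), ()),
        ("answer", Value::Int(42), ()),
        ("triple", Value::Func("triple"), ()),
    ];

    // Keep only the names bound to functions, ignoring the extra component.
    let functions: Vec<&str> = scope
        .iter()
        .filter(|(_, v, _)| matches!(v, Value::Func(_)))
        .map(|(k, _, _)| *k)
        .collect();
    assert_eq!(functions, ["double", "triple"]);

    // The same shape with a let-else, as in `scope_models`.
    for (_, value, _) in &scope {
        let Value::Func(name) = value else { continue };
        println!("func: {name}");
    }
}
```
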
@ -39,16 +39,20 @@ impl World for FuzzWorld {
        &self.book
    }

    fn main(&self) -> Source {
        self.source.clone()
    fn main(&self) -> FileId {
        self.source.id()
    }

    fn source(&self, src: FileId) -> FileResult<Source> {
        Err(FileError::NotFound(src.vpath().as_rootless_path().into()))
    fn source(&self, id: FileId) -> FileResult<Source> {
        if id == self.source.id() {
            Ok(self.source.clone())
        } else {
            Err(FileError::NotFound(id.vpath().as_rootless_path().into()))
        }
    }

    fn file(&self, src: FileId) -> FileResult<Bytes> {
        Err(FileError::NotFound(src.vpath().as_rootless_path().into()))
    fn file(&self, id: FileId) -> FileResult<Bytes> {
        Err(FileError::NotFound(id.vpath().as_rootless_path().into()))
    }

    fn font(&self, _: usize) -> Option<Font> {

BIN tests/ref/hyphenate-outside-of-words.png (new file)
BIN tests/ref/issue-3355-metadata-weak-spacing.png (new file)
BIN tests/ref/issue-3601-empty-raw.png (new file)
BIN tests/ref/issue-4278-par-trim-before-equation.png (new file)
BIN tests/ref/issue-4361-transparency-leak.png (new file)
BIN tests/ref/issue-4454-footnote-ref-numbering.png (new file)
BIN tests/ref/issue-4476-rtl-title-ending-in-ltr-text.png (new file)
BIN tests/ref/justify-basically-empty.png (new file)
BIN tests/ref/math-primes-with-superscript.png (new file)
BIN tests/ref/math-spacing-ignorant.png (new file)
BIN tests/ref/par-metadata-after-trimmed-space.png (new file)
BIN tests/ref/par-trailing-whitespace.png (new file)
BIN several existing reference images under tests/ref/ regenerated (binary changes)