Basic package management

Laurenz 2023-06-26 13:57:21 +02:00
parent 9c7f31870b
commit 7b92bd7c34
44 changed files with 1413 additions and 810 deletions

Cargo.lock (generated)

@@ -522,6 +522,9 @@ name = "ecow"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5c5051925c54d9a42c8652313b5358a7432eed209466b443ed5220431243a14"
+dependencies = [
+ "serde",
+]
[[package]]
name = "either"
@@ -529,15 +532,6 @@ version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91"
-[[package]]
-name = "elsa"
-version = "1.8.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b5e0aca8dce8856e420195bd13b6a64de3334235ccc9214e824b86b12bf26283"
-dependencies = [
- "stable_deref_trait",
-]
[[package]]
name = "enum-ordinalize"
version = "3.1.13"
@@ -1726,6 +1720,21 @@ dependencies = [
 "bytemuck",
]
[[package]]
name = "ring"
version = "0.16.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc"
dependencies = [
"cc",
"libc",
"once_cell",
"spin",
"untrusted",
"web-sys",
"winapi",
]
[[package]]
name = "roff"
version = "0.2.1"
@@ -1783,6 +1792,28 @@ dependencies = [
 "windows-sys 0.48.0",
]
[[package]]
name = "rustls"
version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e32ca28af694bc1bbf399c33a516dbdf1c90090b8ab23c2bc24f834aa2247f5f"
dependencies = [
"log",
"ring",
"rustls-webpki",
"sct",
]
[[package]]
name = "rustls-webpki"
version = "0.100.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6207cd5ed3d8dca7816f8f3725513a34609c0c765bf652b8c3cb4cfd87db46b"
dependencies = [
"ring",
"untrusted",
]
[[package]]
name = "rustversion"
version = "1.0.12"
@@ -1826,6 +1857,16 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "sct"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4"
dependencies = [
"ring",
"untrusted",
]
[[package]]
name = "semver"
version = "1.0.17"
@@ -1929,6 +1970,12 @@ version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
[[package]]
name = "spin"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
[[package]]
name = "stable_deref_trait"
version = "1.2.0"
@@ -2091,6 +2138,17 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
[[package]]
name = "tar"
version = "0.4.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b55807c0344e1e6c04d7c965f5289c39a8d94ae23ed5c0b57aabac549f871c6"
dependencies = [
"filetime",
"libc",
"xattr",
]
[[package]]
name = "tempfile"
version = "3.5.0"
@@ -2382,6 +2440,7 @@ dependencies = [
 "svg2pdf",
 "time",
 "tiny-skia",
+ "toml",
 "tracing",
 "ttf-parser",
 "typst-macros",
@@ -2405,7 +2464,7 @@ dependencies = [
 "codespan-reporting",
 "comemo",
 "dirs",
- "elsa",
+ "flate2",
 "inferno",
 "memmap2",
 "notify",
@@ -2413,6 +2472,7 @@ dependencies = [
 "open",
 "same-file",
 "siphasher",
+ "tar",
 "tempfile",
 "tracing",
 "tracing-error",
@@ -2420,6 +2480,7 @@ dependencies = [
 "tracing-subscriber",
 "typst",
 "typst-library",
+ "ureq",
 "walkdir",
]
@@ -2496,7 +2557,6 @@ version = "0.5.0"
dependencies = [
 "clap 4.2.7",
 "comemo",
- "elsa",
 "iai",
 "once_cell",
 "oxipng",
@@ -2623,6 +2683,28 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e9df2af067a7953e9c3831320f35c1cc0600c30d44d9f7a12b01db1cd88d6b47"
[[package]]
name = "untrusted"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
[[package]]
name = "ureq"
version = "2.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b11c96ac7ee530603dcdf68ed1557050f374ce55a5a07193ebf8cbc9f8927e9"
dependencies = [
"base64",
"flate2",
"log",
"once_cell",
"rustls",
"rustls-webpki",
"url",
"webpki-roots",
]
[[package]]
name = "url"
version = "2.3.1"
@@ -2788,6 +2870,25 @@ version = "0.2.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed9d5b4305409d1fc9482fee2d7f9bcbf24b3972bf59817ef757e23982242a93"
[[package]]
name = "web-sys"
version = "0.3.63"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3bdd9ef4e984da1187bf8110c5cf5b845fbc87a23602cdf912386a76fcd3a7c2"
dependencies = [
"js-sys",
"wasm-bindgen",
]
[[package]]
name = "webpki-roots"
version = "0.23.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b03058f88386e5ff5310d9111d53f48b17d732b401aeb83a8d5190f2ac459338"
dependencies = [
"rustls-webpki",
]
[[package]]
name = "weezl"
version = "0.1.7"
@@ -2999,6 +3100,15 @@ dependencies = [
 "tap",
]
[[package]]
name = "xattr"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d1526bbe5aaeb5eb06885f4d987bcdfa5e23187055de9b83fe00156a821fabc"
dependencies = [
"libc",
]
[[package]]
name = "xmlparser"
version = "0.13.5"


@@ -34,7 +34,7 @@ typst-macros = { path = "macros" }
bitflags = { version = "2", features = ["serde"] }
bytemuck = "1"
comemo = "0.3"
-ecow = "0.1"
+ecow = { version = "0.1.1", features = ["serde"] }
flate2 = "1"
fontdb = "0.13"
if_chain = "1"
@@ -54,6 +54,7 @@ siphasher = "0.3"
subsetter = "0.1.1"
svg2pdf = { git = "https://github.com/typst/svg2pdf" }
tiny-skia = "0.9.0"
+toml = { version = "0.7.3", default-features = false, features = ["parse"] }
tracing = "0.1.37"
ttf-parser = "0.18.1"
unicode-general-category = "0.6"
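The `toml` dependency added above is parse-only (`default-features = false`, `features = ["parse"]`), presumably for reading package metadata. A minimal sketch of parsing a manifest with exactly that configuration; the manifest fields below are illustrative assumptions, not taken from this commit:

```rust
// Sketch only: parse a made-up package manifest with the parse-only `toml`
// crate configured above. No serde derive is needed for this variant.
fn main() -> Result<(), toml::de::Error> {
    let raw = r#"
        [package]
        name = "example"
        version = "0.1.0"
        entrypoint = "lib.typ"
    "#;
    let manifest: toml::Table = raw.parse()?;
    let package = &manifest["package"];
    println!(
        "{} v{} (entrypoint: {})",
        package["name"].as_str().unwrap(),
        package["version"].as_str().unwrap(),
        package["entrypoint"].as_str().unwrap(),
    );
    Ok(())
}
```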


@@ -176,6 +176,9 @@ cargo build --release
The optimized binary will be stored in `target/release/`.

+Another good way to contribute is by [sharing packages][packages] with the
+community.
+
## Pronunciation and Spelling
IPA: /taɪpst/. "Ty" like in **Ty**pesetting and "pst" like in Hi**pst**er. When
writing about Typst, capitalize its name as a proper noun, with a capital "T".
@@ -219,4 +222,5 @@ instant preview. To achieve these goals, we follow three core design principles:
[releases]: https://github.com/typst/typst/releases/
[architecture]: https://github.com/typst/typst/blob/main/ARCHITECTURE.md
[contributing]: https://github.com/typst/typst/blob/main/CONTRIBUTING.md
+[packages]: https://github.com/typst/packages/
[`comemo`]: https://github.com/typst/comemo/


@@ -27,7 +27,7 @@ clap = { version = "4.2.4", features = ["derive", "env"] }
codespan-reporting = "0.11"
comemo = "0.3"
dirs = "5"
-elsa = "1.8"
+flate2 = "1"
inferno = "0.11.15"
memmap2 = "0.5"
notify = "5"
@@ -35,11 +35,13 @@ once_cell = "1"
open = "4.0.2"
same-file = "1"
siphasher = "0.3"
+tar = "0.4"
tempfile = "3.5.0"
tracing = "0.1.37"
tracing-error = "0.2"
tracing-flame = "0.2.0"
tracing-subscriber = "0.3.17"
+ureq = "2"
walkdir = "2"

[build-dependencies]
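`flate2`, `tar`, and `ureq` enter the CLI here and are combined later in this commit to fetch and unpack package archives. A standalone sketch of that pipeline with a placeholder URL, assuming the archive is a gzipped tarball:

```rust
use std::path::Path;

// Sketch of the fetch-and-unpack flow enabled by the new dependencies.
// The URL below is a placeholder, not a real package archive.
fn fetch_tar_gz(url: &str, target: &Path) -> Result<(), Box<dyn std::error::Error>> {
    let response = ureq::get(url).call()?;            // HTTP(S) request
    let reader = response.into_reader();              // stream the body
    let decompressed = flate2::read::GzDecoder::new(reader);
    tar::Archive::new(decompressed).unpack(target)?;  // extract into `target`
    Ok(())
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    fetch_tar_gz("https://example.org/archive.tar.gz", Path::new("unpacked"))
}
```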


@@ -12,13 +12,14 @@ pub struct CliArguments {
    pub command: Command,

    /// Add additional directories to search for fonts
-    #[clap(long = "font-path", env = "TYPST_FONT_PATHS", value_name = "DIR", action = ArgAction::Append)]
+    #[clap(
+        long = "font-path",
+        env = "TYPST_FONT_PATHS",
+        value_name = "DIR",
+        action = ArgAction::Append,
+    )]
    pub font_paths: Vec<PathBuf>,

-    /// Configure the root for absolute paths
-    #[clap(long = "root", env = "TYPST_ROOT", value_name = "DIR")]
-    pub root: Option<PathBuf>,
-
    /// Sets the level of logging verbosity:
    /// -v = warning & error, -vv = info, -vvv = debug, -vvvv = trace
    #[clap(short, long, action = ArgAction::Count)]

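For reference, the reformatted `--font-path` flag above stays repeatable and falls back to an environment variable. A self-contained sketch of the same pattern with illustrative names, assuming clap's `derive` and `env` features (which the CLI manifest enables):

```rust
use std::path::PathBuf;
use clap::{ArgAction, Parser};

/// Illustrative stand-in for the arguments struct above.
#[derive(Debug, Parser)]
struct DemoArgs {
    /// Add additional directories to search for fonts.
    #[clap(
        long = "font-path",
        env = "TYPST_FONT_PATHS",
        value_name = "DIR",
        action = ArgAction::Append,
    )]
    font_paths: Vec<PathBuf>,

    /// Sets the level of logging verbosity (-v, -vv, ...).
    #[clap(short, long, action = ArgAction::Count)]
    verbosity: u8,
}

fn main() {
    // Command-line values take precedence over the environment variable.
    let args = DemoArgs::parse_from(["demo", "--font-path", "/tmp/fonts", "-vv"]);
    assert_eq!(args.font_paths, vec![PathBuf::from("/tmp/fonts")]);
    assert_eq!(args.verbosity, 2);
}
```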

@@ -2,7 +2,8 @@ mod args;
mod trace;

use std::cell::{Cell, RefCell, RefMut};
-use std::collections::HashMap;
+use std::collections::{HashMap, HashSet};
+use std::env;
use std::fs::{self, File};
use std::hash::Hash;
use std::io::{self, IsTerminal, Write};
@@ -14,20 +15,22 @@ use clap::Parser;
use codespan_reporting::diagnostic::{Diagnostic, Label};
use codespan_reporting::term::{self, termcolor};
use comemo::Prehashed;
-use elsa::FrozenVec;
use memmap2::Mmap;
use notify::{RecommendedWatcher, RecursiveMode, Watcher};
use same_file::{is_same_file, Handle};
use siphasher::sip128::{Hasher128, SipHasher13};
use std::cell::OnceCell;
use termcolor::{ColorChoice, StandardStream, WriteColor};
-use typst::diag::{bail, FileError, FileResult, SourceError, StrResult};
+use typst::diag::{
+    bail, FileError, FileResult, PackageError, PackageResult, SourceError, StrResult,
+};
use typst::doc::Document;
use typst::eval::{eco_format, Datetime, Library};
+use typst::file::{FileId, PackageSpec};
use typst::font::{Font, FontBook, FontInfo, FontVariant};
use typst::geom::Color;
-use typst::syntax::{Source, SourceId};
+use typst::syntax::Source;
-use typst::util::{Buffer, PathExt};
+use typst::util::{Bytes, PathExt};
use typst::World;
use walkdir::WalkDir;
@@ -96,8 +99,6 @@ struct CompileSettings {
    output: PathBuf,
    /// Whether to watch the input files for changes.
    watch: bool,
-    /// The root directory for absolute paths.
-    root: Option<PathBuf>,
    /// The paths to search for fonts.
    font_paths: Vec<PathBuf>,
    /// The open command to use.
@@ -115,7 +116,6 @@ impl CompileSettings {
        input: PathBuf,
        output: Option<PathBuf>,
        watch: bool,
-        root: Option<PathBuf>,
        font_paths: Vec<PathBuf>,
        open: Option<Option<String>>,
        ppi: Option<f32>,
@@ -129,7 +129,6 @@ impl CompileSettings {
            input,
            output,
            watch,
-            root,
            font_paths,
            open,
            diagnostic_format,
@@ -150,16 +149,7 @@ impl CompileSettings {
            _ => unreachable!(),
        };
-        Self::new(
-            input,
-            output,
-            watch,
-            args.root,
-            args.font_paths,
-            open,
-            ppi,
-            diagnostic_format,
-        )
+        Self::new(input, output, watch, args.font_paths, open, ppi, diagnostic_format)
    }
}
@@ -190,20 +180,8 @@ impl FontsSettings {
/// Execute a compilation command.
fn compile(mut command: CompileSettings) -> StrResult<()> {
-    // Determine the parent directory of the input file.
-    let parent = command
-        .input
-        .canonicalize()
-        .ok()
-        .as_ref()
-        .and_then(|path| path.parent())
-        .unwrap_or(Path::new("."))
-        .to_owned();
-    let root = command.root.as_ref().unwrap_or(&parent);
-
-    // Create the world that serves sources, fonts and files.
-    let mut world = SystemWorld::new(root.into(), &command.font_paths);
+    // Create the world that serves sources, files, and fonts.
+    let mut world = SystemWorld::new(&command.input, &command.font_paths);

    // Perform initial compilation.
    let ok = compile_once(&mut world, &command)?;
@@ -223,29 +201,10 @@ fn compile(mut command: CompileSettings) -> StrResult<()> {
    // Setup file watching.
    let (tx, rx) = std::sync::mpsc::channel();
    let mut watcher = RecommendedWatcher::new(tx, notify::Config::default())
-        .map_err(|_| "failed to watch directory")?;
+        .map_err(|_| "failed to setup file watching")?;

-    // Watch the input file's parent directory recursively.
-    watcher
-        .watch(&parent, RecursiveMode::Recursive)
-        .map_err(|_| "failed to watch parent directory")?;
-
-    // Watch the root directory recursively.
-    if world.root != parent {
-        watcher
-            .watch(&world.root, RecursiveMode::Recursive)
-            .map_err(|_| "failed to watch root directory")?;
-    }
-
-    // Watch all the files that are used in the input file and its dependencies
-    let mut dependencies = world.dependencies();
-    for dep in &dependencies {
-        tracing::debug!("Watching {:?}", dep);
-        watcher
-            .watch(dep, RecursiveMode::NonRecursive)
-            .map_err(|_| format!("failed to watch {:?}", dep))?;
-    }
+    // Watch all the files that are used by the input file and its dependencies.
+    world.watch(&mut watcher, HashSet::new())?;

    // Handle events.
    let timeout = std::time::Duration::from_millis(100);
@@ -265,28 +224,21 @@ fn compile(mut command: CompileSettings) -> StrResult<()> {
                continue;
            }

-            recompile |= world.relevant(&event);
+            recompile |= is_event_relevant(&event);
        }

        if recompile {
+            // Retrieve the dependencies of the last compilation.
+            let dependencies = world.dependencies();
+
+            // Recompile.
            let ok = compile_once(&mut world, &command)?;
-            comemo::evict(30);
+            comemo::evict(10);

-            // Unwatch all the previous dependencies before watching the new dependencies
-            for dep in &dependencies {
-                watcher
-                    .unwatch(dep)
-                    .map_err(|_| format!("failed to unwatch {:?}", dep))?;
-            }
-            dependencies = world.dependencies();
-            for dep in &dependencies {
-                tracing::debug!("Watching {:?}", dep);
-                watcher
-                    .watch(dep, RecursiveMode::NonRecursive)
-                    .map_err(|_| format!("failed to watch {:?}", dep))?;
-            }
+            // Adjust the watching.
+            world.watch(&mut watcher, dependencies)?;

-            // Ipen the file if requested, this must be done on the first
+            // Open the file if requested, this must be done on the first
            // **successful** compilation
            if ok {
                if let Some(open) = command.open.take() {
@@ -307,8 +259,9 @@ fn compile_once(world: &mut SystemWorld, command: &CompileSettings) -> StrResult
    let start = std::time::Instant::now();
    status(command, Status::Compiling).unwrap();

+    // Reset everything and ensure that the main file is still present.
    world.reset();
-    world.main = world.resolve(&command.input).map_err(|err| err.to_string())?;
+    world.source(world.main).map_err(|err| err.to_string())?;

    let result = typst::compile(world);
    let duration = start.elapsed();
@@ -461,7 +414,6 @@ fn print_diagnostics(
    for error in errors {
        // The main diagnostic.
-        let range = error.range(world);
        let diag = Diagnostic::error()
            .with_message(error.message)
            .with_notes(
@@ -471,7 +423,7 @@ fn print_diagnostics(
                    .map(|e| (eco_format!("hint: {e}")).into())
                    .collect(),
            )
-            .with_labels(vec![Label::primary(error.span.source(), range)]);
+            .with_labels(vec![Label::primary(error.span.id(), error.span.range(world))]);

        term::emit(&mut w, &config, world, &diag)?;
@@ -479,10 +431,7 @@ fn print_diagnostics(
        for point in error.trace {
            let message = point.v.to_string();
            let help = Diagnostic::help().with_message(message).with_labels(vec![
-                Label::primary(
-                    point.span.source(),
-                    world.source(point.span.source()).range(point.span),
-                ),
+                Label::primary(point.span.id(), point.span.range(world)),
            ]);

            term::emit(&mut w, &config, world, &help)?;
@@ -492,19 +441,6 @@ fn print_diagnostics(
    Ok(())
}

-/// Opens the given file using:
-/// - The default file viewer if `open` is `None`.
-/// - The given viewer provided by `open` if it is `Some`.
-fn open_file(open: Option<&str>, path: &Path) -> StrResult<()> {
-    if let Some(app) = open {
-        open::with_in_background(path, app);
-    } else {
-        open::that_in_background(path);
-    }
-    Ok(())
-}

/// Execute a font listing command.
fn fonts(command: FontsSettings) -> StrResult<()> {
    let mut searcher = FontSearcher::new();
@@ -525,196 +461,224 @@ fn fonts(command: FontsSettings) -> StrResult<()> {
/// A world that provides access to the operating system.
struct SystemWorld {
+    /// The root relative to which absolute paths are resolved.
    root: PathBuf,
+    /// The input path.
+    main: FileId,
+    /// Typst's standard library.
    library: Prehashed<Library>,
+    /// Metadata about discovered fonts.
    book: Prehashed<FontBook>,
+    /// Locations of and storage for lazily loaded fonts.
    fonts: Vec<FontSlot>,
-    hashes: RefCell<HashMap<PathBuf, FileResult<PathHash>>>,
+    /// Maps package-path combinations to canonical hashes. All package-path
+    /// combinations that point to the same file are mapped to the same hash. To
+    /// be used in conjunction with `paths`.
+    hashes: RefCell<HashMap<FileId, FileResult<PathHash>>>,
+    /// Maps canonical path hashes to source files and buffers.
    paths: RefCell<HashMap<PathHash, PathSlot>>,
-    sources: FrozenVec<Box<Source>>,
-    today: Cell<Option<Datetime>>,
-    main: SourceId,
-    dependencies: RefCell<Vec<PathBuf>>,
+    /// The current date if requested. This is stored here to ensure it is
+    /// always the same within one compilation. Reset between compilations.
+    today: OnceCell<Option<Datetime>>,
}
/// Holds details about the location of a font and lazily the font itself.
struct FontSlot {
+    /// The path at which the font can be found on the system.
    path: PathBuf,
+    /// The index of the font in its collection. Zero if the path does not point
+    /// to a collection.
    index: u32,
+    /// The lazily loaded font.
    font: OnceCell<Option<Font>>,
}

/// Holds canonical data for all paths pointing to the same entity.
-#[derive(Default)]
+///
+/// Both fields can be populated if the file is both imported and read().
struct PathSlot {
-    source: OnceCell<FileResult<SourceId>>,
-    buffer: OnceCell<FileResult<Buffer>>,
+    /// The slot's path on the system.
+    system_path: PathBuf,
+    /// The lazily loaded source file for a path hash.
+    source: OnceCell<FileResult<Source>>,
+    /// The lazily loaded buffer for a path hash.
+    buffer: OnceCell<FileResult<Bytes>>,
}
impl SystemWorld {
-    fn new(root: PathBuf, font_paths: &[PathBuf]) -> Self {
+    fn new(input: &Path, font_paths: &[PathBuf]) -> Self {
        let mut searcher = FontSearcher::new();
        searcher.search(font_paths);

+        let root = input
+            .canonicalize()
+            .ok()
+            .as_ref()
+            .and_then(|path| path.parent())
+            .unwrap_or(Path::new("."))
+            .to_owned();
+
+        let file = input.file_name().unwrap_or(input.as_os_str());
+        let main = FileId::new(None, Path::new(file));
+
        Self {
            root,
+            main,
            library: Prehashed::new(typst_library::build()),
            book: Prehashed::new(searcher.book),
            fonts: searcher.fonts,
            hashes: RefCell::default(),
            paths: RefCell::default(),
-            sources: FrozenVec::new(),
-            today: Cell::new(None),
-            main: SourceId::detached(),
-            dependencies: RefCell::default(),
+            today: OnceCell::new(),
        }
    }
}
impl World for SystemWorld {
-    fn root(&self) -> &Path {
-        &self.root
-    }
-
    fn library(&self) -> &Prehashed<Library> {
        &self.library
    }

-    fn main(&self) -> &Source {
-        self.source(self.main)
-    }
-
-    #[tracing::instrument(skip_all)]
-    fn resolve(&self, path: &Path) -> FileResult<SourceId> {
-        self.slot(path)?
-            .source
-            .get_or_init(|| {
-                let buf = read(path)?;
-                let text = if buf.starts_with(b"\xef\xbb\xbf") {
-                    // remove UTF-8 BOM
-                    std::str::from_utf8(&buf[3..])?.to_owned()
-                } else {
-                    // Assume UTF-8
-                    String::from_utf8(buf)?
-                };
-                self.dependencies.borrow_mut().push(path.to_owned());
-                Ok(self.insert(path, text))
-            })
-            .clone()
-    }
-
-    fn source(&self, id: SourceId) -> &Source {
-        &self.sources[id.as_u16() as usize]
-    }
-
    fn book(&self) -> &Prehashed<FontBook> {
        &self.book
    }

+    fn main(&self) -> Source {
+        self.source(self.main).unwrap()
+    }
+
+    fn source(&self, id: FileId) -> FileResult<Source> {
+        let slot = self.slot(id)?;
+        slot.source
+            .get_or_init(|| {
+                let buf = read(&slot.system_path)?;
+                let text = decode_utf8(buf)?;
+                Ok(Source::new(id, text))
+            })
+            .clone()
+    }
+
+    fn file(&self, id: FileId) -> FileResult<Bytes> {
+        let slot = self.slot(id)?;
+        slot.buffer
+            .get_or_init(|| read(&slot.system_path).map(Bytes::from))
+            .clone()
+    }
+
    fn font(&self, id: usize) -> Option<Font> {
        let slot = &self.fonts[id];
        slot.font
            .get_or_init(|| {
-                let data = self.file(&slot.path).ok()?;
+                let data = read(&slot.path).ok()?.into();
                Font::new(data, slot.index)
            })
            .clone()
    }

-    fn file(&self, path: &Path) -> FileResult<Buffer> {
-        self.slot(path)?
-            .buffer
-            .get_or_init(|| {
-                self.dependencies.borrow_mut().push(path.to_owned());
-                read(path).map(Buffer::from)
-            })
-            .clone()
-    }
-
    fn today(&self, offset: Option<i64>) -> Option<Datetime> {
-        if self.today.get().is_none() {
-            let datetime = match offset {
+        *self.today.get_or_init(|| {
+            let naive = match offset {
                None => chrono::Local::now().naive_local(),
                Some(o) => (chrono::Utc::now() + chrono::Duration::hours(o)).naive_utc(),
            };

-            self.today.set(Some(Datetime::from_ymd(
-                datetime.year(),
-                datetime.month().try_into().ok()?,
-                datetime.day().try_into().ok()?,
-            )?))
-        }
-
-        self.today.get()
+            Datetime::from_ymd(
+                naive.year(),
+                naive.month().try_into().ok()?,
+                naive.day().try_into().ok()?,
+            )
+        })
    }
}
impl SystemWorld {
+    /// Access the canonical slot for the given path.
    #[tracing::instrument(skip_all)]
-    fn slot(&self, path: &Path) -> FileResult<RefMut<PathSlot>> {
-        let mut hashes = self.hashes.borrow_mut();
-        let hash = match hashes.get(path).cloned() {
-            Some(hash) => hash,
-            None => {
-                let hash = PathHash::new(path);
-                if let Ok(canon) = path.canonicalize() {
-                    hashes.insert(canon.normalize(), hash.clone());
-                }
-                hashes.insert(path.into(), hash.clone());
-                hash
-            }
-        }?;
-
-        Ok(std::cell::RefMut::map(self.paths.borrow_mut(), |paths| {
-            paths.entry(hash).or_default()
+    fn slot(&self, id: FileId) -> FileResult<RefMut<PathSlot>> {
+        let mut system_path = PathBuf::new();
+        let hash = self
+            .hashes
+            .borrow_mut()
+            .entry(id)
+            .or_insert_with(|| {
+                // Determine the root path relative to which the file path
+                // will be resolved.
+                let root = match id.package() {
+                    Some(spec) => prepare_package(spec)?,
+                    None => self.root.clone(),
+                };
+
+                // Join the path to the root. If it tries to escape, deny
+                // access. Note: It can still escape via symlinks.
+                system_path =
+                    root.join_rooted(id.path()).ok_or(FileError::AccessDenied)?;
+
+                PathHash::new(&system_path)
+            })
+            .clone()?;
+
+        Ok(RefMut::map(self.paths.borrow_mut(), |paths| {
+            paths.entry(hash).or_insert_with(|| PathSlot {
+                // This will only trigger if the `or_insert_with` above also
+                // triggered.
+                system_path,
+                source: OnceCell::new(),
+                buffer: OnceCell::new(),
+            })
        }))
    }

+    /// Collect all paths the last compilation depended on.
    #[tracing::instrument(skip_all)]
-    fn insert(&self, path: &Path, text: String) -> SourceId {
-        let id = SourceId::from_u16(self.sources.len() as u16);
-        let source = Source::new(id, path, text);
-        self.sources.push(Box::new(source));
-        id
+    fn dependencies(&self) -> HashSet<PathBuf> {
+        self.paths
+            .borrow()
+            .values()
+            .map(|slot| slot.system_path.clone())
+            .collect()
    }

-    fn relevant(&mut self, event: &notify::Event) -> bool {
-        match &event.kind {
-            notify::EventKind::Any => {}
-            notify::EventKind::Access(_) => return false,
-            notify::EventKind::Create(_) => return true,
-            notify::EventKind::Modify(kind) => match kind {
-                notify::event::ModifyKind::Any => {}
-                notify::event::ModifyKind::Data(_) => {}
-                notify::event::ModifyKind::Metadata(_) => return false,
-                notify::event::ModifyKind::Name(_) => return true,
-                notify::event::ModifyKind::Other => return false,
-            },
-            notify::EventKind::Remove(_) => {}
-            notify::EventKind::Other => return false,
-        }
-
-        event.paths.iter().any(|path| self.dependant(path))
-    }
-
-    fn dependant(&self, path: &Path) -> bool {
-        self.hashes.borrow().contains_key(&path.normalize())
-            || PathHash::new(path)
-                .map_or(false, |hash| self.paths.borrow().contains_key(&hash))
+    /// Adjust the file watching. Watches all new dependencies and unwatches
+    /// all `previous` dependencies that are not relevant anymore.
+    #[tracing::instrument(skip_all)]
+    fn watch(
+        &self,
+        watcher: &mut dyn Watcher,
+        mut previous: HashSet<PathBuf>,
+    ) -> StrResult<()> {
+        // Watch new paths that weren't watched yet.
+        for slot in self.paths.borrow().values() {
+            let path = &slot.system_path;
+            let watched = previous.remove(path);
+            if path.exists() && !watched {
+                tracing::info!("Watching {}", path.display());
+                watcher
+                    .watch(path, RecursiveMode::NonRecursive)
+                    .map_err(|_| eco_format!("failed to watch {path:?}"))?;
+            }
+        }
+
+        // Unwatch old paths that don't need to be watched anymore.
+        for path in previous {
+            tracing::info!("Unwatching {}", path.display());
+            watcher.unwatch(&path).ok();
+        }
+
+        Ok(())
    }

+    /// Reset the compilation state in preparation of a new compilation.
    #[tracing::instrument(skip_all)]
    fn reset(&mut self) {
-        self.sources.as_mut().clear();
        self.hashes.borrow_mut().clear();
        self.paths.borrow_mut().clear();
-        self.today.set(None);
-        self.dependencies.borrow_mut().clear();
+        self.today.take();
    }

-    // Return a list of files the document depends on
-    fn dependencies(&self) -> Vec<PathBuf> {
-        self.dependencies.borrow().clone()
+    /// Lookup a source file by id.
+    #[track_caller]
+    fn lookup(&self, id: FileId) -> Source {
+        self.source(id).expect("file id does not point to any source file")
    }
}
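The `watch` method above reconciles the files the last compilation actually used with the previously watched set: paths still in use stay watched, new ones are added, and stale ones are unwatched. A sketch of the same set logic in isolation, independent of `notify` (names are illustrative):

```rust
use std::collections::HashSet;
use std::path::PathBuf;

/// Given the previously watched paths and the current dependencies, compute
/// which paths must start being watched and which can be unwatched.
fn reconcile(
    mut previous: HashSet<PathBuf>,
    current: &HashSet<PathBuf>,
) -> (Vec<PathBuf>, Vec<PathBuf>) {
    let mut to_watch = Vec::new();
    for path in current {
        // Paths that stay in use are removed from `previous`, so only the
        // stale ones remain there afterwards.
        if !previous.remove(path) {
            to_watch.push(path.clone());
        }
    }
    (to_watch, previous.into_iter().collect())
}

fn main() {
    let previous = HashSet::from(["main.typ", "old.typ"].map(PathBuf::from));
    let current = HashSet::from(["main.typ", "new.typ"].map(PathBuf::from));
    let (watch, unwatch) = reconcile(previous, &current);
    println!("watch: {watch:?}, unwatch: {unwatch:?}");
}
```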
@@ -743,21 +707,130 @@ fn read(path: &Path) -> FileResult<Vec<u8>> {
    }
}
/// Decode UTF-8 with an optional BOM.
fn decode_utf8(buf: Vec<u8>) -> FileResult<String> {
Ok(if buf.starts_with(b"\xef\xbb\xbf") {
// Remove UTF-8 BOM.
std::str::from_utf8(&buf[3..])?.into()
} else {
// Assume UTF-8.
String::from_utf8(buf)?
})
}
/// Make a package available in the on-disk cache.
fn prepare_package(spec: &PackageSpec) -> PackageResult<PathBuf> {
let subdir =
format!("typst/packages/{}/{}-{}", spec.namespace, spec.name, spec.version);
if let Some(data_dir) = dirs::data_dir() {
let dir = data_dir.join(&subdir);
if dir.exists() {
return Ok(dir);
}
}
if let Some(cache_dir) = dirs::cache_dir() {
let dir = cache_dir.join(&subdir);
// Download from network if it doesn't exist yet.
if spec.namespace == "preview" && !dir.exists() {
download_package(spec, &dir)?;
}
if dir.exists() {
return Ok(dir);
}
}
Err(PackageError::NotFound(spec.clone()))
}
/// Download a package over the network.
fn download_package(spec: &PackageSpec, package_dir: &Path) -> PackageResult<()> {
// The `@preview` namespace is the only namespace that supports on-demand
// fetching.
assert_eq!(spec.namespace, "preview");
let url = format!(
"https://packages.typst.org/preview/{}-{}.tar.gz",
spec.name, spec.version
);
print_downloading(spec).unwrap();
let reader = match ureq::get(&url).call() {
Ok(response) => response.into_reader(),
Err(ureq::Error::Status(404, _)) => {
return Err(PackageError::NotFound(spec.clone()))
}
Err(_) => return Err(PackageError::NetworkFailed),
};
let decompressed = flate2::read::GzDecoder::new(reader);
tar::Archive::new(decompressed).unpack(package_dir).map_err(|_| {
fs::remove_dir_all(package_dir).ok();
PackageError::MalformedArchive
})
}
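The two functions above fix the on-disk layout and the download endpoint for `@preview` packages. A small sketch that computes both for a made-up package, using plain strings instead of the real `PackageSpec` type (and, unlike `prepare_package`, only checking the cache directory, not the data directory):

```rust
use std::path::PathBuf;

/// Compute the cache directory and download URL used above for a `@preview`
/// package. The package name and version here are invented for illustration.
fn preview_locations(name: &str, version: &str) -> (PathBuf, String) {
    let subdir = format!("typst/packages/preview/{name}-{version}");
    let dir = dirs::cache_dir().unwrap_or_else(|| PathBuf::from(".")).join(subdir);
    let url = format!("https://packages.typst.org/preview/{name}-{version}.tar.gz");
    (dir, url)
}

fn main() {
    let (dir, url) = preview_locations("example", "0.1.0");
    println!("cache dir: {}", dir.display());
    println!("download:  {url}");
}
```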
/// Print that a package downloading is happening.
fn print_downloading(spec: &PackageSpec) -> io::Result<()> {
let mut w = color_stream();
let styles = term::Styles::default();
w.set_color(&styles.header_help)?;
write!(w, "downloading")?;
w.reset()?;
writeln!(w, " {spec}")
}
/// Opens the given file using:
/// - The default file viewer if `open` is `None`.
/// - The given viewer provided by `open` if it is `Some`.
fn open_file(open: Option<&str>, path: &Path) -> StrResult<()> {
if let Some(app) = open {
open::with_in_background(path, app);
} else {
open::that_in_background(path);
}
Ok(())
}
/// Whether a watch event is relevant for compilation.
fn is_event_relevant(event: &notify::Event) -> bool {
match &event.kind {
notify::EventKind::Any => true,
notify::EventKind::Access(_) => false,
notify::EventKind::Create(_) => true,
notify::EventKind::Modify(kind) => match kind {
notify::event::ModifyKind::Any => true,
notify::event::ModifyKind::Data(_) => true,
notify::event::ModifyKind::Metadata(_) => false,
notify::event::ModifyKind::Name(_) => true,
notify::event::ModifyKind::Other => false,
},
notify::EventKind::Remove(_) => true,
notify::EventKind::Other => false,
}
}
impl<'a> codespan_reporting::files::Files<'a> for SystemWorld {
-    type FileId = SourceId;
-    type Name = std::path::Display<'a>;
-    type Source = &'a str;
+    type FileId = FileId;
+    type Name = FileId;
+    type Source = Source;

-    fn name(&'a self, id: SourceId) -> CodespanResult<Self::Name> {
-        Ok(World::source(self, id).path().display())
+    fn name(&'a self, id: FileId) -> CodespanResult<Self::Name> {
+        Ok(id)
    }

-    fn source(&'a self, id: SourceId) -> CodespanResult<Self::Source> {
-        Ok(World::source(self, id).text())
+    fn source(&'a self, id: FileId) -> CodespanResult<Self::Source> {
+        Ok(self.lookup(id))
    }

-    fn line_index(&'a self, id: SourceId, given: usize) -> CodespanResult<usize> {
-        let source = World::source(self, id);
+    fn line_index(&'a self, id: FileId, given: usize) -> CodespanResult<usize> {
+        let source = self.lookup(id);
        source
            .byte_to_line(given)
            .ok_or_else(|| CodespanError::IndexTooLarge {
@@ -768,10 +841,10 @@ impl<'a> codespan_reporting::files::Files<'a> for SystemWorld {
    fn line_range(
        &'a self,
-        id: SourceId,
+        id: FileId,
        given: usize,
    ) -> CodespanResult<std::ops::Range<usize>> {
-        let source = World::source(self, id);
+        let source = self.lookup(id);
        source
            .line_to_range(given)
            .ok_or_else(|| CodespanError::LineTooLarge { given, max: source.len_lines() })
@@ -779,11 +852,11 @@ impl<'a> codespan_reporting::files::Files<'a> for SystemWorld {
    fn column_number(
        &'a self,
-        id: SourceId,
+        id: FileId,
        _: usize,
        given: usize,
    ) -> CodespanResult<usize> {
-        let source = World::source(self, id);
+        let source = self.lookup(id);
        source.byte_to_column(given).ok_or_else(|| {
            let max = source.len_bytes();
            if given <= max {
@@ -823,7 +896,7 @@ impl FontSearcher {
    #[cfg(feature = "embed-fonts")]
    fn search_embedded(&mut self) {
        let mut search = |bytes: &'static [u8]| {
-            let buffer = Buffer::from_static(bytes);
+            let buffer = Bytes::from_static(bytes);
            for (i, font) in Font::iter(buffer).enumerate() {
                self.book.push(font.info().clone());
                self.fonts.push(FontSlot {
@@ -852,45 +925,36 @@ impl FontSearcher {
    }

    /// Search for fonts in the linux system font directories.
-    #[cfg(all(unix, not(target_os = "macos")))]
    fn search_system(&mut self) {
-        self.search_dir("/usr/share/fonts");
-        self.search_dir("/usr/local/share/fonts");
+        if cfg!(target_os = "macos") {
+            self.search_dir("/Library/Fonts");
+            self.search_dir("/Network/Library/Fonts");
+            self.search_dir("/System/Library/Fonts");
+        } else if cfg!(unix) {
+            self.search_dir("/usr/share/fonts");
+            self.search_dir("/usr/local/share/fonts");
+        } else if cfg!(windows) {
+            self.search_dir(
+                env::var_os("WINDIR")
+                    .map(PathBuf::from)
+                    .unwrap_or_else(|| "C:\\Windows".into())
+                    .join("Fonts"),
+            );
+
+            if let Some(roaming) = dirs::config_dir() {
+                self.search_dir(roaming.join("Microsoft\\Windows\\Fonts"));
+            }
+
+            if let Some(local) = dirs::cache_dir() {
+                self.search_dir(local.join("Microsoft\\Windows\\Fonts"));
+            }
+        }

        if let Some(dir) = dirs::font_dir() {
            self.search_dir(dir);
        }
    }

-    /// Search for fonts in the macOS system font directories.
-    #[cfg(target_os = "macos")]
-    fn search_system(&mut self) {
-        self.search_dir("/Library/Fonts");
-        self.search_dir("/Network/Library/Fonts");
-        self.search_dir("/System/Library/Fonts");
-
-        if let Some(dir) = dirs::font_dir() {
-            self.search_dir(dir);
-        }
-    }
-
-    /// Search for fonts in the Windows system font directories.
-    #[cfg(windows)]
-    fn search_system(&mut self) {
-        let windir =
-            std::env::var("WINDIR").unwrap_or_else(|_| "C:\\Windows".to_string());
-
-        self.search_dir(Path::new(&windir).join("Fonts"));
-
-        if let Some(roaming) = dirs::config_dir() {
-            self.search_dir(roaming.join("Microsoft\\Windows\\Fonts"));
-        }
-
-        if let Some(local) = dirs::cache_dir() {
-            self.search_dir(local.join("Microsoft\\Windows\\Fonts"));
-        }
-    }
-
    /// Search for all fonts in a directory recursively.
    fn search_dir(&mut self, path: impl AsRef<Path>) {
        for entry in WalkDir::new(path)


@ -6,10 +6,11 @@ use pulldown_cmark as md;
use typed_arena::Arena; use typed_arena::Arena;
use typst::diag::FileResult; use typst::diag::FileResult;
use typst::eval::Datetime; use typst::eval::Datetime;
use typst::file::FileId;
use typst::font::{Font, FontBook}; use typst::font::{Font, FontBook};
use typst::geom::{Point, Size}; use typst::geom::{Point, Size};
use typst::syntax::{Source, SourceId}; use typst::syntax::Source;
use typst::util::Buffer; use typst::util::Bytes;
use typst::World; use typst::World;
use yaml_front_matter::YamlFrontMatter; use yaml_front_matter::YamlFrontMatter;
@ -414,7 +415,8 @@ fn code_block(resolver: &dyn Resolver, lang: &str, text: &str) -> Html {
return Html::new(format!("<pre>{}</pre>", highlighted.as_str())); return Html::new(format!("<pre>{}</pre>", highlighted.as_str()));
} }
let source = Source::new(SourceId::from_u16(0), Path::new("main.typ"), compile); let id = FileId::new(None, Path::new("main.typ"));
let source = Source::new(id, compile);
let world = DocWorld(source); let world = DocWorld(source);
let mut frames = match typst::compile(&world) { let mut frames = match typst::compile(&world) {
Ok(doc) => doc.pages, Ok(doc) => doc.pages,
@ -461,7 +463,7 @@ fn nest_heading(level: &mut md::HeadingLevel) {
}; };
} }
/// World for example compilations. /// A world for example compilations.
struct DocWorld(Source); struct DocWorld(Source);
impl World for DocWorld { impl World for DocWorld {
@ -469,35 +471,31 @@ impl World for DocWorld {
&LIBRARY &LIBRARY
} }
fn main(&self) -> &Source {
&self.0
}
fn resolve(&self, _: &Path) -> FileResult<SourceId> {
unimplemented!()
}
fn source(&self, id: SourceId) -> &Source {
assert_eq!(id.as_u16(), 0, "invalid source id");
&self.0
}
fn book(&self) -> &Prehashed<FontBook> { fn book(&self) -> &Prehashed<FontBook> {
&FONTS.0 &FONTS.0
} }
fn font(&self, id: usize) -> Option<Font> { fn main(&self) -> Source {
Some(FONTS.1[id].clone()) self.0.clone()
} }
fn file(&self, path: &Path) -> FileResult<Buffer> { fn source(&self, _: FileId) -> FileResult<Source> {
Ok(self.0.clone())
}
fn file(&self, id: FileId) -> FileResult<Bytes> {
assert!(id.package().is_none());
Ok(FILES Ok(FILES
.get_file(path) .get_file(id.path())
.unwrap_or_else(|| panic!("failed to load {path:?}")) .unwrap_or_else(|| panic!("failed to load {:?}", id.path().display()))
.contents() .contents()
.into()) .into())
} }
fn font(&self, index: usize) -> Option<Font> {
Some(FONTS.1[index].clone())
}
fn today(&self, _: Option<i64>) -> Option<Datetime> { fn today(&self, _: Option<i64>) -> Option<Datetime> {
Some(Datetime::from_ymd(1970, 1, 1).unwrap()) Some(Datetime::from_ymd(1970, 1, 1).unwrap())
} }


@@ -57,6 +57,7 @@ pub fn provide(resolver: &dyn Resolver) -> Vec<PageModel> {
        tutorial_pages(resolver),
        reference_pages(resolver),
        guides_pages(resolver),
+        packages_page(),
        markdown_page(resolver, "/docs/", "general/changelog.md"),
        markdown_page(resolver, "/docs/", "general/community.md"),
    ]
@@ -118,6 +119,7 @@ pub enum BodyModel {
    Funcs(FuncsModel),
    Type(TypeModel),
    Symbols(SymbolsModel),
+    Packages,
}

/// Build the tutorial.
@@ -133,14 +135,6 @@ fn tutorial_pages(resolver: &dyn Resolver) -> PageModel {
    page
}

-/// Build the guides section.
-fn guides_pages(resolver: &dyn Resolver) -> PageModel {
-    let mut page = markdown_page(resolver, "/docs/", "guides/welcome.md");
-    page.children =
-        vec![markdown_page(resolver, "/docs/guides/", "guides/guide-for-latex-users.md")];
-    page
-}
-
/// Build the reference.
fn reference_pages(resolver: &dyn Resolver) -> PageModel {
    let mut page = markdown_page(resolver, "/docs/", "reference/welcome.md");
@@ -164,6 +158,27 @@ fn reference_pages(resolver: &dyn Resolver) -> PageModel {
    page
}
/// Build the guides section.
fn guides_pages(resolver: &dyn Resolver) -> PageModel {
let mut page = markdown_page(resolver, "/docs/", "guides/welcome.md");
page.children =
vec![markdown_page(resolver, "/docs/guides/", "guides/guide-for-latex-users.md")];
page
}
/// Build the packages section.
fn packages_page() -> PageModel {
PageModel {
route: "/docs/packages/".into(),
title: "Packages".into(),
description: "Packages for Typst.".into(),
part: None,
outline: vec![],
body: BodyModel::Packages,
children: vec![],
}
}
/// Create a page from a markdown file.
#[track_caller]
fn markdown_page(


@@ -25,8 +25,8 @@ pub fn read(
    vm: &mut Vm,
) -> SourceResult<Str> {
    let Spanned { v: path, span } = path;
-    let path = vm.locate(&path).at(span)?;
-    let data = vm.world().file(&path).at(span)?;
+    let id = vm.location().join(&path).at(span)?;
+    let data = vm.world().file(id).at(span)?;
    let text = std::str::from_utf8(&data)
        .map_err(|_| "file is not valid utf-8")
        .at(span)?;
@@ -66,8 +66,8 @@ pub fn csv(
    vm: &mut Vm,
) -> SourceResult<Array> {
    let Spanned { v: path, span } = path;
-    let path = vm.locate(&path).at(span)?;
-    let data = vm.world().file(&path).at(span)?;
+    let id = vm.location().join(&path).at(span)?;
+    let data = vm.world().file(id).at(span)?;
    let mut builder = csv::ReaderBuilder::new();
    builder.has_headers(false);
@@ -177,8 +177,8 @@ pub fn json(
    vm: &mut Vm,
) -> SourceResult<Value> {
    let Spanned { v: path, span } = path;
-    let path = vm.locate(&path).at(span)?;
-    let data = vm.world().file(&path).at(span)?;
+    let id = vm.location().join(&path).at(span)?;
+    let data = vm.world().file(id).at(span)?;
    let value: serde_json::Value =
        serde_json::from_slice(&data).map_err(format_json_error).at(span)?;
    Ok(convert_json(value))
@@ -243,8 +243,8 @@ pub fn toml(
    vm: &mut Vm,
) -> SourceResult<Value> {
    let Spanned { v: path, span } = path;
-    let path = vm.locate(&path).at(span)?;
-    let data = vm.world().file(&path).at(span)?;
+    let id = vm.location().join(&path).at(span)?;
+    let data = vm.world().file(id).at(span)?;
    let raw = std::str::from_utf8(&data)
        .map_err(|_| "file is not valid utf-8")
@@ -352,8 +352,8 @@ pub fn yaml(
    vm: &mut Vm,
) -> SourceResult<Value> {
    let Spanned { v: path, span } = path;
-    let path = vm.locate(&path).at(span)?;
-    let data = vm.world().file(&path).at(span)?;
+    let id = vm.location().join(&path).at(span)?;
+    let data = vm.world().file(id).at(span)?;
    let value: serde_yaml::Value =
        serde_yaml::from_slice(&data).map_err(format_yaml_error).at(span)?;
    Ok(convert_yaml(value))
@@ -455,8 +455,8 @@ pub fn xml(
    vm: &mut Vm,
) -> SourceResult<Value> {
    let Spanned { v: path, span } = path;
-    let path = vm.locate(&path).at(span)?;
-    let data = vm.world().file(&path).at(span)?;
+    let id = vm.location().join(&path).at(span)?;
+    let data = vm.world().file(id).at(span)?;
    let text = std::str::from_utf8(&data).map_err(FileError::from).at(span)?;
    let document = roxmltree::Document::parse(text).map_err(format_xml_error).at(span)?;
    Ok(convert_xml(document.root()))


@ -7,7 +7,8 @@ use ecow::{eco_vec, EcoVec};
use hayagriva::io::{BibLaTeXError, YamlBibliographyError}; use hayagriva::io::{BibLaTeXError, YamlBibliographyError};
use hayagriva::style::{self, Brackets, Citation, Database, DisplayString, Formatting}; use hayagriva::style::{self, Brackets, Citation, Database, DisplayString, Formatting};
use hayagriva::Entry; use hayagriva::Entry;
use typst::util::option_eq; use typst::diag::FileError;
use typst::util::{option_eq, Bytes};
use super::{LinkElem, LocalName, RefElem}; use super::{LinkElem, LocalName, RefElem};
use crate::layout::{BlockElem, GridElem, ParElem, Sizing, TrackSizings, VElem}; use crate::layout::{BlockElem, GridElem, ParElem, Sizing, TrackSizings, VElem};
@ -49,18 +50,31 @@ pub struct BibliographyElem {
/// Path to a Hayagriva `.yml` or BibLaTeX `.bib` file. /// Path to a Hayagriva `.yml` or BibLaTeX `.bib` file.
#[required] #[required]
#[parse( #[parse(
let Spanned { v: mut paths, span } = let Spanned { v: paths, span } =
args.expect::<Spanned<BibPaths>>("path to bibliography file")?; args.expect::<Spanned<BibPaths>>("path to bibliography file")?;
for path in &mut paths.0 {
// resolve paths // Load bibliography files.
*path = vm.locate(path).at(span)?.to_string_lossy().into(); let data = paths.0
} .iter()
// check that parsing works .map(|path| {
let _ = load(vm.world(), &paths).at(span)?; let id = vm.location().join(path).at(span)?;
vm.world().file(id).at(span)
})
.collect::<SourceResult<Vec<Bytes>>>()?;
// Check that parsing works.
let _ = load(&paths, &data).at(span)?;
paths paths
)] )]
pub path: BibPaths, pub path: BibPaths,
/// The raw file buffers.
#[internal]
#[required]
#[parse(data)]
pub data: Vec<Bytes>,
/// The title of the bibliography. /// The title of the bibliography.
/// ///
/// - When set to `{auto}`, an appropriate title for the [text /// - When set to `{auto}`, an appropriate title for the [text
@ -80,7 +94,7 @@ pub struct BibliographyElem {
pub style: BibliographyStyle, pub style: BibliographyStyle,
} }
/// A list of bib file paths. /// A list of bibliography file paths.
#[derive(Debug, Default, Clone, Hash)] #[derive(Debug, Default, Clone, Hash)]
pub struct BibPaths(Vec<EcoString>); pub struct BibPaths(Vec<EcoString>);
@ -111,18 +125,20 @@ impl BibliographyElem {
vt.introspector vt.introspector
.query(&Self::func().select()) .query(&Self::func().select())
.into_iter() .into_iter()
.flat_map(|elem| load(vt.world, &elem.to::<Self>().unwrap().path())) .flat_map(|elem| {
let elem = elem.to::<Self>().unwrap();
load(&elem.path(), &elem.data())
})
.flatten() .flatten()
.any(|entry| entry.key() == key) .any(|entry| entry.key() == key)
} }
/// Find all bibliography keys. /// Find all bibliography keys.
pub fn keys( pub fn keys(
world: Tracked<dyn World + '_>,
introspector: Tracked<Introspector>, introspector: Tracked<Introspector>,
) -> Vec<(EcoString, Option<EcoString>)> { ) -> Vec<(EcoString, Option<EcoString>)> {
Self::find(introspector) Self::find(introspector)
.and_then(|elem| load(world, &elem.path())) .and_then(|elem| load(&elem.path(), &elem.data()))
.into_iter() .into_iter()
.flatten() .flatten()
.map(|entry| { .map(|entry| {
@ -425,19 +441,15 @@ impl Works {
_ => elem.to::<CiteElem>().unwrap().clone(), _ => elem.to::<CiteElem>().unwrap().clone(),
}) })
.collect(); .collect();
Ok(create(vt.world, bibliography, citations)) Ok(create(bibliography, citations))
} }
} }
/// Generate all citations and the whole bibliography. /// Generate all citations and the whole bibliography.
#[comemo::memoize] #[comemo::memoize]
fn create( fn create(bibliography: BibliographyElem, citations: Vec<CiteElem>) -> Arc<Works> {
world: Tracked<dyn World + '_>,
bibliography: BibliographyElem,
citations: Vec<CiteElem>,
) -> Arc<Works> {
let span = bibliography.span(); let span = bibliography.span();
let entries = load(world, &bibliography.path()).unwrap(); let entries = load(&bibliography.path(), &bibliography.data()).unwrap();
let style = bibliography.style(StyleChain::default()); let style = bibliography.style(StyleChain::default());
let bib_location = bibliography.0.location().unwrap(); let bib_location = bibliography.0.location().unwrap();
let ref_location = |target: &Entry| { let ref_location = |target: &Entry| {
@ -587,16 +599,12 @@ fn create(
/// Load bibliography entries from a path. /// Load bibliography entries from a path.
#[comemo::memoize] #[comemo::memoize]
fn load( fn load(paths: &BibPaths, data: &[Bytes]) -> StrResult<EcoVec<hayagriva::Entry>> {
world: Tracked<dyn World + '_>,
paths: &BibPaths,
) -> StrResult<EcoVec<hayagriva::Entry>> {
let mut result = EcoVec::new(); let mut result = EcoVec::new();
// We might have multiple bib/yaml files // We might have multiple bib/yaml files
for path in &paths.0 { for (path, bytes) in paths.0.iter().zip(data) {
let buffer = world.file(Path::new(path.as_str()))?; let src = std::str::from_utf8(bytes).map_err(|_| FileError::InvalidUtf8)?;
let src = std::str::from_utf8(&buffer).map_err(|_| "file is not valid utf-8")?;
let entries = parse_bib(path, src)?; let entries = parse_bib(path, src)?;
result.extend(entries); result.extend(entries);
} }

View File

@@ -189,7 +189,7 @@ impl Show for RefElem {
                )
            })
            .hint(eco_format!(
-                "did you mean to use `#set {}(numbering: \"1.\")`?",
+                "you can enable heading numbering with `#set {}(numbering: \"1.\")`?",
                elem.func().name()
            ))
            .at(span)?;


@@ -19,6 +19,8 @@ pub use typst::eval::{
    Func, IntoValue, Never, NoneValue, Scope, Str, Symbol, Type, Value, Vm,
};
#[doc(no_inline)]
+pub use typst::file::FileId;
+#[doc(no_inline)]
pub use typst::geom::*;
#[doc(no_inline)]
pub use typst::model::{


@ -2,6 +2,7 @@ use std::ffi::OsStr;
use std::path::Path; use std::path::Path;
use typst::image::{Image, ImageFormat, RasterFormat, VectorFormat}; use typst::image::{Image, ImageFormat, RasterFormat, VectorFormat};
use typst::util::Bytes;
use crate::meta::{Figurable, LocalName}; use crate::meta::{Figurable, LocalName};
use crate::prelude::*; use crate::prelude::*;
@ -37,12 +38,18 @@ pub struct ImageElem {
#[parse( #[parse(
let Spanned { v: path, span } = let Spanned { v: path, span } =
args.expect::<Spanned<EcoString>>("path to image file")?; args.expect::<Spanned<EcoString>>("path to image file")?;
let path: EcoString = vm.locate(&path).at(span)?.to_string_lossy().into(); let id = vm.location().join(&path).at(span)?;
let _ = load(vm.world(), &path, None, None).at(span)?; let data = vm.world().file(id).at(span)?;
path path
)] )]
pub path: EcoString, pub path: EcoString,
/// The raw file data.
#[internal]
#[required]
#[parse(data)]
pub data: Bytes,
/// The width of the image. /// The width of the image.
pub width: Smart<Rel<Length>>, pub width: Smart<Rel<Length>>,
@ -65,10 +72,29 @@ impl Layout for ImageElem {
styles: StyleChain, styles: StyleChain,
regions: Regions, regions: Regions,
) -> SourceResult<Fragment> { ) -> SourceResult<Fragment> {
let first = families(styles).next(); let ext = Path::new(self.path().as_str())
let fallback_family = first.as_ref().map(|f| f.as_str()); .extension()
let image = .and_then(OsStr::to_str)
load(vt.world, &self.path(), fallback_family, self.alt(styles)).unwrap(); .unwrap_or_default()
.to_lowercase();
let format = match ext.as_str() {
"png" => ImageFormat::Raster(RasterFormat::Png),
"jpg" | "jpeg" => ImageFormat::Raster(RasterFormat::Jpg),
"gif" => ImageFormat::Raster(RasterFormat::Gif),
"svg" | "svgz" => ImageFormat::Vector(VectorFormat::Svg),
_ => bail!(self.span(), "unknown image format"),
};
let image = Image::with_fonts(
self.data(),
format,
vt.world,
families(styles).next().as_ref().map(|f| f.as_str()),
self.alt(styles),
)
.at(self.span())?;
let sizing = Axes::new(self.width(styles), self.height(styles)); let sizing = Axes::new(self.width(styles), self.height(styles));
let region = sizing let region = sizing
.zip(regions.base()) .zip(regions.base())
@ -169,24 +195,3 @@ pub enum ImageFit {
/// this means that the image will be distorted. /// this means that the image will be distorted.
Stretch, Stretch,
} }
/// Load an image from a path.
#[comemo::memoize]
fn load(
world: Tracked<dyn World + '_>,
full: &str,
fallback_family: Option<&str>,
alt: Option<EcoString>,
) -> StrResult<Image> {
let full = Path::new(full);
let buffer = world.file(full)?;
let ext = full.extension().and_then(OsStr::to_str).unwrap_or_default();
let format = match ext.to_lowercase().as_str() {
"png" => ImageFormat::Raster(RasterFormat::Png),
"jpg" | "jpeg" => ImageFormat::Raster(RasterFormat::Jpg),
"gif" => ImageFormat::Raster(RasterFormat::Gif),
"svg" | "svgz" => ImageFormat::Vector(VectorFormat::Svg),
_ => bail!("unknown image format"),
};
Image::with_fonts(buffer, format, world, fallback_family, alt)
}

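The extension match added above decides the image format before decoding. The same mapping in isolation, with a local enum standing in for the typst image-format types:

```rust
use std::ffi::OsStr;
use std::path::Path;

/// Local stand-in for the raster/vector format enums used above.
#[derive(Debug, PartialEq)]
enum SketchFormat { Png, Jpg, Gif, Svg }

/// Mirror of the extension match introduced above (case-insensitive).
fn format_for(path: &Path) -> Option<SketchFormat> {
    let ext = path.extension().and_then(OsStr::to_str).unwrap_or_default().to_lowercase();
    match ext.as_str() {
        "png" => Some(SketchFormat::Png),
        "jpg" | "jpeg" => Some(SketchFormat::Jpg),
        "gif" => Some(SketchFormat::Gif),
        "svg" | "svgz" => Some(SketchFormat::Svg),
        _ => None,
    }
}

fn main() {
    assert_eq!(format_for(Path::new("tiger.SVG")), Some(SketchFormat::Svg));
    assert_eq!(format_for(Path::new("notes.txt")), None);
}
```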

@ -2,14 +2,14 @@
use std::fmt::{self, Display, Formatter}; use std::fmt::{self, Display, Formatter};
use std::io; use std::io;
use std::ops::Range;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::str::Utf8Error; use std::str::Utf8Error;
use std::string::FromUtf8Error; use std::string::FromUtf8Error;
use comemo::Tracked; use comemo::Tracked;
use crate::syntax::{ErrorPos, Span, Spanned}; use crate::file::PackageSpec;
use crate::syntax::{Span, Spanned};
use crate::World; use crate::World;
/// Early-return with a [`StrResult`] or [`SourceResult`]. /// Early-return with a [`StrResult`] or [`SourceResult`].
@ -76,8 +76,6 @@ pub type SourceResult<T> = Result<T, Box<Vec<SourceError>>>;
pub struct SourceError { pub struct SourceError {
/// The span of the erroneous node in the source code. /// The span of the erroneous node in the source code.
pub span: Span, pub span: Span,
/// The position in the node where the error should be annotated.
pub pos: ErrorPos,
/// A diagnostic message describing the problem. /// A diagnostic message describing the problem.
pub message: EcoString, pub message: EcoString,
/// The trace of function calls leading to the error. /// The trace of function calls leading to the error.
@ -92,36 +90,17 @@ impl SourceError {
pub fn new(span: Span, message: impl Into<EcoString>) -> Self { pub fn new(span: Span, message: impl Into<EcoString>) -> Self {
Self { Self {
span, span,
pos: ErrorPos::Full,
trace: vec![], trace: vec![],
message: message.into(), message: message.into(),
hints: vec![], hints: vec![],
} }
} }
/// Adjust the position in the node where the error should be annotated.
pub fn with_pos(mut self, pos: ErrorPos) -> Self {
self.pos = pos;
self
}
/// Adds user-facing hints to the error. /// Adds user-facing hints to the error.
pub fn with_hints(mut self, hints: impl IntoIterator<Item = EcoString>) -> Self { pub fn with_hints(mut self, hints: impl IntoIterator<Item = EcoString>) -> Self {
self.hints.extend(hints); self.hints.extend(hints);
self self
} }
/// The range in the source file identified by
/// [`self.span.source()`](Span::source) where the error should be
/// annotated.
pub fn range(&self, world: &dyn World) -> Range<usize> {
let full = world.source(self.span.source()).range(self.span);
match self.pos {
ErrorPos::Full => full,
ErrorPos::Start => full.start..full.start,
ErrorPos::End => full.end..full.end,
}
}
} }
/// A part of an error's [trace](SourceError::trace). /// A part of an error's [trace](SourceError::trace).
@ -171,12 +150,17 @@ impl<T> Trace<T> for SourceResult<T> {
if span.is_detached() { if span.is_detached() {
return errors; return errors;
} }
let range = world.source(span.source()).range(span);
let trace_range = span.range(&*world);
for error in errors.iter_mut().filter(|e| !e.span.is_detached()) { for error in errors.iter_mut().filter(|e| !e.span.is_detached()) {
// Skip traces that surround the error. // Skip traces that surround the error.
let error_range = world.source(error.span.source()).range(error.span); if error.span.id() == span.id() {
if range.start <= error_range.start && range.end >= error_range.end { let error_range = error.span.range(&*world);
continue; if trace_range.start <= error_range.start
&& trace_range.end >= error_range.end
{
continue;
}
} }
error.trace.push(Spanned::new(make_point(), span)); error.trace.push(Spanned::new(make_point(), span));
@ -262,6 +246,8 @@ pub enum FileError {
NotSource, NotSource,
/// The file was not valid UTF-8, but should have been. /// The file was not valid UTF-8, but should have been.
InvalidUtf8, InvalidUtf8,
/// The package the file is part of could not be loaded.
Package(PackageError),
/// Another error. /// Another error.
Other, Other,
} }
@ -294,6 +280,7 @@ impl Display for FileError {
Self::IsDirectory => f.pad("failed to load file (is a directory)"), Self::IsDirectory => f.pad("failed to load file (is a directory)"),
Self::NotSource => f.pad("not a typst source file"), Self::NotSource => f.pad("not a typst source file"),
Self::InvalidUtf8 => f.pad("file is not valid utf-8"), Self::InvalidUtf8 => f.pad("file is not valid utf-8"),
Self::Package(error) => error.fmt(f),
Self::Other => f.pad("failed to load file"), Self::Other => f.pad("failed to load file"),
} }
} }
@ -311,12 +298,54 @@ impl From<FromUtf8Error> for FileError {
} }
} }
impl From<PackageError> for FileError {
fn from(error: PackageError) -> Self {
Self::Package(error)
}
}
impl From<FileError> for EcoString { impl From<FileError> for EcoString {
fn from(error: FileError) -> Self { fn from(error: FileError) -> Self {
eco_format!("{error}") eco_format!("{error}")
} }
} }
/// A result type with a package-related error.
pub type PackageResult<T> = Result<T, PackageError>;
/// An error that occurred while trying to load a package.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum PackageError {
/// The specified package does not exist.
NotFound(PackageSpec),
/// Failed to retrieve the package through the network.
NetworkFailed,
/// The package archive was malformed.
MalformedArchive,
/// Another error.
Other,
}
impl std::error::Error for PackageError {}
impl Display for PackageError {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match self {
Self::NotFound(spec) => {
write!(f, "package not found (searched for {spec})")
}
Self::NetworkFailed => f.pad("failed to load package (network failed)"),
Self::MalformedArchive => f.pad("failed to load package (archive malformed)"),
Self::Other => f.pad("failed to load package"),
}
}
}
impl From<PackageError> for EcoString {
fn from(error: PackageError) -> Self {
eco_format!("{error}")
}
}
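A minimal sketch of how the new package errors surface, assuming the `PackageError`, `PackageSpec`, and `Version` types introduced in this commit; the `demo` function and the concrete spec are invented for illustration.

use crate::diag::{FileError, PackageError};
use crate::file::{PackageSpec, Version};

fn demo() {
    let spec = PackageSpec {
        namespace: "preview".into(),
        name: "example".into(),
        version: Version { major: 0, minor: 1, patch: 0 },
    };
    // Uses the Display impl above; prints:
    // package not found (searched for @preview/example:0.1.0)
    println!("{}", PackageError::NotFound(spec.clone()));
    // The From impl lets package errors flow through file loading.
    let file_error = FileError::from(PackageError::NotFound(spec));
    println!("{file_error}");
}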
/// Format a user-facing error message for an XML-like file format. /// Format a user-facing error message for an XML-like file format.
pub fn format_xml_like_error(format: &str, error: roxmltree::Error) -> EcoString { pub fn format_xml_like_error(format: &str, error: roxmltree::Error) -> EcoString {
match error { match error {

View File

@ -11,9 +11,10 @@ use super::{
Value, Vm, Value, Vm,
}; };
use crate::diag::{bail, SourceResult, StrResult}; use crate::diag::{bail, SourceResult, StrResult};
use crate::file::FileId;
use crate::model::{DelayedErrors, ElemFunc, Introspector, Locator, Vt}; use crate::model::{DelayedErrors, ElemFunc, Introspector, Locator, Vt};
use crate::syntax::ast::{self, AstNode, Expr, Ident}; use crate::syntax::ast::{self, AstNode, Expr, Ident};
use crate::syntax::{SourceId, Span, SyntaxNode}; use crate::syntax::{Span, SyntaxNode};
use crate::World; use crate::World;
/// An evaluatable function. /// An evaluatable function.
@ -125,7 +126,6 @@ impl Func {
args: impl IntoIterator<Item = T>, args: impl IntoIterator<Item = T>,
) -> SourceResult<Value> { ) -> SourceResult<Value> {
let route = Route::default(); let route = Route::default();
let id = SourceId::detached();
let scopes = Scopes::new(None); let scopes = Scopes::new(None);
let mut locator = Locator::chained(vt.locator.track()); let mut locator = Locator::chained(vt.locator.track());
let vt = Vt { let vt = Vt {
@ -135,7 +135,7 @@ impl Func {
delayed: TrackedMut::reborrow_mut(&mut vt.delayed), delayed: TrackedMut::reborrow_mut(&mut vt.delayed),
tracer: TrackedMut::reborrow_mut(&mut vt.tracer), tracer: TrackedMut::reborrow_mut(&mut vt.tracer),
}; };
let mut vm = Vm::new(vt, route.track(), id, scopes); let mut vm = Vm::new(vt, route.track(), FileId::detached(), scopes);
let args = Args::new(self.span(), args); let args = Args::new(self.span(), args);
self.call_vm(&mut vm, args) self.call_vm(&mut vm, args)
} }
@ -297,7 +297,7 @@ pub struct ParamInfo {
#[derive(Hash)] #[derive(Hash)]
pub(super) struct Closure { pub(super) struct Closure {
/// The source file where the closure was defined. /// The source file where the closure was defined.
pub location: SourceId, pub location: FileId,
/// The name of the closure. /// The name of the closure.
pub name: Option<Ident>, pub name: Option<Ident>,
/// Captured values from outer scopes. /// Captured values from outer scopes.

View File

@ -13,7 +13,6 @@ use crate::geom::{Abs, Dir};
use crate::model::{Content, ElemFunc, Introspector, Label, StyleChain, Styles, Vt}; use crate::model::{Content, ElemFunc, Introspector, Label, StyleChain, Styles, Vt};
use crate::syntax::Span; use crate::syntax::Span;
use crate::util::hash128; use crate::util::hash128;
use crate::World;
/// Definition of Typst's standard library. /// Definition of Typst's standard library.
#[derive(Debug, Clone, Hash)] #[derive(Debug, Clone, Hash)]
@ -66,10 +65,8 @@ pub struct LangItems {
pub reference: fn(target: Label, supplement: Option<Content>) -> Content, pub reference: fn(target: Label, supplement: Option<Content>) -> Content,
/// The keys contained in the bibliography and short descriptions of them. /// The keys contained in the bibliography and short descriptions of them.
#[allow(clippy::type_complexity)] #[allow(clippy::type_complexity)]
pub bibliography_keys: fn( pub bibliography_keys:
world: Tracked<dyn World + '_>, fn(introspector: Tracked<Introspector>) -> Vec<(EcoString, Option<EcoString>)>,
introspector: Tracked<Introspector>,
) -> Vec<(EcoString, Option<EcoString>)>,
/// A section heading: `= Introduction`. /// A section heading: `= Introduction`.
pub heading: fn(level: NonZeroUsize, body: Content) -> Content, pub heading: fn(level: NonZeroUsize, body: Content) -> Content,
/// The heading function. /// The heading function.

View File

@ -55,27 +55,24 @@ pub use self::value::{Dynamic, Type, Value};
use std::collections::HashSet; use std::collections::HashSet;
use std::mem; use std::mem;
use std::path::{Path, PathBuf}; use std::path::Path;
use comemo::{Track, Tracked, TrackedMut, Validate}; use comemo::{Track, Tracked, TrackedMut, Validate};
use ecow::{EcoString, EcoVec}; use ecow::{EcoString, EcoVec};
use unicode_segmentation::UnicodeSegmentation; use unicode_segmentation::UnicodeSegmentation;
use self::func::{CapturesVisitor, Closure}; use self::func::{CapturesVisitor, Closure};
use crate::diag::{
bail, error, At, SourceError, SourceResult, StrResult, Trace, Tracepoint,
};
use crate::file::{FileId, PackageManifest, PackageSpec};
use crate::model::{ use crate::model::{
Content, Introspector, Label, Locator, Recipe, ShowableSelector, Styles, Transform, Content, DelayedErrors, Introspector, Label, Locator, Recipe, ShowableSelector,
Unlabellable, Vt, Styles, Transform, Unlabellable, Vt,
}; };
use crate::syntax::ast::AstNode; use crate::syntax::ast::{self, AstNode};
use crate::syntax::{ use crate::syntax::{parse_code, Source, Span, Spanned, SyntaxKind, SyntaxNode};
ast, parse_code, Source, SourceId, Span, Spanned, SyntaxKind, SyntaxNode,
};
use crate::util::PathExt;
use crate::World; use crate::World;
use crate::{
diag::{bail, error, At, SourceError, SourceResult, StrResult, Trace, Tracepoint},
model::DelayedErrors,
};
const MAX_ITERATIONS: usize = 10_000; const MAX_ITERATIONS: usize = 10_000;
const MAX_CALL_DEPTH: usize = 64; const MAX_CALL_DEPTH: usize = 64;
@ -91,9 +88,8 @@ pub fn eval(
) -> SourceResult<Module> { ) -> SourceResult<Module> {
// Prevent cyclic evaluation. // Prevent cyclic evaluation.
let id = source.id(); let id = source.id();
let path = if id.is_detached() { Path::new("") } else { world.source(id).path() };
if route.contains(id) { if route.contains(id) {
panic!("Tried to cyclicly evaluate {}", path.display()); panic!("Tried to cyclicly evaluate {}", id.path().display());
} }
// Hook up the lang items. // Hook up the lang items.
@ -130,7 +126,7 @@ pub fn eval(
} }
// Assemble the module. // Assemble the module.
let name = path.file_stem().unwrap_or_default().to_string_lossy(); let name = id.path().file_stem().unwrap_or_default().to_string_lossy();
Ok(Module::new(name).with_scope(vm.scopes.top).with_content(result?)) Ok(Module::new(name).with_scope(vm.scopes.top).with_content(result?))
} }
@ -166,7 +162,7 @@ pub fn eval_string(
// Prepare VM. // Prepare VM.
let route = Route::default(); let route = Route::default();
let id = SourceId::detached(); let id = FileId::detached();
let scopes = Scopes::new(Some(world.library())); let scopes = Scopes::new(Some(world.library()));
let mut vm = Vm::new(vt, route.track(), id, scopes); let mut vm = Vm::new(vt, route.track(), id, scopes);
@ -194,7 +190,7 @@ pub struct Vm<'a> {
/// The route of source ids the VM took to reach its current location. /// The route of source ids the VM took to reach its current location.
route: Tracked<'a, Route<'a>>, route: Tracked<'a, Route<'a>>,
/// The current location. /// The current location.
location: SourceId, location: FileId,
/// A control flow event that is currently happening. /// A control flow event that is currently happening.
flow: Option<FlowEvent>, flow: Option<FlowEvent>,
/// The stack of scopes. /// The stack of scopes.
@ -210,7 +206,7 @@ impl<'a> Vm<'a> {
fn new( fn new(
vt: Vt<'a>, vt: Vt<'a>,
route: Tracked<'a, Route>, route: Tracked<'a, Route>,
location: SourceId, location: FileId,
scopes: Scopes<'a>, scopes: Scopes<'a>,
) -> Self { ) -> Self {
let traced = vt.tracer.span(location); let traced = vt.tracer.span(location);
@ -232,6 +228,11 @@ impl<'a> Vm<'a> {
self.vt.world self.vt.world
} }
/// The current location, relative to which paths are resolved.
pub fn location(&self) -> FileId {
self.location
}
/// Define a variable in the current scope. /// Define a variable in the current scope.
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
pub fn define(&mut self, var: ast::Ident, value: impl IntoValue) { pub fn define(&mut self, var: ast::Ident, value: impl IntoValue) {
@ -241,23 +242,6 @@ impl<'a> Vm<'a> {
} }
self.scopes.top.define(var.take(), value); self.scopes.top.define(var.take(), value);
} }
/// Resolve a user-entered path to be relative to the compilation
/// environment's root.
#[tracing::instrument(skip_all)]
pub fn locate(&self, path: &str) -> StrResult<PathBuf> {
if !self.location.is_detached() {
if let Some(path) = path.strip_prefix('/') {
return Ok(self.world().root().join(path).normalize());
}
if let Some(dir) = self.world().source(self.location).path().parent() {
return Ok(dir.join(path).normalize());
}
}
bail!("cannot access file system from here")
}
} }
/// A control flow event that occurred during evaluation. /// A control flow event that occurred during evaluation.
@ -296,12 +280,12 @@ pub struct Route<'a> {
// covariant over the constraint. If it becomes invariant, we're in for a // covariant over the constraint. If it becomes invariant, we're in for a
// world of lifetime pain. // world of lifetime pain.
outer: Option<Tracked<'a, Self, <Route<'static> as Validate>::Constraint>>, outer: Option<Tracked<'a, Self, <Route<'static> as Validate>::Constraint>>,
id: Option<SourceId>, id: Option<FileId>,
} }
impl<'a> Route<'a> { impl<'a> Route<'a> {
/// Create a new route with just one entry. /// Create a new route with just one entry.
pub fn new(id: SourceId) -> Self { pub fn new(id: FileId) -> Self {
Self { id: Some(id), outer: None } Self { id: Some(id), outer: None }
} }
@ -309,7 +293,7 @@ impl<'a> Route<'a> {
/// ///
/// You must guarantee that `outer` lives longer than the resulting /// You must guarantee that `outer` lives longer than the resulting
/// route is ever used. /// route is ever used.
pub fn insert(outer: Tracked<'a, Self>, id: SourceId) -> Self { pub fn insert(outer: Tracked<'a, Self>, id: FileId) -> Self {
Route { outer: Some(outer), id: Some(id) } Route { outer: Some(outer), id: Some(id) }
} }
@ -328,7 +312,7 @@ impl<'a> Route<'a> {
#[comemo::track] #[comemo::track]
impl<'a> Route<'a> { impl<'a> Route<'a> {
/// Whether the given id is part of the route. /// Whether the given id is part of the route.
fn contains(&self, id: SourceId) -> bool { fn contains(&self, id: FileId) -> bool {
self.id == Some(id) || self.outer.map_or(false, |outer| outer.contains(id)) self.id == Some(id) || self.outer.map_or(false, |outer| outer.contains(id))
} }
} }
@ -358,8 +342,8 @@ impl Tracer {
#[comemo::track] #[comemo::track]
impl Tracer { impl Tracer {
/// The traced span if it is part of the given source file. /// The traced span if it is part of the given source file.
fn span(&self, id: SourceId) -> Option<Span> { fn span(&self, id: FileId) -> Option<Span> {
if self.span.map(Span::source) == Some(id) { if self.span.map(Span::id) == Some(id) {
self.span self.span
} else { } else {
None None
@ -1764,20 +1748,49 @@ fn import(
} }
}; };
// Handle package and file imports.
let path = path.as_str();
if path.starts_with('@') {
let spec = path.parse::<PackageSpec>().at(span)?;
import_package(vm, spec, span)
} else {
import_file(vm, path, span)
}
}
/// Import an external package.
fn import_package(vm: &mut Vm, spec: PackageSpec, span: Span) -> SourceResult<Module> {
// Evaluate the manifest.
let manifest_id = FileId::new(Some(spec.clone()), Path::new("/typst.toml"));
let bytes = vm.world().file(manifest_id).at(span)?;
let manifest = PackageManifest::parse(&bytes).at(span)?;
manifest.validate(&spec).at(span)?;
// Evaluate the entry point.
let entrypoint = Path::new("/").join(manifest.package.entrypoint.as_str());
let entrypoint_id = FileId::new(Some(spec), &entrypoint);
let source = vm.world().source(entrypoint_id).at(span)?;
let point = || Tracepoint::Import;
Ok(eval(vm.world(), vm.route, TrackedMut::reborrow_mut(&mut vm.vt.tracer), &source)
.trace(vm.world(), point, span)?
.with_name(manifest.package.name))
}
/// Import a file from a path.
fn import_file(vm: &mut Vm, path: &str, span: Span) -> SourceResult<Module> {
// Load the source file. // Load the source file.
let world = vm.world(); let world = vm.world();
let full = vm.locate(&path).at(span)?; let id = vm.location().join(path).at(span)?;
let id = world.resolve(&full).at(span)?; let source = world.source(id).at(span)?;
// Prevent cyclic importing. // Prevent cyclic importing.
if vm.route.contains(id) { if vm.route.contains(source.id()) {
bail!(span, "cyclic import"); bail!(span, "cyclic import");
} }
// Evaluate the file. // Evaluate the file.
let source = world.source(id);
let point = || Tracepoint::Import; let point = || Tracepoint::Import;
eval(world, vm.route, TrackedMut::reborrow_mut(&mut vm.vt.tracer), source) eval(world, vm.route, TrackedMut::reborrow_mut(&mut vm.vt.tracer), &source)
.trace(world, point, span) .trace(world, point, span)
} }
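To make the control flow above concrete, here is a small sketch of the ids that `import_package` would construct for a hypothetical `@preview/example:0.1.0` whose manifest declares `entrypoint = "lib.typ"` (all names invented):

use std::path::Path;
use crate::file::{FileId, PackageSpec};

fn demo() {
    let spec: PackageSpec = "@preview/example:0.1.0".parse().unwrap();
    // The manifest is always read from the package root.
    let manifest_id = FileId::new(Some(spec.clone()), Path::new("/typst.toml"));
    // The entrypoint from the manifest is rooted at "/" within the package.
    let entrypoint = Path::new("/").join("lib.typ");
    let entrypoint_id = FileId::new(Some(spec), &entrypoint);
    assert_eq!(entrypoint_id.path(), Path::new("/lib.typ"));
    // `world.file(manifest_id)` and `world.source(entrypoint_id)` would then
    // fetch the actual bytes and source through the World trait.
    let _ = manifest_id;
}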

View File

@ -7,15 +7,20 @@ use super::{Content, Scope, Value};
use crate::diag::StrResult; use crate::diag::StrResult;
/// An evaluated module, ready for importing or typesetting. /// An evaluated module, ready for importing or typesetting.
///
/// Values of this type are cheap to clone and hash.
#[derive(Clone, Hash)] #[derive(Clone, Hash)]
#[allow(clippy::derived_hash_with_manual_eq)] #[allow(clippy::derived_hash_with_manual_eq)]
pub struct Module(Arc<Repr>); pub struct Module {
/// The module's name.
name: EcoString,
/// The reference-counted inner fields.
inner: Arc<Repr>,
}
/// The internal representation. /// The internal representation.
#[derive(Clone, Hash)] #[derive(Clone, Hash)]
struct Repr { struct Repr {
/// The module's name.
name: EcoString,
/// The top-level definitions that were bound in this module. /// The top-level definitions that were bound in this module.
scope: Scope, scope: Scope,
/// The module's layoutable contents. /// The module's layoutable contents.
@ -25,38 +30,43 @@ struct Repr {
impl Module { impl Module {
/// Create a new module. /// Create a new module.
pub fn new(name: impl Into<EcoString>) -> Self { pub fn new(name: impl Into<EcoString>) -> Self {
Self(Arc::new(Repr { Self {
name: name.into(), name: name.into(),
scope: Scope::new(), inner: Arc::new(Repr { scope: Scope::new(), content: Content::empty() }),
content: Content::empty(), }
})) }
/// Update the module's name.
pub fn with_name(mut self, name: impl Into<EcoString>) -> Self {
self.name = name.into();
self
} }
/// Update the module's scope. /// Update the module's scope.
pub fn with_scope(mut self, scope: Scope) -> Self { pub fn with_scope(mut self, scope: Scope) -> Self {
Arc::make_mut(&mut self.0).scope = scope; Arc::make_mut(&mut self.inner).scope = scope;
self self
} }
/// Update the module's content. /// Update the module's content.
pub fn with_content(mut self, content: Content) -> Self { pub fn with_content(mut self, content: Content) -> Self {
Arc::make_mut(&mut self.0).content = content; Arc::make_mut(&mut self.inner).content = content;
self self
} }
/// Get the module's name. /// Get the module's name.
pub fn name(&self) -> &EcoString { pub fn name(&self) -> &EcoString {
&self.0.name &self.name
} }
/// Access the module's scope. /// Access the module's scope.
pub fn scope(&self) -> &Scope { pub fn scope(&self) -> &Scope {
&self.0.scope &self.inner.scope
} }
/// Access the module's scope, mutably. /// Access the module's scope, mutably.
pub fn scope_mut(&mut self) -> &mut Scope { pub fn scope_mut(&mut self) -> &mut Scope {
&mut Arc::make_mut(&mut self.0).scope &mut Arc::make_mut(&mut self.inner).scope
} }
/// Try to access a definition in the module. /// Try to access a definition in the module.
@ -68,7 +78,7 @@ impl Module {
/// Extract the module's content. /// Extract the module's content.
pub fn content(self) -> Content { pub fn content(self) -> Content {
match Arc::try_unwrap(self.0) { match Arc::try_unwrap(self.inner) {
Ok(repr) => repr.content, Ok(repr) => repr.content,
Err(arc) => arc.content.clone(), Err(arc) => arc.content.clone(),
} }
@ -83,6 +93,6 @@ impl Debug for Module {
impl PartialEq for Module { impl PartialEq for Module {
fn eq(&self, other: &Self) -> bool { fn eq(&self, other: &Self) -> bool {
Arc::ptr_eq(&self.0, &other.0) self.name == other.name && Arc::ptr_eq(&self.inner, &other.inner)
} }
} }
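A small usage sketch for the reworked `Module`, assuming the API above; the module names are made up. `with_name` is what `import_package` uses to rename the evaluated entrypoint module after the package.

use crate::eval::Module;

fn demo() {
    let module = Module::new("utils").with_name("example");
    // The name now lives outside the Arc, so renaming never clones the
    // scope or content.
    assert_eq!(module.name().as_str(), "example");
}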

View File

@ -15,9 +15,10 @@ use crate::diag::StrResult;
use crate::geom::{Abs, Angle, Color, Em, Fr, Length, Ratio, Rel}; use crate::geom::{Abs, Angle, Color, Em, Fr, Length, Ratio, Rel};
use crate::model::{Label, Styles}; use crate::model::{Label, Styles};
use crate::syntax::{ast, Span}; use crate::syntax::{ast, Span};
use crate::util::Bytes;
/// A computational value. /// A computational value.
#[derive(Clone, Default)] #[derive(Default, Clone)]
pub enum Value { pub enum Value {
/// The value that indicates the absence of a meaningful value. /// The value that indicates the absence of a meaningful value.
#[default] #[default]
@ -46,6 +47,8 @@ pub enum Value {
Symbol(Symbol), Symbol(Symbol),
/// A string: `"string"`. /// A string: `"string"`.
Str(Str), Str(Str),
/// Raw bytes.
Bytes(Bytes),
/// A label: `<intro>`. /// A label: `<intro>`.
Label(Label), Label(Label),
/// A content value: `[*Hi* there]`. /// A content value: `[*Hi* there]`.
@ -103,6 +106,7 @@ impl Value {
Self::Color(_) => Color::TYPE_NAME, Self::Color(_) => Color::TYPE_NAME,
Self::Symbol(_) => Symbol::TYPE_NAME, Self::Symbol(_) => Symbol::TYPE_NAME,
Self::Str(_) => Str::TYPE_NAME, Self::Str(_) => Str::TYPE_NAME,
Self::Bytes(_) => Bytes::TYPE_NAME,
Self::Label(_) => Label::TYPE_NAME, Self::Label(_) => Label::TYPE_NAME,
Self::Content(_) => Content::TYPE_NAME, Self::Content(_) => Content::TYPE_NAME,
Self::Styles(_) => Styles::TYPE_NAME, Self::Styles(_) => Styles::TYPE_NAME,
@ -186,6 +190,7 @@ impl Debug for Value {
Self::Color(v) => Debug::fmt(v, f), Self::Color(v) => Debug::fmt(v, f),
Self::Symbol(v) => Debug::fmt(v, f), Self::Symbol(v) => Debug::fmt(v, f),
Self::Str(v) => Debug::fmt(v, f), Self::Str(v) => Debug::fmt(v, f),
Self::Bytes(v) => Debug::fmt(v, f),
Self::Label(v) => Debug::fmt(v, f), Self::Label(v) => Debug::fmt(v, f),
Self::Content(v) => Debug::fmt(v, f), Self::Content(v) => Debug::fmt(v, f),
Self::Styles(v) => Debug::fmt(v, f), Self::Styles(v) => Debug::fmt(v, f),
@ -228,6 +233,7 @@ impl Hash for Value {
Self::Color(v) => v.hash(state), Self::Color(v) => v.hash(state),
Self::Symbol(v) => v.hash(state), Self::Symbol(v) => v.hash(state),
Self::Str(v) => v.hash(state), Self::Str(v) => v.hash(state),
Self::Bytes(v) => v.hash(state),
Self::Label(v) => v.hash(state), Self::Label(v) => v.hash(state),
Self::Content(v) => v.hash(state), Self::Content(v) => v.hash(state),
Self::Styles(v) => v.hash(state), Self::Styles(v) => v.hash(state),
@ -400,6 +406,7 @@ primitive! {
Str, Str,
Symbol(symbol) => symbol.get().into() Symbol(symbol) => symbol.get().into()
} }
primitive! { Bytes: "bytes", Bytes }
primitive! { Label: "label", Label } primitive! { Label: "label", Label }
primitive! { Content: "content", primitive! { Content: "content",
Content, Content,

View File

@ -8,7 +8,7 @@ use unicode_general_category::GeneralCategory;
use super::{deflate, EmExt, PdfContext, RefExt}; use super::{deflate, EmExt, PdfContext, RefExt};
use crate::font::Font; use crate::font::Font;
use crate::util::{Buffer, SliceExt}; use crate::util::{Bytes, SliceExt};
const CMAP_NAME: Name = Name(b"Custom"); const CMAP_NAME: Name = Name(b"Custom");
const SYSTEM_INFO: SystemInfo = SystemInfo { const SYSTEM_INFO: SystemInfo = SystemInfo {
@ -154,7 +154,7 @@ pub fn write_fonts(ctx: &mut PdfContext) {
/// Subset a font to the given glyphs. /// Subset a font to the given glyphs.
#[comemo::memoize] #[comemo::memoize]
fn subset_font(font: &Font, glyphs: &[u16]) -> Buffer { fn subset_font(font: &Font, glyphs: &[u16]) -> Bytes {
let data = font.data(); let data = font.data();
let profile = subsetter::Profile::pdf(glyphs); let profile = subsetter::Profile::pdf(glyphs);
let subsetted = subsetter::subset(data, font.index(), profile); let subsetted = subsetter::subset(data, font.index(), profile);

View File

@ -5,7 +5,7 @@ use pdf_writer::{Filter, Finish};
use super::{deflate, PdfContext, RefExt}; use super::{deflate, PdfContext, RefExt};
use crate::image::{DecodedImage, Image, RasterFormat}; use crate::image::{DecodedImage, Image, RasterFormat};
use crate::util::Buffer; use crate::util::Bytes;
/// Embed all used images into the PDF. /// Embed all used images into the PDF.
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
@ -89,7 +89,7 @@ pub fn write_images(ctx: &mut PdfContext) {
/// Skips the alpha channel as that's encoded separately. /// Skips the alpha channel as that's encoded separately.
#[comemo::memoize] #[comemo::memoize]
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
fn encode_image(image: &Image) -> (Buffer, Filter, bool) { fn encode_image(image: &Image) -> (Bytes, Filter, bool) {
let decoded = image.decoded(); let decoded = image.decoded();
let (dynamic, format) = match decoded.as_ref() { let (dynamic, format) = match decoded.as_ref() {
DecodedImage::Raster(dynamic, _, format) => (dynamic, *format), DecodedImage::Raster(dynamic, _, format) => (dynamic, *format),

285
src/file.rs Normal file
View File

@ -0,0 +1,285 @@
//! File and package management.
use std::collections::HashMap;
use std::fmt::{self, Debug, Display, Formatter};
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::RwLock;
use ecow::{eco_format, EcoString};
use once_cell::sync::Lazy;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use crate::diag::{bail, FileError, StrResult};
use crate::syntax::is_ident;
use crate::util::PathExt;
/// The global package-path interner.
static INTERNER: Lazy<RwLock<Interner>> =
Lazy::new(|| RwLock::new(Interner { to_id: HashMap::new(), from_id: Vec::new() }));
/// A package-path interner.
struct Interner {
to_id: HashMap<Pair, FileId>,
from_id: Vec<Pair>,
}
/// An interned pair of a package specification and a path.
type Pair = &'static (Option<PackageSpec>, PathBuf);
/// Identifies a file.
///
/// This type is interned and thus cheap to clone, compare, and hash.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct FileId(u16);
impl FileId {
/// Create a new interned file specification.
///
/// Normalizes the path before interning.
pub fn new(package: Option<PackageSpec>, path: &Path) -> Self {
let pair = (package, path.normalize());
let mut interner = INTERNER.write().unwrap();
interner.to_id.get(&pair).copied().unwrap_or_else(|| {
let leaked = Box::leak(Box::new(pair));
let len = interner.from_id.len();
if len >= usize::from(u16::MAX) {
panic!("too many file specifications");
}
let id = FileId(len as u16);
interner.to_id.insert(leaked, id);
interner.from_id.push(leaked);
id
})
}
/// Get an id that does not identify any real file.
pub const fn detached() -> Self {
Self(u16::MAX)
}
/// Whether the id is the detached id.
pub const fn is_detached(self) -> bool {
self.0 == Self::detached().0
}
/// The package the file resides in, if any.
pub fn package(&self) -> Option<&'static PackageSpec> {
if self.is_detached() {
None
} else {
self.pair().0.as_ref()
}
}
/// The normalized path to the file (within the package if there's a
/// package).
pub fn path(&self) -> &'static Path {
if self.is_detached() {
Path::new("<detached>")
} else {
&self.pair().1
}
}
/// Resolve a file location relative to this file.
pub fn join(self, path: &str) -> StrResult<Self> {
if self.is_detached() {
bail!("cannot access file system from here");
}
let package = self.package().cloned();
let base = self.path();
Ok(if let Some(parent) = base.parent() {
Self::new(package, &parent.join(path))
} else {
Self::new(package, Path::new(path))
})
}
/// Construct from a raw number.
pub(crate) const fn from_u16(v: u16) -> Self {
Self(v)
}
/// Extract the raw underlying number.
pub(crate) const fn as_u16(self) -> u16 {
self.0
}
/// Get the static pair.
fn pair(&self) -> Pair {
INTERNER.read().unwrap().from_id[usize::from(self.0)]
}
}
impl Display for FileId {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let path = self.path().display();
match self.package() {
Some(package) => write!(f, "{package}/{path}"),
None => write!(f, "{path}"),
}
}
}
impl Debug for FileId {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
Display::fmt(self, f)
}
}
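A brief sketch of how the interned ids behave, assuming the `FileId` API above; the paths are invented.

use std::path::Path;
use crate::diag::StrResult;
use crate::file::FileId;

fn demo() -> StrResult<()> {
    // Interning the same (package, path) pair twice yields the same id,
    // so comparison and hashing are cheap.
    let a = FileId::new(None, Path::new("/chapters/intro.typ"));
    let b = FileId::new(None, Path::new("/chapters/intro.typ"));
    assert_eq!(a, b);

    // `join` resolves a path relative to the file's directory and fails
    // for the detached id.
    let sibling = a.join("appendix.typ")?;
    assert_eq!(sibling.path(), Path::new("/chapters/appendix.typ"));
    assert!(FileId::detached().join("x.typ").is_err());
    Ok(())
}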
/// Identifies a package.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct PackageSpec {
/// The namespace the package lives in.
pub namespace: EcoString,
/// The name of the package within its namespace.
pub name: EcoString,
/// The package's version.
pub version: Version,
}
impl FromStr for PackageSpec {
type Err = EcoString;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut s = unscanny::Scanner::new(s);
if !s.eat_if('@') {
bail!("package specification must start with '@'");
}
let namespace = s.eat_until('/');
if namespace.is_empty() {
bail!("package specification is missing namespace");
} else if !is_ident(namespace) {
bail!("`{namespace}` is not a valid package namespace");
}
s.eat_if('/');
let name = s.eat_until(':');
if name.is_empty() {
bail!("package specification is missing name");
} else if !is_ident(name) {
bail!("`{name}` is not a valid package name");
}
s.eat_if(':');
let version = s.after();
if version.is_empty() {
bail!("package specification is missing version");
}
Ok(Self {
namespace: namespace.into(),
name: name.into(),
version: version.parse()?,
})
}
}
impl Display for PackageSpec {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "@{}/{}:{}", self.namespace, self.name, self.version)
}
}
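A round-trip sketch for the specification syntax, assuming the `FromStr` and `Display` impls above; the concrete spec string is invented.

use crate::file::PackageSpec;

fn demo() -> Result<(), ecow::EcoString> {
    let spec: PackageSpec = "@preview/example:0.1.0".parse()?;
    assert_eq!(spec.namespace.as_str(), "preview");
    assert_eq!(spec.name.as_str(), "example");
    assert_eq!(spec.version.to_string(), "0.1.0");
    // Display reproduces the input form.
    assert_eq!(spec.to_string(), "@preview/example:0.1.0");
    Ok(())
}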
/// A package's version.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct Version {
/// The package's major version.
pub major: u32,
/// The package's minor version.
pub minor: u32,
/// The package's patch version.
pub patch: u32,
}
impl FromStr for Version {
type Err = EcoString;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut parts = s.split('.');
let mut next = |kind| {
let Some(part) = parts.next().filter(|s| !s.is_empty()) else {
bail!("version number is missing {kind} version");
};
part.parse::<u32>()
.map_err(|_| eco_format!("`{part}` is not a valid {kind} version"))
};
let major = next("major")?;
let minor = next("minor")?;
let patch = next("patch")?;
if let Some(rest) = parts.next() {
bail!("version number has unexpected fourth component: `{rest}`");
}
Ok(Self { major, minor, patch })
}
}
impl Display for Version {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}.{}.{}", self.major, self.minor, self.patch)
}
}
impl Serialize for Version {
fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
s.collect_str(self)
}
}
impl<'de> Deserialize<'de> for Version {
fn deserialize<D: Deserializer<'de>>(d: D) -> Result<Self, D::Error> {
let string = EcoString::deserialize(d)?;
string.parse().map_err(serde::de::Error::custom)
}
}
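Since `Version` derives `Ord` with its fields in major-minor-patch order, versions compare the way one would expect; a tiny sketch with invented values:

use crate::file::Version;

fn demo() {
    let a: Version = "0.1.9".parse().unwrap();
    let b: Version = "0.2.0".parse().unwrap();
    assert!(a < b);
    // Serialization round-trips through the string form.
    assert_eq!(b.to_string(), "0.2.0");
}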
/// A parsed package manifest.
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub struct PackageManifest {
/// Details about the package itself.
pub package: PackageInfo,
}
impl PackageManifest {
/// Parse the manifest from raw bytes.
pub fn parse(bytes: &[u8]) -> StrResult<Self> {
let string = std::str::from_utf8(bytes).map_err(FileError::from)?;
toml::from_str(string).map_err(|err| {
eco_format!("package manifest is malformed: {}", err.message())
})
}
/// Ensure that this manifest is indeed for the specified package.
pub fn validate(&self, spec: &PackageSpec) -> StrResult<()> {
if self.package.name != spec.name {
bail!("package manifest contains mismatched name `{}`", self.package.name);
}
if self.package.version != spec.version {
bail!(
"package manifest contains mismatched version {}",
self.package.version
);
}
Ok(())
}
}
/// The `package` key in the manifest.
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub struct PackageInfo {
/// The name of the package within its namespace.
pub name: EcoString,
/// The package's version.
pub version: Version,
/// The path of the entrypoint into the package.
pub entrypoint: EcoString,
}
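A sketch of manifest handling, assuming `PackageManifest::parse` and `validate` as defined above; the TOML content and the spec are invented.

use crate::diag::StrResult;
use crate::file::{PackageManifest, PackageSpec};

fn demo() -> StrResult<()> {
    let raw = br#"
        [package]
        name = "example"
        version = "0.1.0"
        entrypoint = "lib.typ"
    "#;
    let manifest = PackageManifest::parse(raw)?;
    let spec: PackageSpec = "@preview/example:0.1.0".parse()?;
    // Rejects manifests whose name or version disagrees with the spec.
    manifest.validate(&spec)?;
    Ok(())
}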

View File

@ -15,9 +15,11 @@ use ttf_parser::GlyphId;
use self::book::find_name; use self::book::find_name;
use crate::eval::Cast; use crate::eval::Cast;
use crate::geom::Em; use crate::geom::Em;
use crate::util::Buffer; use crate::util::Bytes;
/// An OpenType font. /// An OpenType font.
///
/// Values of this type are cheap to clone and hash.
#[derive(Clone)] #[derive(Clone)]
pub struct Font(Arc<Repr>); pub struct Font(Arc<Repr>);
@ -26,7 +28,7 @@ struct Repr {
/// The raw font data, possibly shared with other fonts from the same /// The raw font data, possibly shared with other fonts from the same
/// collection. The vector's allocation must not move, because `ttf` points /// collection. The vector's allocation must not move, because `ttf` points
/// into it using unsafe code. /// into it using unsafe code.
data: Buffer, data: Bytes,
/// The font's index in the buffer. /// The font's index in the buffer.
index: u32, index: u32,
/// Metadata about the font. /// Metadata about the font.
@ -41,7 +43,7 @@ struct Repr {
impl Font { impl Font {
/// Parse a font from data and collection index. /// Parse a font from data and collection index.
pub fn new(data: Buffer, index: u32) -> Option<Self> { pub fn new(data: Bytes, index: u32) -> Option<Self> {
// Safety: // Safety:
// - The slices's location is stable in memory: // - The slices's location is stable in memory:
// - We don't move the underlying vector // - We don't move the underlying vector
@ -60,13 +62,13 @@ impl Font {
} }
/// Parse all fonts in the given data. /// Parse all fonts in the given data.
pub fn iter(data: Buffer) -> impl Iterator<Item = Self> { pub fn iter(data: Bytes) -> impl Iterator<Item = Self> {
let count = ttf_parser::fonts_in_collection(&data).unwrap_or(1); let count = ttf_parser::fonts_in_collection(&data).unwrap_or(1);
(0..count).filter_map(move |index| Self::new(data.clone(), index)) (0..count).filter_map(move |index| Self::new(data.clone(), index))
} }
/// The underlying buffer. /// The underlying buffer.
pub fn data(&self) -> &Buffer { pub fn data(&self) -> &Bytes {
&self.0.data &self.0.data
} }

View File

@ -1,5 +1,3 @@
use std::path::PathBuf;
use comemo::Track; use comemo::Track;
use ecow::EcoString; use ecow::EcoString;
@ -7,7 +5,6 @@ use crate::doc::Frame;
use crate::eval::{eval, Module, Route, Tracer, Value}; use crate::eval::{eval, Module, Route, Tracer, Value};
use crate::model::{Introspector, Label}; use crate::model::{Introspector, Label};
use crate::syntax::{ast, LinkedNode, Source, SyntaxKind}; use crate::syntax::{ast, LinkedNode, Source, SyntaxKind};
use crate::util::PathExt;
use crate::World; use crate::World;
/// Try to determine a set of possible values for an expression. /// Try to determine a set of possible values for an expression.
@ -42,7 +39,7 @@ pub fn analyze_expr(world: &(dyn World + 'static), node: &LinkedNode) -> Vec<Val
world.track(), world.track(),
route.track(), route.track(),
tracer.track_mut(), tracer.track_mut(),
world.main(), &world.main(),
) )
.and_then(|module| { .and_then(|module| {
typst::model::typeset( typst::model::typeset(
@ -66,18 +63,11 @@ pub fn analyze_import(
source: &Source, source: &Source,
path: &str, path: &str,
) -> Option<Module> { ) -> Option<Module> {
let full: PathBuf = if let Some(path) = path.strip_prefix('/') {
world.root().join(path).normalize()
} else if let Some(dir) = source.path().parent() {
dir.join(path).normalize()
} else {
path.into()
};
let route = Route::default(); let route = Route::default();
let mut tracer = Tracer::default(); let mut tracer = Tracer::default();
let id = world.resolve(&full).ok()?; let id = source.id().join(path).ok()?;
let source = world.source(id); let source = world.source(id).ok()?;
eval(world.track(), route.track(), tracer.track_mut(), source).ok() eval(world.track(), route.track(), tracer.track_mut(), &source).ok()
} }
/// Find all labels and details for them. /// Find all labels and details for them.
@ -112,7 +102,7 @@ pub fn analyze_labels(
let split = output.len(); let split = output.len();
// Bibliography keys. // Bibliography keys.
for (key, detail) in (items.bibliography_keys)(world.track(), introspector.track()) { for (key, detail) in (items.bibliography_keys)(introspector.track()) {
output.push((Label(key), detail)); output.push((Label(key), detail));
} }

View File

@ -3,16 +3,17 @@ use std::num::NonZeroUsize;
use ecow::EcoString; use ecow::EcoString;
use crate::doc::{Destination, Frame, FrameItem, Meta, Position}; use crate::doc::{Destination, Frame, FrameItem, Meta, Position};
use crate::file::FileId;
use crate::geom::{Geometry, Point, Size}; use crate::geom::{Geometry, Point, Size};
use crate::model::Introspector; use crate::model::Introspector;
use crate::syntax::{LinkedNode, Source, SourceId, Span, SyntaxKind}; use crate::syntax::{LinkedNode, Source, Span, SyntaxKind};
use crate::World; use crate::World;
/// Where to [jump](jump_from_click) to. /// Where to [jump](jump_from_click) to.
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq)]
pub enum Jump { pub enum Jump {
/// Jump to a position in a source file. /// Jump to a position in a source file.
Source(SourceId, usize), Source(FileId, usize),
/// Jump to an external URL. /// Jump to an external URL.
Url(EcoString), Url(EcoString),
/// Jump to a point on a page. /// Jump to a point on a page.
@ -21,9 +22,9 @@ pub enum Jump {
impl Jump { impl Jump {
fn from_span(world: &dyn World, span: Span) -> Option<Self> { fn from_span(world: &dyn World, span: Span) -> Option<Self> {
let source = world.source(span.source()); let source = world.source(span.id()).ok()?;
let node = source.find(span)?; let node = source.find(span)?;
Some(Self::Source(source.id(), node.offset())) Some(Self::Source(span.id(), node.offset()))
} }
} }
@ -78,7 +79,7 @@ pub fn jump_from_click(
Size::new(width, text.size), Size::new(width, text.size),
click, click,
) { ) {
let source = world.source(span.source()); let source = world.source(span.id()).ok()?;
let node = source.find(span)?; let node = source.find(span)?;
let pos = if node.kind() == SyntaxKind::Text { let pos = if node.kind() == SyntaxKind::Text {
let range = node.range(); let range = node.range();

View File

@ -18,7 +18,7 @@ use usvg::{TreeParsing, TreeTextToPath};
use crate::diag::{format_xml_like_error, StrResult}; use crate::diag::{format_xml_like_error, StrResult};
use crate::font::Font; use crate::font::Font;
use crate::geom::Axes; use crate::geom::Axes;
use crate::util::Buffer; use crate::util::Bytes;
use crate::World; use crate::World;
/// A raster or vector image. /// A raster or vector image.
@ -31,7 +31,7 @@ pub struct Image(Arc<Prehashed<Repr>>);
#[derive(Hash)] #[derive(Hash)]
struct Repr { struct Repr {
/// The raw, undecoded image data. /// The raw, undecoded image data.
data: Buffer, data: Bytes,
/// The format of the encoded `buffer`. /// The format of the encoded `buffer`.
format: ImageFormat, format: ImageFormat,
/// The size of the image. /// The size of the image.
@ -47,7 +47,7 @@ impl Image {
/// Create an image from a buffer and a format. /// Create an image from a buffer and a format.
#[comemo::memoize] #[comemo::memoize]
pub fn new( pub fn new(
data: Buffer, data: Bytes,
format: ImageFormat, format: ImageFormat,
alt: Option<EcoString>, alt: Option<EcoString>,
) -> StrResult<Self> { ) -> StrResult<Self> {
@ -71,7 +71,7 @@ impl Image {
/// Create a font-dependant image from a buffer and a format. /// Create a font-dependant image from a buffer and a format.
#[comemo::memoize] #[comemo::memoize]
pub fn with_fonts( pub fn with_fonts(
data: Buffer, data: Bytes,
format: ImageFormat, format: ImageFormat,
world: Tracked<dyn World + '_>, world: Tracked<dyn World + '_>,
fallback_family: Option<&str>, fallback_family: Option<&str>,
@ -95,7 +95,7 @@ impl Image {
} }
/// The raw image data. /// The raw image data.
pub fn data(&self) -> &Buffer { pub fn data(&self) -> &Bytes {
&self.0.data &self.0.data
} }
@ -234,7 +234,7 @@ pub struct IccProfile(pub Vec<u8>);
/// Decode a raster image. /// Decode a raster image.
#[comemo::memoize] #[comemo::memoize]
fn decode_raster(data: &Buffer, format: RasterFormat) -> StrResult<Arc<DecodedImage>> { fn decode_raster(data: &Bytes, format: RasterFormat) -> StrResult<Arc<DecodedImage>> {
fn decode_with<'a, T: ImageDecoder<'a>>( fn decode_with<'a, T: ImageDecoder<'a>>(
decoder: ImageResult<T>, decoder: ImageResult<T>,
) -> ImageResult<(image::DynamicImage, Option<IccProfile>)> { ) -> ImageResult<(image::DynamicImage, Option<IccProfile>)> {
@ -259,7 +259,7 @@ fn decode_raster(data: &Buffer, format: RasterFormat) -> StrResult<Arc<DecodedIm
/// Decode an SVG image. /// Decode an SVG image.
#[comemo::memoize] #[comemo::memoize]
fn decode_svg( fn decode_svg(
data: &Buffer, data: &Bytes,
loader: Tracked<dyn SvgFontLoader + '_>, loader: Tracked<dyn SvgFontLoader + '_>,
) -> StrResult<Arc<DecodedImage>> { ) -> StrResult<Arc<DecodedImage>> {
// Disable usvg's default to "Times New Roman". Instead, we default to // Disable usvg's default to "Times New Roman". Instead, we default to

View File

@ -45,6 +45,7 @@ pub mod diag;
pub mod eval; pub mod eval;
pub mod doc; pub mod doc;
pub mod export; pub mod export;
pub mod file;
pub mod font; pub mod font;
pub mod geom; pub mod geom;
pub mod ide; pub mod ide;
@ -52,16 +53,15 @@ pub mod image;
pub mod model; pub mod model;
pub mod syntax; pub mod syntax;
use std::path::Path;
use comemo::{Prehashed, Track, TrackedMut}; use comemo::{Prehashed, Track, TrackedMut};
use crate::diag::{FileResult, SourceResult}; use crate::diag::{FileResult, SourceResult};
use crate::doc::Document; use crate::doc::Document;
use crate::eval::{Datetime, Library, Route, Tracer}; use crate::eval::{Datetime, Library, Route, Tracer};
use crate::file::FileId;
use crate::font::{Font, FontBook}; use crate::font::{Font, FontBook};
use crate::syntax::{Source, SourceId}; use crate::syntax::Source;
use crate::util::Buffer; use crate::util::Bytes;
/// Compile a source file into a fully layouted document. /// Compile a source file into a fully layouted document.
#[tracing::instrument(skip(world))] #[tracing::instrument(skip(world))]
@ -79,7 +79,7 @@ pub fn compile(world: &dyn World) -> SourceResult<Document> {
world, world,
route.track(), route.track(),
TrackedMut::reborrow_mut(&mut tracer), TrackedMut::reborrow_mut(&mut tracer),
world.main(), &world.main(),
)?; )?;
// Typeset the module's contents. // Typeset the module's contents.
@ -87,35 +87,38 @@ pub fn compile(world: &dyn World) -> SourceResult<Document> {
} }
/// The environment in which typesetting occurs. /// The environment in which typesetting occurs.
///
/// All loading functions (`main`, `source`, `file`, `font`) should perform
/// internal caching so that they are relatively cheap on repeated invocations
/// with the same argument. [`Source`], [`Bytes`], and [`Font`] are
/// all reference-counted and thus cheap to clone.
///
/// The compiler doesn't do the caching itself because the world has much more
/// information on when something can change. For example, fonts typically don't
/// change and can thus even be cached across multiple compilations (for
/// long-running applications like `typst watch`). Source files on the other
/// hand can change and should thus be cleared after each compilation. Advanced
/// clients like language servers can also retain the source files and
/// [edit](Source::edit) them in-place to benefit from better incremental
/// performance.
#[comemo::track] #[comemo::track]
pub trait World { pub trait World {
/// The path relative to which absolute paths are.
///
/// Defaults to the empty path.
fn root(&self) -> &Path {
Path::new("")
}
/// The standard library. /// The standard library.
fn library(&self) -> &Prehashed<Library>; fn library(&self) -> &Prehashed<Library>;
/// The main source file.
fn main(&self) -> &Source;
/// Try to resolve the unique id of a source file.
fn resolve(&self, path: &Path) -> FileResult<SourceId>;
/// Access a source file by id.
fn source(&self, id: SourceId) -> &Source;
/// Metadata about all known fonts. /// Metadata about all known fonts.
fn book(&self) -> &Prehashed<FontBook>; fn book(&self) -> &Prehashed<FontBook>;
/// Try to access the font with the given id. /// Access the main source file.
fn font(&self, id: usize) -> Option<Font>; fn main(&self) -> Source;
/// Try to access a file at a path. /// Try to access the specified source file.
fn file(&self, path: &Path) -> FileResult<Buffer>; fn source(&self, id: FileId) -> FileResult<Source>;
/// Try to access the specified file.
fn file(&self, id: FileId) -> FileResult<Bytes>;
/// Try to access the font with the given index in the font book.
fn font(&self, index: usize) -> Option<Font>;
/// Get the current date. /// Get the current date.
/// ///

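A sketch of how a consumer interacts with the reworked trait, assuming the signatures above; the CSV path and the `peek` function are invented, and `Bytes` is assumed to dereference to a byte slice as the old `Buffer` did.

use std::path::Path;
use crate::diag::FileResult;
use crate::file::FileId;
use crate::World;

fn peek(world: &dyn World) -> FileResult<usize> {
    // `main` now returns the source by value; sources are cheap to clone.
    let main = world.main();
    // Other files are addressed by id instead of by path.
    let id = FileId::new(None, Path::new("/data/table.csv"));
    let bytes = world.file(id)?;
    // Assumption: `Bytes` derefs to `[u8]`, so `len` is available.
    Ok(main.text().len() + bytes.len())
}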
View File

@ -3,7 +3,7 @@ use unicode_ident::{is_xid_continue, is_xid_start};
use unicode_segmentation::UnicodeSegmentation; use unicode_segmentation::UnicodeSegmentation;
use unscanny::Scanner; use unscanny::Scanner;
use super::{ErrorPos, SyntaxKind}; use super::SyntaxKind;
/// Splits up a string of source code into tokens. /// Splits up a string of source code into tokens.
#[derive(Clone)] #[derive(Clone)]
@ -16,7 +16,7 @@ pub(super) struct Lexer<'s> {
/// Whether the last token contained a newline. /// Whether the last token contained a newline.
newline: bool, newline: bool,
/// An error for the last token. /// An error for the last token.
error: Option<(EcoString, ErrorPos)>, error: Option<EcoString>,
} }
/// What kind of tokens to emit. /// What kind of tokens to emit.
@ -69,7 +69,7 @@ impl<'s> Lexer<'s> {
} }
/// Take out the last error, if any. /// Take out the last error, if any.
pub fn take_error(&mut self) -> Option<(EcoString, ErrorPos)> { pub fn take_error(&mut self) -> Option<EcoString> {
self.error.take() self.error.take()
} }
} }
@ -77,7 +77,7 @@ impl<'s> Lexer<'s> {
impl Lexer<'_> { impl Lexer<'_> {
/// Construct a full-positioned syntax error. /// Construct a full-positioned syntax error.
fn error(&mut self, message: impl Into<EcoString>) -> SyntaxKind { fn error(&mut self, message: impl Into<EcoString>) -> SyntaxKind {
self.error = Some((message.into(), ErrorPos::Full)); self.error = Some(message.into());
SyntaxKind::Error SyntaxKind::Error
} }
} }

View File

@ -12,9 +12,9 @@ mod span;
pub use self::kind::SyntaxKind; pub use self::kind::SyntaxKind;
pub use self::lexer::{is_ident, is_newline}; pub use self::lexer::{is_ident, is_newline};
pub use self::node::{ErrorPos, LinkedChildren, LinkedNode, SyntaxNode}; pub use self::node::{LinkedChildren, LinkedNode, SyntaxNode};
pub use self::parser::{parse, parse_code}; pub use self::parser::{parse, parse_code};
pub use self::source::{Source, SourceId}; pub use self::source::Source;
pub use self::span::{Span, Spanned}; pub use self::span::{Span, Spanned};
pub(crate) use self::lexer::{is_id_continue, is_id_start}; pub(crate) use self::lexer::{is_id_continue, is_id_start};

View File

@ -6,8 +6,9 @@ use std::sync::Arc;
use ecow::EcoString; use ecow::EcoString;
use super::ast::AstNode; use super::ast::AstNode;
use super::{SourceId, Span, SyntaxKind}; use super::{Span, SyntaxKind};
use crate::diag::SourceError; use crate::diag::SourceError;
use crate::file::FileId;
/// A node in the untyped syntax tree. /// A node in the untyped syntax tree.
#[derive(Clone, Eq, PartialEq, Hash)] #[derive(Clone, Eq, PartialEq, Hash)]
@ -36,12 +37,8 @@ impl SyntaxNode {
} }
/// Create a new error node. /// Create a new error node.
pub fn error( pub fn error(message: impl Into<EcoString>, text: impl Into<EcoString>) -> Self {
message: impl Into<EcoString>, Self(Repr::Error(Arc::new(ErrorNode::new(message, text))))
text: impl Into<EcoString>,
pos: ErrorPos,
) -> Self {
Self(Repr::Error(Arc::new(ErrorNode::new(message, text, pos))))
} }
/// The type of the node. /// The type of the node.
@ -145,7 +142,7 @@ impl SyntaxNode {
} }
if let Repr::Error(error) = &self.0 { if let Repr::Error(error) = &self.0 {
vec![SourceError::new(error.span, error.message.clone()).with_pos(error.pos)] vec![SourceError::new(error.span, error.message.clone())]
} else { } else {
self.children() self.children()
.filter(|node| node.erroneous()) .filter(|node| node.erroneous())
@ -186,14 +183,14 @@ impl SyntaxNode {
/// Convert the child to an error. /// Convert the child to an error.
pub(super) fn convert_to_error(&mut self, message: impl Into<EcoString>) { pub(super) fn convert_to_error(&mut self, message: impl Into<EcoString>) {
let text = std::mem::take(self).into_text(); let text = std::mem::take(self).into_text();
*self = SyntaxNode::error(message, text, ErrorPos::Full); *self = SyntaxNode::error(message, text);
} }
/// Assign spans to each node. /// Assign spans to each node.
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
pub(super) fn numberize( pub(super) fn numberize(
&mut self, &mut self,
id: SourceId, id: FileId,
within: Range<u64>, within: Range<u64>,
) -> NumberingResult { ) -> NumberingResult {
if within.start >= within.end { if within.start >= within.end {
@ -285,7 +282,7 @@ impl Debug for SyntaxNode {
impl Default for SyntaxNode { impl Default for SyntaxNode {
fn default() -> Self { fn default() -> Self {
Self::error("", "", ErrorPos::Full) Self::error("", "")
} }
} }
@ -381,7 +378,7 @@ impl InnerNode {
/// a `range` of its children. /// a `range` of its children.
fn numberize( fn numberize(
&mut self, &mut self,
id: SourceId, id: FileId,
range: Option<Range<usize>>, range: Option<Range<usize>>,
within: Range<u64>, within: Range<u64>,
) -> NumberingResult { ) -> NumberingResult {
@ -492,7 +489,7 @@ impl InnerNode {
// Try to renumber. // Try to renumber.
let within = start_number..end_number; let within = start_number..end_number;
let id = self.span.source(); let id = self.span.id();
if self.numberize(id, Some(renumber), within).is_ok() { if self.numberize(id, Some(renumber), within).is_ok() {
return Ok(()); return Ok(());
} }
@ -540,23 +537,16 @@ struct ErrorNode {
message: EcoString, message: EcoString,
/// The source text of the node. /// The source text of the node.
text: EcoString, text: EcoString,
/// Where in the node an error should be annotated.
pos: ErrorPos,
/// The node's span. /// The node's span.
span: Span, span: Span,
} }
impl ErrorNode { impl ErrorNode {
/// Create new error node. /// Create new error node.
fn new( fn new(message: impl Into<EcoString>, text: impl Into<EcoString>) -> Self {
message: impl Into<EcoString>,
text: impl Into<EcoString>,
pos: ErrorPos,
) -> Self {
Self { Self {
message: message.into(), message: message.into(),
text: text.into(), text: text.into(),
pos,
span: Span::detached(), span: Span::detached(),
} }
} }
@ -573,17 +563,6 @@ impl Debug for ErrorNode {
} }
} }
/// Where in a node an error should be annotated,
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum ErrorPos {
/// Over the full width of the node.
Full,
/// At the start of the node.
Start,
/// At the end of the node.
End,
}
/// A syntax node in a context. /// A syntax node in a context.
/// ///
/// Knows its exact offset in the file and provides access to its /// Knows its exact offset in the file and provides access to its

View File

@ -4,7 +4,7 @@ use std::ops::Range;
use ecow::{eco_format, EcoString}; use ecow::{eco_format, EcoString};
use unicode_math_class::MathClass; use unicode_math_class::MathClass;
use super::{ast, is_newline, ErrorPos, LexMode, Lexer, SyntaxKind, SyntaxNode}; use super::{ast, is_newline, LexMode, Lexer, SyntaxKind, SyntaxNode};
/// Parse a source file. /// Parse a source file.
pub fn parse(text: &str) -> SyntaxNode { pub fn parse(text: &str) -> SyntaxNode {
@ -1560,8 +1560,8 @@ impl<'s> Parser<'s> {
fn save(&mut self) { fn save(&mut self) {
let text = self.current_text(); let text = self.current_text();
if self.at(SyntaxKind::Error) { if self.at(SyntaxKind::Error) {
let (message, pos) = self.lexer.take_error().unwrap(); let message = self.lexer.take_error().unwrap();
self.nodes.push(SyntaxNode::error(message, text, pos)); self.nodes.push(SyntaxNode::error(message, text));
} else { } else {
self.nodes.push(SyntaxNode::leaf(self.current, text)); self.nodes.push(SyntaxNode::leaf(self.current, text));
} }
@ -1608,14 +1608,14 @@ impl<'s> Parser<'s> {
.map_or(true, |child| child.kind() != SyntaxKind::Error) .map_or(true, |child| child.kind() != SyntaxKind::Error)
{ {
let message = eco_format!("expected {}", thing); let message = eco_format!("expected {}", thing);
self.nodes.push(SyntaxNode::error(message, "", ErrorPos::Full)); self.nodes.push(SyntaxNode::error(message, ""));
} }
self.skip(); self.skip();
} }
fn expected_at(&mut self, m: Marker, thing: &str) { fn expected_at(&mut self, m: Marker, thing: &str) {
let message = eco_format!("expected {}", thing); let message = eco_format!("expected {}", thing);
let error = SyntaxNode::error(message, "", ErrorPos::Full); let error = SyntaxNode::error(message, "");
self.nodes.insert(m.0, error); self.nodes.insert(m.0, error);
} }

View File

@ -19,7 +19,7 @@ pub fn reparse(
replacement_len: usize, replacement_len: usize,
) -> Range<usize> { ) -> Range<usize> {
try_reparse(text, replaced, replacement_len, None, root, 0).unwrap_or_else(|| { try_reparse(text, replaced, replacement_len, None, root, 0).unwrap_or_else(|| {
let id = root.span().source(); let id = root.span().id();
*root = parse(text); *root = parse(text);
root.numberize(id, Span::FULL).unwrap(); root.numberize(id, Span::FULL).unwrap();
0..text.len() 0..text.len()

View File

@ -3,105 +3,107 @@
use std::fmt::{self, Debug, Formatter}; use std::fmt::{self, Debug, Formatter};
use std::hash::{Hash, Hasher}; use std::hash::{Hash, Hasher};
use std::ops::Range; use std::ops::Range;
use std::path::{Path, PathBuf}; use std::sync::Arc;
use comemo::Prehashed; use comemo::Prehashed;
use unscanny::Scanner;
use super::ast::Markup; use super::ast::Markup;
use super::reparser::reparse; use super::reparser::reparse;
use super::{is_newline, parse, LinkedNode, Span, SyntaxNode}; use super::{is_newline, parse, LinkedNode, Span, SyntaxNode};
use crate::diag::SourceResult; use crate::diag::SourceResult;
use crate::util::{PathExt, StrExt}; use crate::file::FileId;
use crate::util::StrExt;
/// A source file. /// A source file.
/// ///
/// All line and column indices start at zero, just like byte indices. Only for /// All line and column indices start at zero, just like byte indices. Only for
/// user-facing display, you should add 1 to them. /// user-facing display, you should add 1 to them.
///
/// Values of this type are cheap to clone and hash.
#[derive(Clone)] #[derive(Clone)]
pub struct Source { pub struct Source(Arc<Repr>);
id: SourceId,
path: PathBuf, /// The internal representation.
lines: Vec<Line>, #[derive(Clone)]
struct Repr {
id: FileId,
text: Prehashed<String>, text: Prehashed<String>,
root: Prehashed<SyntaxNode>, root: Prehashed<SyntaxNode>,
lines: Vec<Line>,
} }
impl Source { impl Source {
/// Create a new source file. /// Create a new source file.
///
/// The path must be canonical, so that the same source file has the same
/// id even if accessed through different paths.
#[tracing::instrument(skip_all)] #[tracing::instrument(skip_all)]
pub fn new(id: SourceId, path: &Path, text: String) -> Self { pub fn new(id: FileId, text: String) -> Self {
let mut root = parse(&text); let mut root = parse(&text);
root.numberize(id, Span::FULL).unwrap(); root.numberize(id, Span::FULL).unwrap();
Self { Self(Arc::new(Repr {
id, id,
path: path.normalize(),
lines: lines(&text), lines: lines(&text),
text: Prehashed::new(text), text: Prehashed::new(text),
root: Prehashed::new(root), root: Prehashed::new(root),
} }))
} }
/// Create a source file without a real id and path, usually for testing. /// Create a source file without a real id and path, usually for testing.
pub fn detached(text: impl Into<String>) -> Self { pub fn detached(text: impl Into<String>) -> Self {
Self::new(SourceId::detached(), Path::new(""), text.into()) Self::new(FileId::detached(), text.into())
} }
/// Create a source file with the same synthetic span for all nodes. /// Create a source file with the same synthetic span for all nodes.
pub fn synthesized(text: String, span: Span) -> Self { pub fn synthesized(text: String, span: Span) -> Self {
let mut root = parse(&text); let mut root = parse(&text);
root.synthesize(span); root.synthesize(span);
Self { Self(Arc::new(Repr {
id: SourceId::detached(), id: FileId::detached(),
path: PathBuf::new(),
lines: lines(&text), lines: lines(&text),
text: Prehashed::new(text), text: Prehashed::new(text),
root: Prehashed::new(root), root: Prehashed::new(root),
} }))
} }
/// The root node of the file's untyped syntax tree. /// The root node of the file's untyped syntax tree.
pub fn root(&self) -> &SyntaxNode { pub fn root(&self) -> &SyntaxNode {
&self.root &self.0.root
} }
/// The root node of the file's typed abstract syntax tree. /// The root node of the file's typed abstract syntax tree.
pub fn ast(&self) -> SourceResult<Markup> { pub fn ast(&self) -> SourceResult<Markup> {
let errors = self.root.errors(); let errors = self.root().errors();
if errors.is_empty() { if errors.is_empty() {
Ok(self.root.cast().expect("root node must be markup")) Ok(self.root().cast().expect("root node must be markup"))
} else { } else {
Err(Box::new(errors)) Err(Box::new(errors))
} }
} }
/// The id of the source file. /// The id of the source file.
pub fn id(&self) -> SourceId { pub fn id(&self) -> FileId {
self.id self.0.id
}
/// The normalized path to the source file.
pub fn path(&self) -> &Path {
&self.path
} }
/// The whole source as a string slice. /// The whole source as a string slice.
pub fn text(&self) -> &str { pub fn text(&self) -> &str {
&self.text &self.0.text
} }
/// Slice out the part of the source code enclosed by the range. /// Slice out the part of the source code enclosed by the range.
pub fn get(&self, range: Range<usize>) -> Option<&str> { pub fn get(&self, range: Range<usize>) -> Option<&str> {
self.text.get(range) self.text().get(range)
} }
/// Fully replace the source text. /// Fully replace the source text.
pub fn replace(&mut self, text: String) { pub fn replace(&mut self, text: String) {
self.text = Prehashed::new(text); let inner = Arc::make_mut(&mut self.0);
self.lines = lines(&self.text); inner.text = Prehashed::new(text);
let mut root = parse(&self.text); inner.lines = lines(&inner.text);
root.numberize(self.id, Span::FULL).unwrap(); let mut root = parse(&inner.text);
self.root = Prehashed::new(root); root.numberize(inner.id, Span::FULL).unwrap();
inner.root = Prehashed::new(root);
} }
/// Edit the source file by replacing the given range. /// Edit the source file by replacing the given range.
@ -112,72 +114,70 @@ impl Source {
#[track_caller] #[track_caller]
pub fn edit(&mut self, replace: Range<usize>, with: &str) -> Range<usize> { pub fn edit(&mut self, replace: Range<usize>, with: &str) -> Range<usize> {
let start_byte = replace.start; let start_byte = replace.start;
let start_utf16 = self.byte_to_utf16(replace.start).unwrap(); let start_utf16 = self.byte_to_utf16(start_byte).unwrap();
self.text.update(|text| text.replace_range(replace.clone(), with)); let line = self.byte_to_line(start_byte).unwrap();
let inner = Arc::make_mut(&mut self.0);
// Update the text itself.
inner.text.update(|text| text.replace_range(replace.clone(), with));
// Remove invalidated line starts. // Remove invalidated line starts.
let line = self.byte_to_line(start_byte).unwrap(); inner.lines.truncate(line + 1);
self.lines.truncate(line + 1);
// Handle adjoining of \r and \n. // Handle adjoining of \r and \n.
if self.text[..start_byte].ends_with('\r') && with.starts_with('\n') { if inner.text[..start_byte].ends_with('\r') && with.starts_with('\n') {
self.lines.pop(); inner.lines.pop();
} }
// Recalculate the line starts after the edit. // Recalculate the line starts after the edit.
self.lines inner.lines.extend(lines_from(
.extend(lines_from(start_byte, start_utf16, &self.text[start_byte..])); start_byte,
start_utf16,
&inner.text[start_byte..],
));
// Incrementally reparse the replaced range. // Incrementally reparse the replaced range.
self.root inner
.update(|root| reparse(root, &self.text, replace, with.len())) .root
.update(|root| reparse(root, &inner.text, replace, with.len()))
} }
/// Get the length of the file in UTF-8 encoded bytes. /// Get the length of the file in UTF-8 encoded bytes.
pub fn len_bytes(&self) -> usize { pub fn len_bytes(&self) -> usize {
self.text.len() self.text().len()
} }
/// Get the length of the file in UTF-16 code units. /// Get the length of the file in UTF-16 code units.
pub fn len_utf16(&self) -> usize { pub fn len_utf16(&self) -> usize {
let last = self.lines.last().unwrap(); let last = self.0.lines.last().unwrap();
last.utf16_idx + self.text[last.byte_idx..].len_utf16() last.utf16_idx + self.0.text[last.byte_idx..].len_utf16()
} }
/// Get the length of the file in lines. /// Get the length of the file in lines.
pub fn len_lines(&self) -> usize { pub fn len_lines(&self) -> usize {
self.lines.len() self.0.lines.len()
} }
/// Find the node with the given span. /// Find the node with the given span.
/// ///
/// Returns `None` if the span does not point into this source file. /// Returns `None` if the span does not point into this source file.
pub fn find(&self, span: Span) -> Option<LinkedNode<'_>> { pub fn find(&self, span: Span) -> Option<LinkedNode<'_>> {
LinkedNode::new(&self.root).find(span) LinkedNode::new(self.root()).find(span)
}
/// Map a span that points into this source file to a byte range.
///
/// Panics if the span does not point into this source file.
#[track_caller]
pub fn range(&self, span: Span) -> Range<usize> {
self.find(span)
.expect("span does not point into this source file")
.range()
} }
/// Return the index of the UTF-16 code unit at the byte index. /// Return the index of the UTF-16 code unit at the byte index.
pub fn byte_to_utf16(&self, byte_idx: usize) -> Option<usize> { pub fn byte_to_utf16(&self, byte_idx: usize) -> Option<usize> {
let line_idx = self.byte_to_line(byte_idx)?; let line_idx = self.byte_to_line(byte_idx)?;
let line = self.lines.get(line_idx)?; let line = self.0.lines.get(line_idx)?;
let head = self.text.get(line.byte_idx..byte_idx)?; let head = self.0.text.get(line.byte_idx..byte_idx)?;
Some(line.utf16_idx + head.len_utf16()) Some(line.utf16_idx + head.len_utf16())
} }
/// Return the index of the line that contains the given byte index. /// Return the index of the line that contains the given byte index.
pub fn byte_to_line(&self, byte_idx: usize) -> Option<usize> { pub fn byte_to_line(&self, byte_idx: usize) -> Option<usize> {
(byte_idx <= self.text.len()).then(|| { (byte_idx <= self.0.text.len()).then(|| {
match self.lines.binary_search_by_key(&byte_idx, |line| line.byte_idx) { match self.0.lines.binary_search_by_key(&byte_idx, |line| line.byte_idx) {
Ok(i) => i, Ok(i) => i,
Err(i) => i - 1, Err(i) => i - 1,
} }
@ -197,33 +197,33 @@ impl Source {
/// Return the byte index at the UTF-16 code unit. /// Return the byte index at the UTF-16 code unit.
pub fn utf16_to_byte(&self, utf16_idx: usize) -> Option<usize> { pub fn utf16_to_byte(&self, utf16_idx: usize) -> Option<usize> {
let line = self.lines.get( let line = self.0.lines.get(
match self.lines.binary_search_by_key(&utf16_idx, |line| line.utf16_idx) { match self.0.lines.binary_search_by_key(&utf16_idx, |line| line.utf16_idx) {
Ok(i) => i, Ok(i) => i,
Err(i) => i - 1, Err(i) => i - 1,
}, },
)?; )?;
let mut k = line.utf16_idx; let mut k = line.utf16_idx;
for (i, c) in self.text[line.byte_idx..].char_indices() { for (i, c) in self.0.text[line.byte_idx..].char_indices() {
if k >= utf16_idx { if k >= utf16_idx {
return Some(line.byte_idx + i); return Some(line.byte_idx + i);
} }
k += c.len_utf16(); k += c.len_utf16();
} }
(k == utf16_idx).then_some(self.text.len()) (k == utf16_idx).then_some(self.0.text.len())
} }
/// Return the byte position at which the given line starts. /// Return the byte position at which the given line starts.
pub fn line_to_byte(&self, line_idx: usize) -> Option<usize> { pub fn line_to_byte(&self, line_idx: usize) -> Option<usize> {
self.lines.get(line_idx).map(|line| line.byte_idx) self.0.lines.get(line_idx).map(|line| line.byte_idx)
} }
/// Return the range which encloses the given line. /// Return the range which encloses the given line.
pub fn line_to_range(&self, line_idx: usize) -> Option<Range<usize>> { pub fn line_to_range(&self, line_idx: usize) -> Option<Range<usize>> {
let start = self.line_to_byte(line_idx)?; let start = self.line_to_byte(line_idx)?;
let end = self.line_to_byte(line_idx + 1).unwrap_or(self.text.len()); let end = self.line_to_byte(line_idx + 1).unwrap_or(self.0.text.len());
Some(start..end) Some(start..end)
} }
@ -248,42 +248,21 @@ impl Source {
impl Debug for Source { impl Debug for Source {
fn fmt(&self, f: &mut Formatter) -> fmt::Result { fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "Source({})", self.path.display()) write!(f, "Source({})", self.id().path().display())
} }
} }
impl Hash for Source { impl Hash for Source {
fn hash<H: Hasher>(&self, state: &mut H) { fn hash<H: Hasher>(&self, state: &mut H) {
self.id.hash(state); self.0.id.hash(state);
self.path.hash(state); self.0.text.hash(state);
self.text.hash(state); self.0.root.hash(state);
self.root.hash(state);
} }
} }
/// A unique identifier for a loaded source file. impl AsRef<str> for Source {
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] fn as_ref(&self) -> &str {
pub struct SourceId(u16); self.text()
impl SourceId {
/// Create a new source id for a file that is not part of the world.
pub const fn detached() -> Self {
Self(u16::MAX)
}
/// Whether the source id is the detached.
pub const fn is_detached(self) -> bool {
self.0 == Self::detached().0
}
/// Create a source id from a number.
pub const fn from_u16(v: u16) -> Self {
Self(v)
}
/// Extract the underlying number.
pub const fn as_u16(self) -> u16 {
self.0
} }
} }
@ -309,7 +288,7 @@ fn lines_from(
utf16_offset: usize, utf16_offset: usize,
text: &str, text: &str,
) -> impl Iterator<Item = Line> + '_ { ) -> impl Iterator<Item = Line> + '_ {
let mut s = Scanner::new(text); let mut s = unscanny::Scanner::new(text);
let mut utf16_idx = utf16_offset; let mut utf16_idx = utf16_offset;
std::iter::from_fn(move || { std::iter::from_fn(move || {
@ -340,7 +319,7 @@ mod tests {
fn test_source_file_new() { fn test_source_file_new() {
let source = Source::detached(TEST); let source = Source::detached(TEST);
assert_eq!( assert_eq!(
source.lines, source.0.lines,
[ [
Line { byte_idx: 0, utf16_idx: 0 }, Line { byte_idx: 0, utf16_idx: 0 },
Line { byte_idx: 7, utf16_idx: 6 }, Line { byte_idx: 7, utf16_idx: 6 },
@ -421,8 +400,8 @@ mod tests {
let mut source = Source::detached(prev); let mut source = Source::detached(prev);
let result = Source::detached(after); let result = Source::detached(after);
source.edit(range, with); source.edit(range, with);
assert_eq!(source.text, result.text); assert_eq!(source.text(), result.text());
assert_eq!(source.lines, result.lines); assert_eq!(source.0.lines, result.0.lines);
} }
// Test inserting at the beginning. // Test inserting at the beginning.
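For illustration, here is a minimal self-contained sketch of the copy-on-write pattern the reworked `Source` above relies on: a cheap-to-clone wrapper around `Arc<Repr>` whose mutators go through `Arc::make_mut`. The `Doc`/`Repr` names and the single `text` field are placeholders for this sketch, not the real types.

use std::sync::Arc;

#[derive(Clone)]
struct Doc(Arc<Repr>);

#[derive(Clone)]
struct Repr {
    text: String,
}

impl Doc {
    fn new(text: impl Into<String>) -> Self {
        Self(Arc::new(Repr { text: text.into() }))
    }

    fn text(&self) -> &str {
        &self.0.text
    }

    /// Mutation clones the inner `Repr` only if another handle still shares it.
    fn replace(&mut self, text: impl Into<String>) {
        let inner = Arc::make_mut(&mut self.0);
        inner.text = text.into();
    }
}

fn main() {
    let mut a = Doc::new("hello");
    let b = a.clone();  // cheap: just bumps a reference count
    a.replace("world"); // copy-on-write: `b` keeps the old text
    assert_eq!(a.text(), "world");
    assert_eq!(b.text(), "hello");
}

This is why `Source::replace` and `Source::edit` in the diff call `Arc::make_mut(&mut self.0)` before touching `text`, `lines`, or `root`.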

View File

@ -2,13 +2,15 @@ use std::fmt::{self, Debug, Formatter};
use std::num::NonZeroU64; use std::num::NonZeroU64;
use std::ops::Range; use std::ops::Range;
use super::SourceId; use super::Source;
use crate::file::FileId;
use crate::World;
/// A unique identifier for a syntax node. /// A unique identifier for a syntax node.
/// ///
/// This is used throughout the compiler to track which source section an error /// This is used throughout the compiler to track which source section an error
/// or element stems from. Can be [mapped back](super::Source::range) to a byte /// or element stems from. Can be [mapped back](Self::range) to a byte range for
/// range for user facing display. /// user facing display.
/// ///
/// During editing, the span values stay mostly stable, even for nodes behind an /// During editing, the span values stay mostly stable, even for nodes behind an
/// insertion. This is not true for simple ranges as they would shift. Spans can /// insertion. This is not true for simple ranges as they would shift. Spans can
@ -39,7 +41,7 @@ impl Span {
/// ///
/// Panics if the `number` is not contained in `FULL`. /// Panics if the `number` is not contained in `FULL`.
#[track_caller] #[track_caller]
pub const fn new(id: SourceId, number: u64) -> Self { pub const fn new(id: FileId, number: u64) -> Self {
assert!( assert!(
Self::FULL.start <= number && number < Self::FULL.end, Self::FULL.start <= number && number < Self::FULL.end,
"span number outside valid range" "span number outside valid range"
@ -50,12 +52,12 @@ impl Span {
/// A span that does not point into any source file. /// A span that does not point into any source file.
pub const fn detached() -> Self { pub const fn detached() -> Self {
Self::pack(SourceId::detached(), Self::DETACHED) Self::pack(FileId::detached(), Self::DETACHED)
} }
/// Pack the components into a span. /// Pack the components into a span.
#[track_caller] #[track_caller]
const fn pack(id: SourceId, number: u64) -> Span { const fn pack(id: FileId, number: u64) -> Span {
let bits = ((id.as_u16() as u64) << Self::BITS) | number; let bits = ((id.as_u16() as u64) << Self::BITS) | number;
match NonZeroU64::new(bits) { match NonZeroU64::new(bits) {
Some(v) => Self(v), Some(v) => Self(v),
@ -63,20 +65,38 @@ impl Span {
} }
} }
/// Whether the span is detached.
pub const fn is_detached(self) -> bool {
self.source().is_detached()
}
/// The id of the source file the span points into. /// The id of the source file the span points into.
pub const fn source(self) -> SourceId { pub const fn id(self) -> FileId {
SourceId::from_u16((self.0.get() >> Self::BITS) as u16) FileId::from_u16((self.0.get() >> Self::BITS) as u16)
} }
/// The unique number of the span within its source file. /// The unique number of the span within its source file.
pub const fn number(self) -> u64 { pub const fn number(self) -> u64 {
self.0.get() & ((1 << Self::BITS) - 1) self.0.get() & ((1 << Self::BITS) - 1)
} }
/// Whether the span is detached.
pub const fn is_detached(self) -> bool {
self.id().is_detached()
}
/// Get the byte range for this span.
#[track_caller]
pub fn range(self, world: &dyn World) -> Range<usize> {
let source = world
.source(self.id())
.expect("span does not point into any source file");
self.range_in(&source)
}
/// Get the byte range for this span in the given source file.
#[track_caller]
pub fn range_in(self, source: &Source) -> Range<usize> {
source
.find(self)
.expect("span does not point into this source file")
.range()
}
} }
/// A value with a span locating it in the source code. /// A value with a span locating it in the source code.
@ -116,13 +136,13 @@ impl<T: Debug> Debug for Spanned<T> {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::{SourceId, Span}; use super::{FileId, Span};
#[test] #[test]
fn test_span_encoding() { fn test_span_encoding() {
let id = SourceId::from_u16(5); let id = FileId::from_u16(5);
let span = Span::new(id, 10); let span = Span::new(id, 10);
assert_eq!(span.source(), id); assert_eq!(span.id(), id);
assert_eq!(span.number(), 10); assert_eq!(span.number(), 10);
} }
} }
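As a rough illustration of the encoding exercised by `test_span_encoding` above, here is a self-contained sketch that packs a 16-bit file id into the upper bits of a `NonZeroU64` and the per-file number into the lower bits. The concrete `BITS` value used here is an assumption for the example; the real split is whatever `Span::BITS` defines.

use std::num::NonZeroU64;

// Assumed split for this example; the real constant is `Span::BITS`.
const BITS: u32 = 48;

fn pack(id: u16, number: u64) -> NonZeroU64 {
    assert!(number > 0 && number < (1u64 << BITS), "number outside valid range");
    NonZeroU64::new(((id as u64) << BITS) | number).unwrap()
}

fn file_id(span: NonZeroU64) -> u16 {
    (span.get() >> BITS) as u16
}

fn number(span: NonZeroU64) -> u64 {
    span.get() & ((1u64 << BITS) - 1)
}

fn main() {
    // Mirrors `test_span_encoding` above.
    let span = pack(5, 10);
    assert_eq!(file_id(span), 5);
    assert_eq!(number(span), 10);
}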

View File

@ -5,11 +5,11 @@ use std::sync::Arc;
use comemo::Prehashed; use comemo::Prehashed;
/// A shared buffer that is cheap to clone and hash. /// A shared byte buffer that is cheap to clone and hash.
#[derive(Clone, Hash, Eq, PartialEq)] #[derive(Clone, Hash, Eq, PartialEq)]
pub struct Buffer(Arc<Prehashed<Cow<'static, [u8]>>>); pub struct Bytes(Arc<Prehashed<Cow<'static, [u8]>>>);
impl Buffer { impl Bytes {
/// Create a buffer from a static byte slice. /// Create a buffer from a static byte slice.
pub fn from_static(slice: &'static [u8]) -> Self { pub fn from_static(slice: &'static [u8]) -> Self {
Self(Arc::new(Prehashed::new(Cow::Borrowed(slice)))) Self(Arc::new(Prehashed::new(Cow::Borrowed(slice))))
@ -26,19 +26,19 @@ impl Buffer {
} }
} }
impl From<&[u8]> for Buffer { impl From<&[u8]> for Bytes {
fn from(slice: &[u8]) -> Self { fn from(slice: &[u8]) -> Self {
Self(Arc::new(Prehashed::new(slice.to_vec().into()))) Self(Arc::new(Prehashed::new(slice.to_vec().into())))
} }
} }
impl From<Vec<u8>> for Buffer { impl From<Vec<u8>> for Bytes {
fn from(vec: Vec<u8>) -> Self { fn from(vec: Vec<u8>) -> Self {
Self(Arc::new(Prehashed::new(vec.into()))) Self(Arc::new(Prehashed::new(vec.into())))
} }
} }
impl Deref for Buffer { impl Deref for Bytes {
type Target = [u8]; type Target = [u8];
fn deref(&self) -> &Self::Target { fn deref(&self) -> &Self::Target {
@ -46,14 +46,14 @@ impl Deref for Buffer {
} }
} }
impl AsRef<[u8]> for Buffer { impl AsRef<[u8]> for Bytes {
fn as_ref(&self) -> &[u8] { fn as_ref(&self) -> &[u8] {
self self
} }
} }
impl Debug for Buffer { impl Debug for Bytes {
fn fmt(&self, f: &mut Formatter) -> fmt::Result { fn fmt(&self, f: &mut Formatter) -> fmt::Result {
f.pad("Buffer(..)") write!(f, "bytes({})", self.len())
} }
} }
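A short usage sketch of the renamed `Bytes` type, based only on the constructors and trait impls visible in this diff; it assumes `typst::util::Bytes` as re-exported in the next file.

use typst::util::Bytes;

fn main() {
    // Borrows a static slice without copying.
    let a = Bytes::from_static(b"hello");
    // Copies owned or borrowed data into the shared buffer.
    let b = Bytes::from(vec![1u8, 2, 3]);
    let c = Bytes::from(&b"hello"[..]);

    // `Deref` and `AsRef` expose the raw bytes; clones share the allocation.
    assert_eq!(a.as_ref(), c.as_ref());
    assert_eq!(b.as_ref(), &[1u8, 2, 3][..]);

    // The new `Debug` impl prints the length, e.g. `bytes(5)`.
    println!("{:?}", a);
}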

View File

@ -2,9 +2,9 @@
pub mod fat; pub mod fat;
mod buffer; mod bytes;
pub use buffer::Buffer; pub use bytes::Bytes;
use std::fmt::{self, Debug, Formatter}; use std::fmt::{self, Debug, Formatter};
use std::hash::Hash; use std::hash::Hash;
@ -125,26 +125,60 @@ where
pub trait PathExt { pub trait PathExt {
/// Lexically normalize a path. /// Lexically normalize a path.
fn normalize(&self) -> PathBuf; fn normalize(&self) -> PathBuf;
/// Treat `self` as a virtual root relative to which the `path` is resolved.
///
/// Returns `None` if the path lexically escapes the root. The path
/// might still escape through symlinks.
fn join_rooted(&self, path: &Path) -> Option<PathBuf>;
} }
impl PathExt for Path { impl PathExt for Path {
#[tracing::instrument(skip_all)]
fn normalize(&self) -> PathBuf { fn normalize(&self) -> PathBuf {
let mut out = PathBuf::new(); let mut out = PathBuf::new();
for component in self.components() { for component in self.components() {
match component { match component {
Component::CurDir => {} Component::CurDir => {}
Component::ParentDir => match out.components().next_back() { Component::ParentDir => match out.components().next_back() {
Some(Component::RootDir) => {}
Some(Component::Normal(_)) => { Some(Component::Normal(_)) => {
out.pop(); out.pop();
} }
_ => out.push(component), _ => out.push(component),
}, },
_ => out.push(component), Component::Prefix(_) | Component::RootDir | Component::Normal(_) => {
out.push(component)
}
} }
} }
if out.as_os_str().is_empty() {
out.push(Component::CurDir);
}
out out
} }
fn join_rooted(&self, path: &Path) -> Option<PathBuf> {
let mut parts: Vec<_> = self.components().collect();
let root = parts.len();
for component in path.components() {
match component {
Component::Prefix(_) => return None,
Component::RootDir => parts.truncate(root),
Component::CurDir => {}
Component::ParentDir => {
if parts.len() <= root {
return None;
}
parts.pop();
}
Component::Normal(_) => parts.push(component),
}
}
if parts.len() < root {
return None;
}
Some(parts.into_iter().collect())
}
} }
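A rough usage sketch of the `join_rooted` semantics defined above, assuming the `PathExt` trait from this commit is in scope: relative paths resolve below the virtual root, absolute paths are re-rooted instead of escaping, and `..` may not climb out.

use std::path::Path;
use typst::util::PathExt;

fn main() {
    let root = Path::new("packages/adder-0.1.0");

    // Relative paths resolve below the virtual root.
    assert_eq!(
        root.join_rooted(Path::new("lib.typ")).as_deref(),
        Some(Path::new("packages/adder-0.1.0/lib.typ"))
    );

    // Absolute paths are re-rooted rather than escaping.
    assert_eq!(
        root.join_rooted(Path::new("/lib.typ")).as_deref(),
        Some(Path::new("packages/adder-0.1.0/lib.typ"))
    );

    // `..` may not lexically escape the root.
    assert_eq!(root.join_rooted(Path::new("../escape.typ")), None);
}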
/// Format pieces separated with commas and a final "and" or "or". /// Format pieces separated with commas and a final "and" or "or".

View File

@ -10,7 +10,6 @@ publish = false
typst = { path = ".." } typst = { path = ".." }
typst-library = { path = "../library" } typst-library = { path = "../library" }
comemo = "0.3" comemo = "0.3"
elsa = "1.8"
iai = { git = "https://github.com/reknih/iai" } iai = { git = "https://github.com/reknih/iai" }
once_cell = "1" once_cell = "1"
oxipng = "8.0.0" oxipng = "8.0.0"

1
tests/packages/adder-0.1.0/lib.typ vendored Normal file
View File

@ -0,0 +1 @@
#let add(x, y) = x + y

4
tests/packages/adder-0.1.0/typst.toml vendored Normal file
View File

@ -0,0 +1,4 @@
[package]
name = "adder"
version = "0.1.0"
entrypoint = "lib.typ"

View File

@ -1,13 +1,12 @@
use std::path::Path;
use comemo::{Prehashed, Track, Tracked}; use comemo::{Prehashed, Track, Tracked};
use iai::{black_box, main, Iai}; use iai::{black_box, main, Iai};
use typst::diag::{FileError, FileResult}; use typst::diag::FileResult;
use typst::eval::{Datetime, Library}; use typst::eval::{Datetime, Library};
use typst::file::FileId;
use typst::font::{Font, FontBook}; use typst::font::{Font, FontBook};
use typst::geom::Color; use typst::geom::Color;
use typst::syntax::{Source, SourceId}; use typst::syntax::Source;
use typst::util::Buffer; use typst::util::Bytes;
use typst::World; use typst::World;
use unscanny::Scanner; use unscanny::Scanner;
@ -124,31 +123,27 @@ impl World for BenchWorld {
&self.library &self.library
} }
fn main(&self) -> &Source {
&self.source
}
fn resolve(&self, path: &Path) -> FileResult<SourceId> {
Err(FileError::NotFound(path.into()))
}
fn source(&self, _: SourceId) -> &Source {
&self.source
}
fn book(&self) -> &Prehashed<FontBook> { fn book(&self) -> &Prehashed<FontBook> {
&self.book &self.book
} }
fn main(&self) -> Source {
self.source.clone()
}
fn source(&self, _: FileId) -> FileResult<Source> {
unimplemented!()
}
fn file(&self, _: FileId) -> FileResult<Bytes> {
unimplemented!()
}
fn font(&self, _: usize) -> Option<Font> { fn font(&self, _: usize) -> Option<Font> {
Some(self.font.clone()) Some(self.font.clone())
} }
fn file(&self, path: &Path) -> FileResult<Buffer> {
Err(FileError::NotFound(path.into()))
}
fn today(&self, _: Option<i64>) -> Option<Datetime> { fn today(&self, _: Option<i64>) -> Option<Datetime> {
Some(Datetime::from_ymd(1970, 1, 1).unwrap()) unimplemented!()
} }
} }

View File

@ -13,11 +13,11 @@ use std::path::{Path, PathBuf};
use clap::Parser; use clap::Parser;
use comemo::{Prehashed, Track}; use comemo::{Prehashed, Track};
use elsa::FrozenVec;
use oxipng::{InFile, Options, OutFile}; use oxipng::{InFile, Options, OutFile};
use rayon::iter::{ParallelBridge, ParallelIterator}; use rayon::iter::{ParallelBridge, ParallelIterator};
use std::cell::OnceCell; use std::cell::OnceCell;
use tiny_skia as sk; use tiny_skia as sk;
use typst::file::FileId;
use unscanny::Scanner; use unscanny::Scanner;
use walkdir::WalkDir; use walkdir::WalkDir;
@ -26,8 +26,8 @@ use typst::doc::{Document, Frame, FrameItem, Meta};
use typst::eval::{eco_format, func, Datetime, Library, NoneValue, Value}; use typst::eval::{eco_format, func, Datetime, Library, NoneValue, Value};
use typst::font::{Font, FontBook}; use typst::font::{Font, FontBook};
use typst::geom::{Abs, Color, RgbaColor, Smart}; use typst::geom::{Abs, Color, RgbaColor, Smart};
use typst::syntax::{Source, SourceId, Span, SyntaxNode}; use typst::syntax::{Source, Span, SyntaxNode};
use typst::util::{Buffer, PathExt}; use typst::util::{Bytes, PathExt};
use typst::World; use typst::World;
use typst_library::layout::{Margin, PageElem}; use typst_library::layout::{Margin, PageElem};
use typst_library::text::{TextElem, TextSize}; use typst_library::text::{TextElem, TextSize};
@ -197,34 +197,21 @@ fn library() -> Library {
} }
/// A world that provides access to the tests environment. /// A world that provides access to the tests environment.
#[derive(Clone)]
struct TestWorld { struct TestWorld {
print: PrintConfig, print: PrintConfig,
main: FileId,
library: Prehashed<Library>, library: Prehashed<Library>,
book: Prehashed<FontBook>, book: Prehashed<FontBook>,
fonts: Vec<Font>, fonts: Vec<Font>,
paths: RefCell<HashMap<PathBuf, PathSlot>>, paths: RefCell<HashMap<PathBuf, PathSlot>>,
sources: FrozenVec<Box<Source>>,
main: SourceId,
} }
impl Clone for TestWorld { #[derive(Clone)]
fn clone(&self) -> Self {
Self {
print: self.print,
library: self.library.clone(),
book: self.book.clone(),
fonts: self.fonts.clone(),
paths: self.paths.clone(),
sources: FrozenVec::from_iter(self.sources.iter().cloned().map(Box::new)),
main: self.main,
}
}
}
#[derive(Default, Clone)]
struct PathSlot { struct PathSlot {
source: OnceCell<FileResult<SourceId>>, system_path: PathBuf,
buffer: OnceCell<FileResult<Buffer>>, source: OnceCell<FileResult<Source>>,
buffer: OnceCell<FileResult<Bytes>>,
} }
impl TestWorld { impl TestWorld {
@ -243,92 +230,81 @@ impl TestWorld {
Self { Self {
print, print,
main: FileId::detached(),
library: Prehashed::new(library()), library: Prehashed::new(library()),
book: Prehashed::new(FontBook::from_fonts(&fonts)), book: Prehashed::new(FontBook::from_fonts(&fonts)),
fonts, fonts,
paths: RefCell::default(), paths: RefCell::default(),
sources: FrozenVec::new(),
main: SourceId::detached(),
} }
} }
} }
impl World for TestWorld { impl World for TestWorld {
fn root(&self) -> &Path {
Path::new(FILE_DIR)
}
fn library(&self) -> &Prehashed<Library> { fn library(&self) -> &Prehashed<Library> {
&self.library &self.library
} }
fn main(&self) -> &Source {
self.source(self.main)
}
fn resolve(&self, path: &Path) -> FileResult<SourceId> {
self.slot(path)
.source
.get_or_init(|| {
let buf = read(path)?;
let text = String::from_utf8(buf)?;
Ok(self.insert(path, text))
})
.clone()
}
fn source(&self, id: SourceId) -> &Source {
&self.sources[id.as_u16() as usize]
}
fn book(&self) -> &Prehashed<FontBook> { fn book(&self) -> &Prehashed<FontBook> {
&self.book &self.book
} }
fn font(&self, id: usize) -> Option<Font> { fn main(&self) -> Source {
Some(self.fonts[id].clone()) self.source(self.main).unwrap()
} }
fn file(&self, path: &Path) -> FileResult<Buffer> { fn source(&self, id: FileId) -> FileResult<Source> {
self.slot(path) let slot = self.slot(id)?;
.buffer slot.source
.get_or_init(|| read(path).map(Buffer::from)) .get_or_init(|| {
let buf = read(&slot.system_path)?;
let text = String::from_utf8(buf)?;
Ok(Source::new(id, text))
})
.clone() .clone()
} }
fn file(&self, id: FileId) -> FileResult<Bytes> {
let slot = self.slot(id)?;
slot.buffer
.get_or_init(|| read(&slot.system_path).map(Bytes::from))
.clone()
}
fn font(&self, id: usize) -> Option<Font> {
Some(self.fonts[id].clone())
}
fn today(&self, _: Option<i64>) -> Option<Datetime> { fn today(&self, _: Option<i64>) -> Option<Datetime> {
Some(Datetime::from_ymd(1970, 1, 1).unwrap()) Some(Datetime::from_ymd(1970, 1, 1).unwrap())
} }
} }
impl TestWorld { impl TestWorld {
fn set(&mut self, path: &Path, text: String) -> SourceId { fn set(&mut self, path: &Path, text: String) -> Source {
let slot = self.slot(path); self.main = FileId::new(None, path);
let id = if let Some(&Ok(id)) = slot.source.get() { let mut slot = self.slot(self.main).unwrap();
drop(slot); let source = Source::new(self.main, text);
self.sources.as_mut()[id.as_u16() as usize].replace(text); slot.source = OnceCell::from(Ok(source.clone()));
id source
} else { }
let id = self.insert(path, text);
slot.source.set(Ok(id)).unwrap(); fn slot(&self, id: FileId) -> FileResult<RefMut<PathSlot>> {
drop(slot); let path = id.path();
id let root: PathBuf = match id.package() {
Some(spec) => format!("packages/{}-{}", spec.name, spec.version).into(),
None if path.is_relative() => PathBuf::new(),
None => FILE_DIR.into(),
}; };
self.main = id;
id
}
fn slot(&self, path: &Path) -> RefMut<PathSlot> { let system_path = root.join_rooted(id.path()).ok_or(FileError::AccessDenied)?;
RefMut::map(self.paths.borrow_mut(), |paths| {
paths.entry(path.normalize()).or_default()
})
}
fn insert(&self, path: &Path, text: String) -> SourceId { Ok(RefMut::map(self.paths.borrow_mut(), |paths| {
let id = SourceId::from_u16(self.sources.len() as u16); paths.entry(system_path.clone()).or_insert_with(|| PathSlot {
let source = Source::new(id, path, text); system_path,
self.sources.push(Box::new(source)); source: OnceCell::new(),
id buffer: OnceCell::new(),
})
}))
} }
} }
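For clarity, a self-contained sketch of the root selection performed by `slot` above: package ids are looked up under a vendored `packages/{name}-{version}` directory, while plain ids fall back to the working or shared file directory. `Spec` and the directory strings are illustrative stand-ins, not the real `PackageSpec` or `FILE_DIR`.

use std::path::PathBuf;

// Illustrative stand-in for the package spec attached to a file id.
struct Spec {
    name: String,
    version: String,
}

fn root_for(package: Option<&Spec>, path_is_relative: bool) -> PathBuf {
    match package {
        // Package files live under a vendored `packages/{name}-{version}` folder.
        Some(spec) => format!("packages/{}-{}", spec.name, spec.version).into(),
        // Relative, non-package ids resolve against the working directory.
        None if path_is_relative => PathBuf::new(),
        // Absolute, non-package ids resolve against the shared file directory.
        None => PathBuf::from("tests"), // stand-in for `FILE_DIR`
    }
}

fn main() {
    let adder = Spec { name: "adder".into(), version: "0.1.0".into() };
    assert_eq!(root_for(Some(&adder), true), PathBuf::from("packages/adder-0.1.0"));
    assert_eq!(root_for(None, true), PathBuf::new());
}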
@ -522,26 +498,25 @@ fn test_part(
) -> (bool, bool, Vec<Frame>) { ) -> (bool, bool, Vec<Frame>) {
let mut ok = true; let mut ok = true;
let id = world.set(src_path, text); let source = world.set(src_path, text);
let source = world.source(id);
if world.print.syntax { if world.print.syntax {
writeln!(output, "Syntax Tree:\n{:#?}\n", source.root()).unwrap(); writeln!(output, "Syntax Tree:\n{:#?}\n", source.root()).unwrap();
} }
let metadata = parse_part_metadata(source); let metadata = parse_part_metadata(&source);
let compare_ref = metadata.part_configuration.compare_ref.unwrap_or(compare_ref); let compare_ref = metadata.part_configuration.compare_ref.unwrap_or(compare_ref);
let validate_hints = let validate_hints =
metadata.part_configuration.validate_hints.unwrap_or(validate_hints); metadata.part_configuration.validate_hints.unwrap_or(validate_hints);
ok &= test_spans(output, source.root()); ok &= test_spans(output, source.root());
ok &= test_reparse(output, world.source(id).text(), i, rng); ok &= test_reparse(output, source.text(), i, rng);
if world.print.model { if world.print.model {
let world = (world as &dyn World).track(); let world = (world as &dyn World).track();
let route = typst::eval::Route::default(); let route = typst::eval::Route::default();
let mut tracer = typst::eval::Tracer::default(); let mut tracer = typst::eval::Tracer::default();
let module = let module =
typst::eval::eval(world, route.track(), tracer.track_mut(), source).unwrap(); typst::eval::eval(world, route.track(), tracer.track_mut(), &source).unwrap();
writeln!(output, "Model:\n{:#?}\n", module.content()).unwrap(); writeln!(output, "Model:\n{:#?}\n", module.content()).unwrap();
} }
@ -563,15 +538,17 @@ fn test_part(
// however, as the line of the hint is still verified. // however, as the line of the hint is still verified.
let actual_errors_and_hints: HashSet<UserOutput> = errors let actual_errors_and_hints: HashSet<UserOutput> = errors
.into_iter() .into_iter()
.filter(|error| error.span.source() == id) .inspect(|error| assert!(!error.span.is_detached()))
.filter(|error| error.span.id() == source.id())
.flat_map(|error| { .flat_map(|error| {
let range = error.span.range(world);
let output_error = let output_error =
UserOutput::Error(error.range(world), error.message.replace('\\', "/")); UserOutput::Error(range.clone(), error.message.replace('\\', "/"));
let hints = error let hints = error
.hints .hints
.iter() .iter()
.filter(|_| validate_hints) // No unexpected hints should be verified if disabled. .filter(|_| validate_hints) // No unexpected hints should be verified if disabled.
.map(|hint| UserOutput::Hint(error.range(world), hint.to_string())); .map(|hint| UserOutput::Hint(range.clone(), hint.to_string()));
iter::once(output_error).chain(hints).collect::<Vec<_>>() iter::once(output_error).chain(hints).collect::<Vec<_>>()
}) })
.collect(); .collect();
@ -596,12 +573,12 @@ fn test_part(
for unexpected in unexpected_outputs { for unexpected in unexpected_outputs {
write!(output, " Not annotated | ").unwrap(); write!(output, " Not annotated | ").unwrap();
print_user_output(output, source, line, unexpected) print_user_output(output, &source, line, unexpected)
} }
for missing in missing_outputs { for missing in missing_outputs {
write!(output, " Not emitted | ").unwrap(); write!(output, " Not emitted | ").unwrap();
print_user_output(output, source, line, missing) print_user_output(output, &source, line, missing)
} }
} }
@ -820,7 +797,7 @@ fn test_reparse(
let source = Source::detached(text); let source = Source::detached(text);
let leafs = leafs(source.root()); let leafs = leafs(source.root());
let start = source.range(leafs[pick(0..leafs.len())].span()).start; let start = source.find(leafs[pick(0..leafs.len())].span()).unwrap().offset();
let supplement = supplements[pick(0..supplements.len())]; let supplement = supplements[pick(0..supplements.len())];
ok &= apply(start..start, supplement); ok &= apply(start..start, supplement);

View File

@ -1,4 +1,4 @@
// Test diagnostics. // Test hints on diagnostics.
// Ref: false // Ref: false
--- ---
@ -23,13 +23,17 @@
--- ---
= Heading <intro> = Heading <intro>
// Error: 1:20-1:26 cannot reference heading without numbering // Error: 1:20-1:26 cannot reference heading without numbering
// Hint: 1:20-1:26 did you mean to use `#set heading(numbering: "1.")`? // Hint: 1:20-1:26 you can enable heading numbering with `#set heading(numbering: "1.")`?
Can not be used as @intro Can not be used as @intro
--- ---
// This test is more of a tooling test. It checks if hint annotation validation
// can be turned off.
// Hints: false // Hints: false
// This test is more of a tooling test. It checks if hint annotation validation can be turned off.
= Heading <intro> = Heading <intro>
// Error: 1:20-1:26 cannot reference heading without numbering // Error: 1:20-1:26 cannot reference heading without numbering
Can not be used as @intro Can not be used as @intro

View File

@ -0,0 +1,64 @@
// Test package imports
// Ref: false
---
// Test import without items.
#import "@test/adder:0.1.0"
#test(adder.add(2, 8), 10)
---
// Test import with items.
#import "@test/adder:0.1.0": add
#test(add(2, 8), 10)
---
// Error: 9-13 `@` is not a valid package namespace
#import "@@": *
---
// Error: 9-16 package specification is missing name
#import "@heya": *
---
// Error: 9-15 `123` is not a valid package namespace
#import "@123": *
---
// Error: 9-17 package specification is missing name
#import "@test/": *
---
// Error: 9-22 package specification is missing version
#import "@test/mypkg": *
---
// Error: 9-20 `$$$` is not a valid package name
#import "@test/$$$": *
---
// Error: 9-23 package specification is missing version
#import "@test/mypkg:": *
---
// Error: 9-24 version number is missing minor version
#import "@test/mypkg:0": *
---
// Error: 9-29 `latest` is not a valid major version
#import "@test/mypkg:latest": *
---
// Error: 9-29 `-3` is not a valid major version
#import "@test/mypkg:-3.0.0": *
---
// Error: 9-26 version number is missing patch version
#import "@test/mypkg:0.3": *
---
// Error: 9-27 version number is missing patch version
#import "@test/mypkg:0.3.": *
---
// Error: 9-28 file not found (searched at typ/compiler/#test/mypkg:1.0.0)
#import "#test/mypkg:1.0.0": *

View File

@ -54,9 +54,9 @@ A #box(image("/tiger.jpg", height: 1cm, width: 80%)) B
#image("path/does/not/exist") #image("path/does/not/exist")
--- ---
// Error: 8-21 unknown image format // Error: 2-22 unknown image format
#image("./image.typ") #image("./image.typ")
--- ---
// Error: 8-18 failed to parse svg: found closing tag 'g' instead of 'style' in line 4 // Error: 2-19 failed to parse svg: found closing tag 'g' instead of 'style' in line 4
#image("/bad.svg") #image("/bad.svg")