Basic package management

Laurenz 2023-06-26 13:57:21 +02:00
parent 9c7f31870b
commit 7b92bd7c34
44 changed files with 1413 additions and 810 deletions

Cargo.lock (generated)

@ -522,6 +522,9 @@ name = "ecow"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5c5051925c54d9a42c8652313b5358a7432eed209466b443ed5220431243a14"
dependencies = [
"serde",
]
[[package]]
name = "either"
@ -529,15 +532,6 @@ version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91"
[[package]]
name = "elsa"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5e0aca8dce8856e420195bd13b6a64de3334235ccc9214e824b86b12bf26283"
dependencies = [
"stable_deref_trait",
]
[[package]]
name = "enum-ordinalize"
version = "3.1.13"
@ -1726,6 +1720,21 @@ dependencies = [
"bytemuck",
]
[[package]]
name = "ring"
version = "0.16.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc"
dependencies = [
"cc",
"libc",
"once_cell",
"spin",
"untrusted",
"web-sys",
"winapi",
]
[[package]]
name = "roff"
version = "0.2.1"
@ -1783,6 +1792,28 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "rustls"
version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e32ca28af694bc1bbf399c33a516dbdf1c90090b8ab23c2bc24f834aa2247f5f"
dependencies = [
"log",
"ring",
"rustls-webpki",
"sct",
]
[[package]]
name = "rustls-webpki"
version = "0.100.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6207cd5ed3d8dca7816f8f3725513a34609c0c765bf652b8c3cb4cfd87db46b"
dependencies = [
"ring",
"untrusted",
]
[[package]]
name = "rustversion"
version = "1.0.12"
@ -1826,6 +1857,16 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
[[package]]
name = "sct"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4"
dependencies = [
"ring",
"untrusted",
]
[[package]]
name = "semver"
version = "1.0.17"
@ -1929,6 +1970,12 @@ version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
[[package]]
name = "spin"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
[[package]]
name = "stable_deref_trait"
version = "1.2.0"
@ -2091,6 +2138,17 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
[[package]]
name = "tar"
version = "0.4.38"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b55807c0344e1e6c04d7c965f5289c39a8d94ae23ed5c0b57aabac549f871c6"
dependencies = [
"filetime",
"libc",
"xattr",
]
[[package]]
name = "tempfile"
version = "3.5.0"
@ -2382,6 +2440,7 @@ dependencies = [
"svg2pdf",
"time",
"tiny-skia",
"toml",
"tracing",
"ttf-parser",
"typst-macros",
@ -2405,7 +2464,7 @@ dependencies = [
"codespan-reporting",
"comemo",
"dirs",
"elsa",
"flate2",
"inferno",
"memmap2",
"notify",
@ -2413,6 +2472,7 @@ dependencies = [
"open",
"same-file",
"siphasher",
"tar",
"tempfile",
"tracing",
"tracing-error",
@ -2420,6 +2480,7 @@ dependencies = [
"tracing-subscriber",
"typst",
"typst-library",
"ureq",
"walkdir",
]
@ -2496,7 +2557,6 @@ version = "0.5.0"
dependencies = [
"clap 4.2.7",
"comemo",
"elsa",
"iai",
"once_cell",
"oxipng",
@ -2623,6 +2683,28 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e9df2af067a7953e9c3831320f35c1cc0600c30d44d9f7a12b01db1cd88d6b47"
[[package]]
name = "untrusted"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
[[package]]
name = "ureq"
version = "2.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b11c96ac7ee530603dcdf68ed1557050f374ce55a5a07193ebf8cbc9f8927e9"
dependencies = [
"base64",
"flate2",
"log",
"once_cell",
"rustls",
"rustls-webpki",
"url",
"webpki-roots",
]
[[package]]
name = "url"
version = "2.3.1"
@ -2788,6 +2870,25 @@ version = "0.2.86"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed9d5b4305409d1fc9482fee2d7f9bcbf24b3972bf59817ef757e23982242a93"
[[package]]
name = "web-sys"
version = "0.3.63"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3bdd9ef4e984da1187bf8110c5cf5b845fbc87a23602cdf912386a76fcd3a7c2"
dependencies = [
"js-sys",
"wasm-bindgen",
]
[[package]]
name = "webpki-roots"
version = "0.23.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b03058f88386e5ff5310d9111d53f48b17d732b401aeb83a8d5190f2ac459338"
dependencies = [
"rustls-webpki",
]
[[package]]
name = "weezl"
version = "0.1.7"
@ -2999,6 +3100,15 @@ dependencies = [
"tap",
]
[[package]]
name = "xattr"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d1526bbe5aaeb5eb06885f4d987bcdfa5e23187055de9b83fe00156a821fabc"
dependencies = [
"libc",
]
[[package]]
name = "xmlparser"
version = "0.13.5"


@ -34,7 +34,7 @@ typst-macros = { path = "macros" }
bitflags = { version = "2", features = ["serde"] }
bytemuck = "1"
comemo = "0.3"
ecow = "0.1"
ecow = { version = "0.1.1", features = ["serde"] }
flate2 = "1"
fontdb = "0.13"
if_chain = "1"
@ -54,6 +54,7 @@ siphasher = "0.3"
subsetter = "0.1.1"
svg2pdf = { git = "https://github.com/typst/svg2pdf" }
tiny-skia = "0.9.0"
toml = { version = "0.7.3", default-features = false, features = ["parse"] }
tracing = "0.1.37"
ttf-parser = "0.18.1"
unicode-general-category = "0.6"


@ -176,6 +176,9 @@ cargo build --release
The optimized binary will be stored in `target/release/`.
Another good way to contribute is by [sharing packages][packages] with the
community.
## Pronunciation and Spelling
IPA: /taɪpst/. "Ty" like in **Ty**pesetting and "pst" like in Hi**pst**er. When
writing about Typst, capitalize its name as a proper noun, with a capital "T".
@ -219,4 +222,5 @@ instant preview. To achieve these goals, we follow three core design principles:
[releases]: https://github.com/typst/typst/releases/
[architecture]: https://github.com/typst/typst/blob/main/ARCHITECTURE.md
[contributing]: https://github.com/typst/typst/blob/main/CONTRIBUTING.md
[packages]: https://github.com/typst/packages/
[`comemo`]: https://github.com/typst/comemo/


@ -27,7 +27,7 @@ clap = { version = "4.2.4", features = ["derive", "env"] }
codespan-reporting = "0.11"
comemo = "0.3"
dirs = "5"
elsa = "1.8"
flate2 = "1"
inferno = "0.11.15"
memmap2 = "0.5"
notify = "5"
@ -35,11 +35,13 @@ once_cell = "1"
open = "4.0.2"
same-file = "1"
siphasher = "0.3"
tar = "0.4"
tempfile = "3.5.0"
tracing = "0.1.37"
tracing-error = "0.2"
tracing-flame = "0.2.0"
tracing-subscriber = "0.3.17"
ureq = "2"
walkdir = "2"
[build-dependencies]


@ -12,13 +12,14 @@ pub struct CliArguments {
pub command: Command,
/// Add additional directories to search for fonts
#[clap(long = "font-path", env = "TYPST_FONT_PATHS", value_name = "DIR", action = ArgAction::Append)]
#[clap(
long = "font-path",
env = "TYPST_FONT_PATHS",
value_name = "DIR",
action = ArgAction::Append,
)]
pub font_paths: Vec<PathBuf>,
/// Configure the root for absolute paths
#[clap(long = "root", env = "TYPST_ROOT", value_name = "DIR")]
pub root: Option<PathBuf>,
/// Sets the level of logging verbosity:
/// -v = warning & error, -vv = info, -vvv = debug, -vvvv = trace
#[clap(short, long, action = ArgAction::Count)]


@ -2,7 +2,8 @@ mod args;
mod trace;
use std::cell::{Cell, RefCell, RefMut};
use std::collections::HashMap;
use std::collections::{HashMap, HashSet};
use std::env;
use std::fs::{self, File};
use std::hash::Hash;
use std::io::{self, IsTerminal, Write};
@ -14,20 +15,22 @@ use clap::Parser;
use codespan_reporting::diagnostic::{Diagnostic, Label};
use codespan_reporting::term::{self, termcolor};
use comemo::Prehashed;
use elsa::FrozenVec;
use memmap2::Mmap;
use notify::{RecommendedWatcher, RecursiveMode, Watcher};
use same_file::{is_same_file, Handle};
use siphasher::sip128::{Hasher128, SipHasher13};
use std::cell::OnceCell;
use termcolor::{ColorChoice, StandardStream, WriteColor};
use typst::diag::{bail, FileError, FileResult, SourceError, StrResult};
use typst::diag::{
bail, FileError, FileResult, PackageError, PackageResult, SourceError, StrResult,
};
use typst::doc::Document;
use typst::eval::{eco_format, Datetime, Library};
use typst::file::{FileId, PackageSpec};
use typst::font::{Font, FontBook, FontInfo, FontVariant};
use typst::geom::Color;
use typst::syntax::{Source, SourceId};
use typst::util::{Buffer, PathExt};
use typst::syntax::Source;
use typst::util::{Bytes, PathExt};
use typst::World;
use walkdir::WalkDir;
@ -96,8 +99,6 @@ struct CompileSettings {
output: PathBuf,
/// Whether to watch the input files for changes.
watch: bool,
/// The root directory for absolute paths.
root: Option<PathBuf>,
/// The paths to search for fonts.
font_paths: Vec<PathBuf>,
/// The open command to use.
@ -115,7 +116,6 @@ impl CompileSettings {
input: PathBuf,
output: Option<PathBuf>,
watch: bool,
root: Option<PathBuf>,
font_paths: Vec<PathBuf>,
open: Option<Option<String>>,
ppi: Option<f32>,
@ -129,7 +129,6 @@ impl CompileSettings {
input,
output,
watch,
root,
font_paths,
open,
diagnostic_format,
@ -150,16 +149,7 @@ impl CompileSettings {
_ => unreachable!(),
};
Self::new(
input,
output,
watch,
args.root,
args.font_paths,
open,
ppi,
diagnostic_format,
)
Self::new(input, output, watch, args.font_paths, open, ppi, diagnostic_format)
}
}
@ -190,20 +180,8 @@ impl FontsSettings {
/// Execute a compilation command.
fn compile(mut command: CompileSettings) -> StrResult<()> {
// Determine the parent directory of the input file.
let parent = command
.input
.canonicalize()
.ok()
.as_ref()
.and_then(|path| path.parent())
.unwrap_or(Path::new("."))
.to_owned();
let root = command.root.as_ref().unwrap_or(&parent);
// Create the world that serves sources, fonts and files.
let mut world = SystemWorld::new(root.into(), &command.font_paths);
// Create the world that serves sources, files, and fonts.
let mut world = SystemWorld::new(&command.input, &command.font_paths);
// Perform initial compilation.
let ok = compile_once(&mut world, &command)?;
@ -223,29 +201,10 @@ fn compile(mut command: CompileSettings) -> StrResult<()> {
// Setup file watching.
let (tx, rx) = std::sync::mpsc::channel();
let mut watcher = RecommendedWatcher::new(tx, notify::Config::default())
.map_err(|_| "failed to watch directory")?;
.map_err(|_| "failed to setup file watching")?;
// Watch the input file's parent directory recursively.
watcher
.watch(&parent, RecursiveMode::Recursive)
.map_err(|_| "failed to watch parent directory")?;
// Watch the root directory recursively.
if world.root != parent {
watcher
.watch(&world.root, RecursiveMode::Recursive)
.map_err(|_| "failed to watch root directory")?;
}
// Watch all the files that are used in the input file and its dependencies
let mut dependencies = world.dependencies();
for dep in &dependencies {
tracing::debug!("Watching {:?}", dep);
watcher
.watch(dep, RecursiveMode::NonRecursive)
.map_err(|_| format!("failed to watch {:?}", dep))?;
}
// Watch all the files that are used by the input file and its dependencies.
world.watch(&mut watcher, HashSet::new())?;
// Handle events.
let timeout = std::time::Duration::from_millis(100);
@ -265,28 +224,21 @@ fn compile(mut command: CompileSettings) -> StrResult<()> {
continue;
}
recompile |= world.relevant(&event);
recompile |= is_event_relevant(&event);
}
if recompile {
// Retrieve the dependencies of the last compilation.
let dependencies = world.dependencies();
// Recompile.
let ok = compile_once(&mut world, &command)?;
comemo::evict(30);
comemo::evict(10);
// Unwatch all the previous dependencies before watching the new dependencies
for dep in &dependencies {
watcher
.unwatch(dep)
.map_err(|_| format!("failed to unwatch {:?}", dep))?;
}
dependencies = world.dependencies();
for dep in &dependencies {
tracing::debug!("Watching {:?}", dep);
watcher
.watch(dep, RecursiveMode::NonRecursive)
.map_err(|_| format!("failed to watch {:?}", dep))?;
}
// Adjust the watching.
world.watch(&mut watcher, dependencies)?;
// Ipen the file if requested, this must be done on the first
// Open the file if requested, this must be done on the first
// **successful** compilation
if ok {
if let Some(open) = command.open.take() {
@ -307,8 +259,9 @@ fn compile_once(world: &mut SystemWorld, command: &CompileSettings) -> StrResult
let start = std::time::Instant::now();
status(command, Status::Compiling).unwrap();
// Reset everything and ensure that the main file is still present.
world.reset();
world.main = world.resolve(&command.input).map_err(|err| err.to_string())?;
world.source(world.main).map_err(|err| err.to_string())?;
let result = typst::compile(world);
let duration = start.elapsed();
@ -461,7 +414,6 @@ fn print_diagnostics(
for error in errors {
// The main diagnostic.
let range = error.range(world);
let diag = Diagnostic::error()
.with_message(error.message)
.with_notes(
@ -471,7 +423,7 @@ fn print_diagnostics(
.map(|e| (eco_format!("hint: {e}")).into())
.collect(),
)
.with_labels(vec![Label::primary(error.span.source(), range)]);
.with_labels(vec![Label::primary(error.span.id(), error.span.range(world))]);
term::emit(&mut w, &config, world, &diag)?;
@ -479,10 +431,7 @@ fn print_diagnostics(
for point in error.trace {
let message = point.v.to_string();
let help = Diagnostic::help().with_message(message).with_labels(vec![
Label::primary(
point.span.source(),
world.source(point.span.source()).range(point.span),
),
Label::primary(point.span.id(), point.span.range(world)),
]);
term::emit(&mut w, &config, world, &help)?;
@ -492,19 +441,6 @@ fn print_diagnostics(
Ok(())
}
/// Opens the given file using:
/// - The default file viewer if `open` is `None`.
/// - The given viewer provided by `open` if it is `Some`.
fn open_file(open: Option<&str>, path: &Path) -> StrResult<()> {
if let Some(app) = open {
open::with_in_background(path, app);
} else {
open::that_in_background(path);
}
Ok(())
}
/// Execute a font listing command.
fn fonts(command: FontsSettings) -> StrResult<()> {
let mut searcher = FontSearcher::new();
@ -525,196 +461,224 @@ fn fonts(command: FontsSettings) -> StrResult<()> {
/// A world that provides access to the operating system.
struct SystemWorld {
/// The root relative to which absolute paths are resolved.
root: PathBuf,
/// The input path.
main: FileId,
/// Typst's standard library.
library: Prehashed<Library>,
/// Metadata about discovered fonts.
book: Prehashed<FontBook>,
/// Locations of and storage for lazily loaded fonts.
fonts: Vec<FontSlot>,
hashes: RefCell<HashMap<PathBuf, FileResult<PathHash>>>,
/// Maps package-path combinations to canonical hashes. All package-path
/// combinations that point to the same file are mapped to the same hash. To
/// be used in conjunction with `paths`.
hashes: RefCell<HashMap<FileId, FileResult<PathHash>>>,
/// Maps canonical path hashes to source files and buffers.
paths: RefCell<HashMap<PathHash, PathSlot>>,
sources: FrozenVec<Box<Source>>,
today: Cell<Option<Datetime>>,
main: SourceId,
dependencies: RefCell<Vec<PathBuf>>,
/// The current date if requested. This is stored here to ensure it is
/// always the same within one compilation. Reset between compilations.
today: OnceCell<Option<Datetime>>,
}
/// Holds details about the location of a font and lazily the font itself.
struct FontSlot {
/// The path at which the font can be found on the system.
path: PathBuf,
/// The index of the font in its collection. Zero if the path does not point
/// to a collection.
index: u32,
/// The lazily loaded font.
font: OnceCell<Option<Font>>,
}
/// Holds canonical data for all paths pointing to the same entity.
#[derive(Default)]
///
/// Both fields can be populated if the file is both imported and read().
struct PathSlot {
source: OnceCell<FileResult<SourceId>>,
buffer: OnceCell<FileResult<Buffer>>,
/// The slot's path on the system.
system_path: PathBuf,
/// The lazily loaded source file for a path hash.
source: OnceCell<FileResult<Source>>,
/// The lazily loaded buffer for a path hash.
buffer: OnceCell<FileResult<Bytes>>,
}
impl SystemWorld {
fn new(root: PathBuf, font_paths: &[PathBuf]) -> Self {
fn new(input: &Path, font_paths: &[PathBuf]) -> Self {
let mut searcher = FontSearcher::new();
searcher.search(font_paths);
let root = input
.canonicalize()
.ok()
.as_ref()
.and_then(|path| path.parent())
.unwrap_or(Path::new("."))
.to_owned();
let file = input.file_name().unwrap_or(input.as_os_str());
let main = FileId::new(None, Path::new(file));
Self {
root,
main,
library: Prehashed::new(typst_library::build()),
book: Prehashed::new(searcher.book),
fonts: searcher.fonts,
hashes: RefCell::default(),
paths: RefCell::default(),
sources: FrozenVec::new(),
today: Cell::new(None),
main: SourceId::detached(),
dependencies: RefCell::default(),
today: OnceCell::new(),
}
}
}
impl World for SystemWorld {
fn root(&self) -> &Path {
&self.root
}
fn library(&self) -> &Prehashed<Library> {
&self.library
}
fn main(&self) -> &Source {
self.source(self.main)
}
#[tracing::instrument(skip_all)]
fn resolve(&self, path: &Path) -> FileResult<SourceId> {
self.slot(path)?
.source
.get_or_init(|| {
let buf = read(path)?;
let text = if buf.starts_with(b"\xef\xbb\xbf") {
// remove UTF-8 BOM
std::str::from_utf8(&buf[3..])?.to_owned()
} else {
// Assume UTF-8
String::from_utf8(buf)?
};
self.dependencies.borrow_mut().push(path.to_owned());
Ok(self.insert(path, text))
})
.clone()
}
fn source(&self, id: SourceId) -> &Source {
&self.sources[id.as_u16() as usize]
}
fn book(&self) -> &Prehashed<FontBook> {
&self.book
}
fn main(&self) -> Source {
self.source(self.main).unwrap()
}
fn source(&self, id: FileId) -> FileResult<Source> {
let slot = self.slot(id)?;
slot.source
.get_or_init(|| {
let buf = read(&slot.system_path)?;
let text = decode_utf8(buf)?;
Ok(Source::new(id, text))
})
.clone()
}
fn file(&self, id: FileId) -> FileResult<Bytes> {
let slot = self.slot(id)?;
slot.buffer
.get_or_init(|| read(&slot.system_path).map(Bytes::from))
.clone()
}
fn font(&self, id: usize) -> Option<Font> {
let slot = &self.fonts[id];
slot.font
.get_or_init(|| {
let data = self.file(&slot.path).ok()?;
let data = read(&slot.path).ok()?.into();
Font::new(data, slot.index)
})
.clone()
}
fn file(&self, path: &Path) -> FileResult<Buffer> {
self.slot(path)?
.buffer
.get_or_init(|| {
self.dependencies.borrow_mut().push(path.to_owned());
read(path).map(Buffer::from)
})
.clone()
}
fn today(&self, offset: Option<i64>) -> Option<Datetime> {
if self.today.get().is_none() {
let datetime = match offset {
*self.today.get_or_init(|| {
let naive = match offset {
None => chrono::Local::now().naive_local(),
Some(o) => (chrono::Utc::now() + chrono::Duration::hours(o)).naive_utc(),
};
self.today.set(Some(Datetime::from_ymd(
datetime.year(),
datetime.month().try_into().ok()?,
datetime.day().try_into().ok()?,
)?))
}
self.today.get()
Datetime::from_ymd(
naive.year(),
naive.month().try_into().ok()?,
naive.day().try_into().ok()?,
)
})
}
}
impl SystemWorld {
/// Access the canonical slot for the given path.
#[tracing::instrument(skip_all)]
fn slot(&self, path: &Path) -> FileResult<RefMut<PathSlot>> {
let mut hashes = self.hashes.borrow_mut();
let hash = match hashes.get(path).cloned() {
Some(hash) => hash,
None => {
let hash = PathHash::new(path);
if let Ok(canon) = path.canonicalize() {
hashes.insert(canon.normalize(), hash.clone());
}
hashes.insert(path.into(), hash.clone());
hash
}
}?;
fn slot(&self, id: FileId) -> FileResult<RefMut<PathSlot>> {
let mut system_path = PathBuf::new();
let hash = self
.hashes
.borrow_mut()
.entry(id)
.or_insert_with(|| {
// Determine the root path relative to which the file path
// will be resolved.
let root = match id.package() {
Some(spec) => prepare_package(spec)?,
None => self.root.clone(),
};
Ok(std::cell::RefMut::map(self.paths.borrow_mut(), |paths| {
paths.entry(hash).or_default()
// Join the path to the root. If it tries to escape, deny
// access. Note: It can still escape via symlinks.
system_path =
root.join_rooted(id.path()).ok_or(FileError::AccessDenied)?;
PathHash::new(&system_path)
})
.clone()?;
Ok(RefMut::map(self.paths.borrow_mut(), |paths| {
paths.entry(hash).or_insert_with(|| PathSlot {
// This will only trigger if the `or_insert_with` above also
// triggered.
system_path,
source: OnceCell::new(),
buffer: OnceCell::new(),
})
}))
}
/// Collect all paths the last compilation depended on.
#[tracing::instrument(skip_all)]
fn insert(&self, path: &Path, text: String) -> SourceId {
let id = SourceId::from_u16(self.sources.len() as u16);
let source = Source::new(id, path, text);
self.sources.push(Box::new(source));
id
fn dependencies(&self) -> HashSet<PathBuf> {
self.paths
.borrow()
.values()
.map(|slot| slot.system_path.clone())
.collect()
}
fn relevant(&mut self, event: &notify::Event) -> bool {
match &event.kind {
notify::EventKind::Any => {}
notify::EventKind::Access(_) => return false,
notify::EventKind::Create(_) => return true,
notify::EventKind::Modify(kind) => match kind {
notify::event::ModifyKind::Any => {}
notify::event::ModifyKind::Data(_) => {}
notify::event::ModifyKind::Metadata(_) => return false,
notify::event::ModifyKind::Name(_) => return true,
notify::event::ModifyKind::Other => return false,
},
notify::EventKind::Remove(_) => {}
notify::EventKind::Other => return false,
/// Adjust the file watching. Watches all new dependencies and unwatches
/// all `previous` dependencies that are not relevant anymore.
#[tracing::instrument(skip_all)]
fn watch(
&self,
watcher: &mut dyn Watcher,
mut previous: HashSet<PathBuf>,
) -> StrResult<()> {
// Watch new paths that weren't watched yet.
for slot in self.paths.borrow().values() {
let path = &slot.system_path;
let watched = previous.remove(path);
if path.exists() && !watched {
tracing::info!("Watching {}", path.display());
watcher
.watch(path, RecursiveMode::NonRecursive)
.map_err(|_| eco_format!("failed to watch {path:?}"))?;
}
}
event.paths.iter().any(|path| self.dependant(path))
}
fn dependant(&self, path: &Path) -> bool {
self.hashes.borrow().contains_key(&path.normalize())
|| PathHash::new(path)
.map_or(false, |hash| self.paths.borrow().contains_key(&hash))
// Unwatch old paths that don't need to be watched anymore.
for path in previous {
tracing::info!("Unwatching {}", path.display());
watcher.unwatch(&path).ok();
}
Ok(())
}
/// Reset the compilation state in preparation of a new compilation.
#[tracing::instrument(skip_all)]
fn reset(&mut self) {
self.sources.as_mut().clear();
self.hashes.borrow_mut().clear();
self.paths.borrow_mut().clear();
self.today.set(None);
self.dependencies.borrow_mut().clear();
self.today.take();
}
// Return a list of files the document depends on
fn dependencies(&self) -> Vec<PathBuf> {
self.dependencies.borrow().clone()
/// Lookup a source file by id.
#[track_caller]
fn lookup(&self, id: FileId) -> Source {
self.source(id).expect("file id does not point to any source file")
}
}
@ -743,21 +707,130 @@ fn read(path: &Path) -> FileResult<Vec<u8>> {
}
}
/// Decode UTF-8 with an optional BOM.
fn decode_utf8(buf: Vec<u8>) -> FileResult<String> {
Ok(if buf.starts_with(b"\xef\xbb\xbf") {
// Remove UTF-8 BOM.
std::str::from_utf8(&buf[3..])?.into()
} else {
// Assume UTF-8.
String::from_utf8(buf)?
})
}
/// Make a package available in the on-disk cache.
fn prepare_package(spec: &PackageSpec) -> PackageResult<PathBuf> {
let subdir =
format!("typst/packages/{}/{}-{}", spec.namespace, spec.name, spec.version);
if let Some(data_dir) = dirs::data_dir() {
let dir = data_dir.join(&subdir);
if dir.exists() {
return Ok(dir);
}
}
if let Some(cache_dir) = dirs::cache_dir() {
let dir = cache_dir.join(&subdir);
// Download from network if it doesn't exist yet.
if spec.namespace == "preview" && !dir.exists() {
download_package(spec, &dir)?;
}
if dir.exists() {
return Ok(dir);
}
}
Err(PackageError::NotFound(spec.clone()))
}
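For illustration: a hypothetical spec @preview/example:0.1.0 would resolve to typst/packages/preview/example-0.1.0 under the data directory, or to the same subdirectory under the cache directory, into which @preview packages are downloaded on demand.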
/// Download a package over the network.
fn download_package(spec: &PackageSpec, package_dir: &Path) -> PackageResult<()> {
// The `@preview` namespace is the only namespace that supports on-demand
// fetching.
assert_eq!(spec.namespace, "preview");
let url = format!(
"https://packages.typst.org/preview/{}-{}.tar.gz",
spec.name, spec.version
);
print_downloading(spec).unwrap();
let reader = match ureq::get(&url).call() {
Ok(response) => response.into_reader(),
Err(ureq::Error::Status(404, _)) => {
return Err(PackageError::NotFound(spec.clone()))
}
Err(_) => return Err(PackageError::NetworkFailed),
};
let decompressed = flate2::read::GzDecoder::new(reader);
tar::Archive::new(decompressed).unpack(package_dir).map_err(|_| {
fs::remove_dir_all(package_dir).ok();
PackageError::MalformedArchive
})
}
/// Print that a package downloading is happening.
fn print_downloading(spec: &PackageSpec) -> io::Result<()> {
let mut w = color_stream();
let styles = term::Styles::default();
w.set_color(&styles.header_help)?;
write!(w, "downloading")?;
w.reset()?;
writeln!(w, " {spec}")
}
/// Opens the given file using:
/// - The default file viewer if `open` is `None`.
/// - The given viewer provided by `open` if it is `Some`.
fn open_file(open: Option<&str>, path: &Path) -> StrResult<()> {
if let Some(app) = open {
open::with_in_background(path, app);
} else {
open::that_in_background(path);
}
Ok(())
}
/// Whether a watch event is relevant for compilation.
fn is_event_relevant(event: &notify::Event) -> bool {
match &event.kind {
notify::EventKind::Any => true,
notify::EventKind::Access(_) => false,
notify::EventKind::Create(_) => true,
notify::EventKind::Modify(kind) => match kind {
notify::event::ModifyKind::Any => true,
notify::event::ModifyKind::Data(_) => true,
notify::event::ModifyKind::Metadata(_) => false,
notify::event::ModifyKind::Name(_) => true,
notify::event::ModifyKind::Other => false,
},
notify::EventKind::Remove(_) => true,
notify::EventKind::Other => false,
}
}
impl<'a> codespan_reporting::files::Files<'a> for SystemWorld {
type FileId = SourceId;
type Name = std::path::Display<'a>;
type Source = &'a str;
type FileId = FileId;
type Name = FileId;
type Source = Source;
fn name(&'a self, id: SourceId) -> CodespanResult<Self::Name> {
Ok(World::source(self, id).path().display())
fn name(&'a self, id: FileId) -> CodespanResult<Self::Name> {
Ok(id)
}
fn source(&'a self, id: SourceId) -> CodespanResult<Self::Source> {
Ok(World::source(self, id).text())
fn source(&'a self, id: FileId) -> CodespanResult<Self::Source> {
Ok(self.lookup(id))
}
fn line_index(&'a self, id: SourceId, given: usize) -> CodespanResult<usize> {
let source = World::source(self, id);
fn line_index(&'a self, id: FileId, given: usize) -> CodespanResult<usize> {
let source = self.lookup(id);
source
.byte_to_line(given)
.ok_or_else(|| CodespanError::IndexTooLarge {
@ -768,10 +841,10 @@ impl<'a> codespan_reporting::files::Files<'a> for SystemWorld {
fn line_range(
&'a self,
id: SourceId,
id: FileId,
given: usize,
) -> CodespanResult<std::ops::Range<usize>> {
let source = World::source(self, id);
let source = self.lookup(id);
source
.line_to_range(given)
.ok_or_else(|| CodespanError::LineTooLarge { given, max: source.len_lines() })
@ -779,11 +852,11 @@ impl<'a> codespan_reporting::files::Files<'a> for SystemWorld {
fn column_number(
&'a self,
id: SourceId,
id: FileId,
_: usize,
given: usize,
) -> CodespanResult<usize> {
let source = World::source(self, id);
let source = self.lookup(id);
source.byte_to_column(given).ok_or_else(|| {
let max = source.len_bytes();
if given <= max {
@ -823,7 +896,7 @@ impl FontSearcher {
#[cfg(feature = "embed-fonts")]
fn search_embedded(&mut self) {
let mut search = |bytes: &'static [u8]| {
let buffer = Buffer::from_static(bytes);
let buffer = Bytes::from_static(bytes);
for (i, font) in Font::iter(buffer).enumerate() {
self.book.push(font.info().clone());
self.fonts.push(FontSlot {
@ -852,45 +925,36 @@ impl FontSearcher {
}
/// Search for fonts in the linux system font directories.
#[cfg(all(unix, not(target_os = "macos")))]
fn search_system(&mut self) {
self.search_dir("/usr/share/fonts");
self.search_dir("/usr/local/share/fonts");
if cfg!(target_os = "macos") {
self.search_dir("/Library/Fonts");
self.search_dir("/Network/Library/Fonts");
self.search_dir("/System/Library/Fonts");
} else if cfg!(unix) {
self.search_dir("/usr/share/fonts");
self.search_dir("/usr/local/share/fonts");
} else if cfg!(windows) {
self.search_dir(
env::var_os("WINDIR")
.map(PathBuf::from)
.unwrap_or_else(|| "C:\\Windows".into())
.join("Fonts"),
);
if let Some(roaming) = dirs::config_dir() {
self.search_dir(roaming.join("Microsoft\\Windows\\Fonts"));
}
if let Some(local) = dirs::cache_dir() {
self.search_dir(local.join("Microsoft\\Windows\\Fonts"));
}
}
if let Some(dir) = dirs::font_dir() {
self.search_dir(dir);
}
}
/// Search for fonts in the macOS system font directories.
#[cfg(target_os = "macos")]
fn search_system(&mut self) {
self.search_dir("/Library/Fonts");
self.search_dir("/Network/Library/Fonts");
self.search_dir("/System/Library/Fonts");
if let Some(dir) = dirs::font_dir() {
self.search_dir(dir);
}
}
/// Search for fonts in the Windows system font directories.
#[cfg(windows)]
fn search_system(&mut self) {
let windir =
std::env::var("WINDIR").unwrap_or_else(|_| "C:\\Windows".to_string());
self.search_dir(Path::new(&windir).join("Fonts"));
if let Some(roaming) = dirs::config_dir() {
self.search_dir(roaming.join("Microsoft\\Windows\\Fonts"));
}
if let Some(local) = dirs::cache_dir() {
self.search_dir(local.join("Microsoft\\Windows\\Fonts"));
}
}
/// Search for all fonts in a directory recursively.
fn search_dir(&mut self, path: impl AsRef<Path>) {
for entry in WalkDir::new(path)


@ -6,10 +6,11 @@ use pulldown_cmark as md;
use typed_arena::Arena;
use typst::diag::FileResult;
use typst::eval::Datetime;
use typst::file::FileId;
use typst::font::{Font, FontBook};
use typst::geom::{Point, Size};
use typst::syntax::{Source, SourceId};
use typst::util::Buffer;
use typst::syntax::Source;
use typst::util::Bytes;
use typst::World;
use yaml_front_matter::YamlFrontMatter;
@ -414,7 +415,8 @@ fn code_block(resolver: &dyn Resolver, lang: &str, text: &str) -> Html {
return Html::new(format!("<pre>{}</pre>", highlighted.as_str()));
}
let source = Source::new(SourceId::from_u16(0), Path::new("main.typ"), compile);
let id = FileId::new(None, Path::new("main.typ"));
let source = Source::new(id, compile);
let world = DocWorld(source);
let mut frames = match typst::compile(&world) {
Ok(doc) => doc.pages,
@ -461,7 +463,7 @@ fn nest_heading(level: &mut md::HeadingLevel) {
};
}
/// World for example compilations.
/// A world for example compilations.
struct DocWorld(Source);
impl World for DocWorld {
@ -469,35 +471,31 @@ impl World for DocWorld {
&LIBRARY
}
fn main(&self) -> &Source {
&self.0
}
fn resolve(&self, _: &Path) -> FileResult<SourceId> {
unimplemented!()
}
fn source(&self, id: SourceId) -> &Source {
assert_eq!(id.as_u16(), 0, "invalid source id");
&self.0
}
fn book(&self) -> &Prehashed<FontBook> {
&FONTS.0
}
fn font(&self, id: usize) -> Option<Font> {
Some(FONTS.1[id].clone())
fn main(&self) -> Source {
self.0.clone()
}
fn file(&self, path: &Path) -> FileResult<Buffer> {
fn source(&self, _: FileId) -> FileResult<Source> {
Ok(self.0.clone())
}
fn file(&self, id: FileId) -> FileResult<Bytes> {
assert!(id.package().is_none());
Ok(FILES
.get_file(path)
.unwrap_or_else(|| panic!("failed to load {path:?}"))
.get_file(id.path())
.unwrap_or_else(|| panic!("failed to load {:?}", id.path().display()))
.contents()
.into())
}
fn font(&self, index: usize) -> Option<Font> {
Some(FONTS.1[index].clone())
}
fn today(&self, _: Option<i64>) -> Option<Datetime> {
Some(Datetime::from_ymd(1970, 1, 1).unwrap())
}


@ -57,6 +57,7 @@ pub fn provide(resolver: &dyn Resolver) -> Vec<PageModel> {
tutorial_pages(resolver),
reference_pages(resolver),
guides_pages(resolver),
packages_page(),
markdown_page(resolver, "/docs/", "general/changelog.md"),
markdown_page(resolver, "/docs/", "general/community.md"),
]
@ -118,6 +119,7 @@ pub enum BodyModel {
Funcs(FuncsModel),
Type(TypeModel),
Symbols(SymbolsModel),
Packages,
}
/// Build the tutorial.
@ -133,14 +135,6 @@ fn tutorial_pages(resolver: &dyn Resolver) -> PageModel {
page
}
/// Build the guides section.
fn guides_pages(resolver: &dyn Resolver) -> PageModel {
let mut page = markdown_page(resolver, "/docs/", "guides/welcome.md");
page.children =
vec![markdown_page(resolver, "/docs/guides/", "guides/guide-for-latex-users.md")];
page
}
/// Build the reference.
fn reference_pages(resolver: &dyn Resolver) -> PageModel {
let mut page = markdown_page(resolver, "/docs/", "reference/welcome.md");
@ -164,6 +158,27 @@ fn reference_pages(resolver: &dyn Resolver) -> PageModel {
page
}
/// Build the guides section.
fn guides_pages(resolver: &dyn Resolver) -> PageModel {
let mut page = markdown_page(resolver, "/docs/", "guides/welcome.md");
page.children =
vec![markdown_page(resolver, "/docs/guides/", "guides/guide-for-latex-users.md")];
page
}
/// Build the packages section.
fn packages_page() -> PageModel {
PageModel {
route: "/docs/packages/".into(),
title: "Packages".into(),
description: "Packages for Typst.".into(),
part: None,
outline: vec![],
body: BodyModel::Packages,
children: vec![],
}
}
/// Create a page from a markdown file.
#[track_caller]
fn markdown_page(


@ -25,8 +25,8 @@ pub fn read(
vm: &mut Vm,
) -> SourceResult<Str> {
let Spanned { v: path, span } = path;
let path = vm.locate(&path).at(span)?;
let data = vm.world().file(&path).at(span)?;
let id = vm.location().join(&path).at(span)?;
let data = vm.world().file(id).at(span)?;
let text = std::str::from_utf8(&data)
.map_err(|_| "file is not valid utf-8")
.at(span)?;
@ -66,8 +66,8 @@ pub fn csv(
vm: &mut Vm,
) -> SourceResult<Array> {
let Spanned { v: path, span } = path;
let path = vm.locate(&path).at(span)?;
let data = vm.world().file(&path).at(span)?;
let id = vm.location().join(&path).at(span)?;
let data = vm.world().file(id).at(span)?;
let mut builder = csv::ReaderBuilder::new();
builder.has_headers(false);
@ -177,8 +177,8 @@ pub fn json(
vm: &mut Vm,
) -> SourceResult<Value> {
let Spanned { v: path, span } = path;
let path = vm.locate(&path).at(span)?;
let data = vm.world().file(&path).at(span)?;
let id = vm.location().join(&path).at(span)?;
let data = vm.world().file(id).at(span)?;
let value: serde_json::Value =
serde_json::from_slice(&data).map_err(format_json_error).at(span)?;
Ok(convert_json(value))
@ -243,8 +243,8 @@ pub fn toml(
vm: &mut Vm,
) -> SourceResult<Value> {
let Spanned { v: path, span } = path;
let path = vm.locate(&path).at(span)?;
let data = vm.world().file(&path).at(span)?;
let id = vm.location().join(&path).at(span)?;
let data = vm.world().file(id).at(span)?;
let raw = std::str::from_utf8(&data)
.map_err(|_| "file is not valid utf-8")
@ -352,8 +352,8 @@ pub fn yaml(
vm: &mut Vm,
) -> SourceResult<Value> {
let Spanned { v: path, span } = path;
let path = vm.locate(&path).at(span)?;
let data = vm.world().file(&path).at(span)?;
let id = vm.location().join(&path).at(span)?;
let data = vm.world().file(id).at(span)?;
let value: serde_yaml::Value =
serde_yaml::from_slice(&data).map_err(format_yaml_error).at(span)?;
Ok(convert_yaml(value))
@ -455,8 +455,8 @@ pub fn xml(
vm: &mut Vm,
) -> SourceResult<Value> {
let Spanned { v: path, span } = path;
let path = vm.locate(&path).at(span)?;
let data = vm.world().file(&path).at(span)?;
let id = vm.location().join(&path).at(span)?;
let data = vm.world().file(id).at(span)?;
let text = std::str::from_utf8(&data).map_err(FileError::from).at(span)?;
let document = roxmltree::Document::parse(text).map_err(format_xml_error).at(span)?;
Ok(convert_xml(document.root()))


@ -7,7 +7,8 @@ use ecow::{eco_vec, EcoVec};
use hayagriva::io::{BibLaTeXError, YamlBibliographyError};
use hayagriva::style::{self, Brackets, Citation, Database, DisplayString, Formatting};
use hayagriva::Entry;
use typst::util::option_eq;
use typst::diag::FileError;
use typst::util::{option_eq, Bytes};
use super::{LinkElem, LocalName, RefElem};
use crate::layout::{BlockElem, GridElem, ParElem, Sizing, TrackSizings, VElem};
@ -49,18 +50,31 @@ pub struct BibliographyElem {
/// Path to a Hayagriva `.yml` or BibLaTeX `.bib` file.
#[required]
#[parse(
let Spanned { v: mut paths, span } =
let Spanned { v: paths, span } =
args.expect::<Spanned<BibPaths>>("path to bibliography file")?;
for path in &mut paths.0 {
// resolve paths
*path = vm.locate(path).at(span)?.to_string_lossy().into();
}
// check that parsing works
let _ = load(vm.world(), &paths).at(span)?;
// Load bibliography files.
let data = paths.0
.iter()
.map(|path| {
let id = vm.location().join(path).at(span)?;
vm.world().file(id).at(span)
})
.collect::<SourceResult<Vec<Bytes>>>()?;
// Check that parsing works.
let _ = load(&paths, &data).at(span)?;
paths
)]
pub path: BibPaths,
/// The raw file buffers.
#[internal]
#[required]
#[parse(data)]
pub data: Vec<Bytes>,
/// The title of the bibliography.
///
/// - When set to `{auto}`, an appropriate title for the [text
@ -80,7 +94,7 @@ pub struct BibliographyElem {
pub style: BibliographyStyle,
}
/// A list of bib file paths.
/// A list of bibliography file paths.
#[derive(Debug, Default, Clone, Hash)]
pub struct BibPaths(Vec<EcoString>);
@ -111,18 +125,20 @@ impl BibliographyElem {
vt.introspector
.query(&Self::func().select())
.into_iter()
.flat_map(|elem| load(vt.world, &elem.to::<Self>().unwrap().path()))
.flat_map(|elem| {
let elem = elem.to::<Self>().unwrap();
load(&elem.path(), &elem.data())
})
.flatten()
.any(|entry| entry.key() == key)
}
/// Find all bibliography keys.
pub fn keys(
world: Tracked<dyn World + '_>,
introspector: Tracked<Introspector>,
) -> Vec<(EcoString, Option<EcoString>)> {
Self::find(introspector)
.and_then(|elem| load(world, &elem.path()))
.and_then(|elem| load(&elem.path(), &elem.data()))
.into_iter()
.flatten()
.map(|entry| {
@ -425,19 +441,15 @@ impl Works {
_ => elem.to::<CiteElem>().unwrap().clone(),
})
.collect();
Ok(create(vt.world, bibliography, citations))
Ok(create(bibliography, citations))
}
}
/// Generate all citations and the whole bibliography.
#[comemo::memoize]
fn create(
world: Tracked<dyn World + '_>,
bibliography: BibliographyElem,
citations: Vec<CiteElem>,
) -> Arc<Works> {
fn create(bibliography: BibliographyElem, citations: Vec<CiteElem>) -> Arc<Works> {
let span = bibliography.span();
let entries = load(world, &bibliography.path()).unwrap();
let entries = load(&bibliography.path(), &bibliography.data()).unwrap();
let style = bibliography.style(StyleChain::default());
let bib_location = bibliography.0.location().unwrap();
let ref_location = |target: &Entry| {
@ -587,16 +599,12 @@ fn create(
/// Load bibliography entries from a path.
#[comemo::memoize]
fn load(
world: Tracked<dyn World + '_>,
paths: &BibPaths,
) -> StrResult<EcoVec<hayagriva::Entry>> {
fn load(paths: &BibPaths, data: &[Bytes]) -> StrResult<EcoVec<hayagriva::Entry>> {
let mut result = EcoVec::new();
// We might have multiple bib/yaml files
for path in &paths.0 {
let buffer = world.file(Path::new(path.as_str()))?;
let src = std::str::from_utf8(&buffer).map_err(|_| "file is not valid utf-8")?;
for (path, bytes) in paths.0.iter().zip(data) {
let src = std::str::from_utf8(bytes).map_err(|_| FileError::InvalidUtf8)?;
let entries = parse_bib(path, src)?;
result.extend(entries);
}


@ -189,7 +189,7 @@ impl Show for RefElem {
)
})
.hint(eco_format!(
"did you mean to use `#set {}(numbering: \"1.\")`?",
"you can enable heading numbering with `#set {}(numbering: \"1.\")`?",
elem.func().name()
))
.at(span)?;


@ -19,6 +19,8 @@ pub use typst::eval::{
Func, IntoValue, Never, NoneValue, Scope, Str, Symbol, Type, Value, Vm,
};
#[doc(no_inline)]
pub use typst::file::FileId;
#[doc(no_inline)]
pub use typst::geom::*;
#[doc(no_inline)]
pub use typst::model::{


@ -2,6 +2,7 @@ use std::ffi::OsStr;
use std::path::Path;
use typst::image::{Image, ImageFormat, RasterFormat, VectorFormat};
use typst::util::Bytes;
use crate::meta::{Figurable, LocalName};
use crate::prelude::*;
@ -37,12 +38,18 @@ pub struct ImageElem {
#[parse(
let Spanned { v: path, span } =
args.expect::<Spanned<EcoString>>("path to image file")?;
let path: EcoString = vm.locate(&path).at(span)?.to_string_lossy().into();
let _ = load(vm.world(), &path, None, None).at(span)?;
let id = vm.location().join(&path).at(span)?;
let data = vm.world().file(id).at(span)?;
path
)]
pub path: EcoString,
/// The raw file data.
#[internal]
#[required]
#[parse(data)]
pub data: Bytes,
/// The width of the image.
pub width: Smart<Rel<Length>>,
@ -65,10 +72,29 @@ impl Layout for ImageElem {
styles: StyleChain,
regions: Regions,
) -> SourceResult<Fragment> {
let first = families(styles).next();
let fallback_family = first.as_ref().map(|f| f.as_str());
let image =
load(vt.world, &self.path(), fallback_family, self.alt(styles)).unwrap();
let ext = Path::new(self.path().as_str())
.extension()
.and_then(OsStr::to_str)
.unwrap_or_default()
.to_lowercase();
let format = match ext.as_str() {
"png" => ImageFormat::Raster(RasterFormat::Png),
"jpg" | "jpeg" => ImageFormat::Raster(RasterFormat::Jpg),
"gif" => ImageFormat::Raster(RasterFormat::Gif),
"svg" | "svgz" => ImageFormat::Vector(VectorFormat::Svg),
_ => bail!(self.span(), "unknown image format"),
};
let image = Image::with_fonts(
self.data(),
format,
vt.world,
families(styles).next().as_ref().map(|f| f.as_str()),
self.alt(styles),
)
.at(self.span())?;
let sizing = Axes::new(self.width(styles), self.height(styles));
let region = sizing
.zip(regions.base())
@ -169,24 +195,3 @@ pub enum ImageFit {
/// this means that the image will be distorted.
Stretch,
}
/// Load an image from a path.
#[comemo::memoize]
fn load(
world: Tracked<dyn World + '_>,
full: &str,
fallback_family: Option<&str>,
alt: Option<EcoString>,
) -> StrResult<Image> {
let full = Path::new(full);
let buffer = world.file(full)?;
let ext = full.extension().and_then(OsStr::to_str).unwrap_or_default();
let format = match ext.to_lowercase().as_str() {
"png" => ImageFormat::Raster(RasterFormat::Png),
"jpg" | "jpeg" => ImageFormat::Raster(RasterFormat::Jpg),
"gif" => ImageFormat::Raster(RasterFormat::Gif),
"svg" | "svgz" => ImageFormat::Vector(VectorFormat::Svg),
_ => bail!("unknown image format"),
};
Image::with_fonts(buffer, format, world, fallback_family, alt)
}


@ -2,14 +2,14 @@
use std::fmt::{self, Display, Formatter};
use std::io;
use std::ops::Range;
use std::path::{Path, PathBuf};
use std::str::Utf8Error;
use std::string::FromUtf8Error;
use comemo::Tracked;
use crate::syntax::{ErrorPos, Span, Spanned};
use crate::file::PackageSpec;
use crate::syntax::{Span, Spanned};
use crate::World;
/// Early-return with a [`StrResult`] or [`SourceResult`].
@ -76,8 +76,6 @@ pub type SourceResult<T> = Result<T, Box<Vec<SourceError>>>;
pub struct SourceError {
/// The span of the erroneous node in the source code.
pub span: Span,
/// The position in the node where the error should be annotated.
pub pos: ErrorPos,
/// A diagnostic message describing the problem.
pub message: EcoString,
/// The trace of function calls leading to the error.
@ -92,36 +90,17 @@ impl SourceError {
pub fn new(span: Span, message: impl Into<EcoString>) -> Self {
Self {
span,
pos: ErrorPos::Full,
trace: vec![],
message: message.into(),
hints: vec![],
}
}
/// Adjust the position in the node where the error should be annotated.
pub fn with_pos(mut self, pos: ErrorPos) -> Self {
self.pos = pos;
self
}
/// Adds user-facing hints to the error.
pub fn with_hints(mut self, hints: impl IntoIterator<Item = EcoString>) -> Self {
self.hints.extend(hints);
self
}
/// The range in the source file identified by
/// [`self.span.source()`](Span::source) where the error should be
/// annotated.
pub fn range(&self, world: &dyn World) -> Range<usize> {
let full = world.source(self.span.source()).range(self.span);
match self.pos {
ErrorPos::Full => full,
ErrorPos::Start => full.start..full.start,
ErrorPos::End => full.end..full.end,
}
}
}
/// A part of an error's [trace](SourceError::trace).
@ -171,12 +150,17 @@ impl<T> Trace<T> for SourceResult<T> {
if span.is_detached() {
return errors;
}
let range = world.source(span.source()).range(span);
let trace_range = span.range(&*world);
for error in errors.iter_mut().filter(|e| !e.span.is_detached()) {
// Skip traces that surround the error.
let error_range = world.source(error.span.source()).range(error.span);
if range.start <= error_range.start && range.end >= error_range.end {
continue;
if error.span.id() == span.id() {
let error_range = error.span.range(&*world);
if trace_range.start <= error_range.start
&& trace_range.end >= error_range.end
{
continue;
}
}
error.trace.push(Spanned::new(make_point(), span));
@ -262,6 +246,8 @@ pub enum FileError {
NotSource,
/// The file was not valid UTF-8, but should have been.
InvalidUtf8,
/// The package the file is part of could not be loaded.
Package(PackageError),
/// Another error.
Other,
}
@ -294,6 +280,7 @@ impl Display for FileError {
Self::IsDirectory => f.pad("failed to load file (is a directory)"),
Self::NotSource => f.pad("not a typst source file"),
Self::InvalidUtf8 => f.pad("file is not valid utf-8"),
Self::Package(error) => error.fmt(f),
Self::Other => f.pad("failed to load file"),
}
}
@ -311,12 +298,54 @@ impl From<FromUtf8Error> for FileError {
}
}
impl From<PackageError> for FileError {
fn from(error: PackageError) -> Self {
Self::Package(error)
}
}
impl From<FileError> for EcoString {
fn from(error: FileError) -> Self {
eco_format!("{error}")
}
}
/// A result type with a package-related error.
pub type PackageResult<T> = Result<T, PackageError>;
/// An error that occured while trying to load a package.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum PackageError {
/// The specified package does not exist.
NotFound(PackageSpec),
/// Failed to retrieve the package through the network.
NetworkFailed,
/// The package archive was malformed.
MalformedArchive,
/// Another error.
Other,
}
impl std::error::Error for PackageError {}
impl Display for PackageError {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match self {
Self::NotFound(spec) => {
write!(f, "package not found (searched for {spec})",)
}
Self::NetworkFailed => f.pad("failed to load package (network failed)"),
Self::MalformedArchive => f.pad("failed to load package (archive malformed)"),
Self::Other => f.pad("failed to load package"),
}
}
}
impl From<PackageError> for EcoString {
fn from(error: PackageError) -> Self {
eco_format!("{error}")
}
}
/// Format a user-facing error message for an XML-like file format.
pub fn format_xml_like_error(format: &str, error: roxmltree::Error) -> EcoString {
match error {


@ -11,9 +11,10 @@ use super::{
Value, Vm,
};
use crate::diag::{bail, SourceResult, StrResult};
use crate::file::FileId;
use crate::model::{DelayedErrors, ElemFunc, Introspector, Locator, Vt};
use crate::syntax::ast::{self, AstNode, Expr, Ident};
use crate::syntax::{SourceId, Span, SyntaxNode};
use crate::syntax::{Span, SyntaxNode};
use crate::World;
/// An evaluatable function.
@ -125,7 +126,6 @@ impl Func {
args: impl IntoIterator<Item = T>,
) -> SourceResult<Value> {
let route = Route::default();
let id = SourceId::detached();
let scopes = Scopes::new(None);
let mut locator = Locator::chained(vt.locator.track());
let vt = Vt {
@ -135,7 +135,7 @@ impl Func {
delayed: TrackedMut::reborrow_mut(&mut vt.delayed),
tracer: TrackedMut::reborrow_mut(&mut vt.tracer),
};
let mut vm = Vm::new(vt, route.track(), id, scopes);
let mut vm = Vm::new(vt, route.track(), FileId::detached(), scopes);
let args = Args::new(self.span(), args);
self.call_vm(&mut vm, args)
}
@ -297,7 +297,7 @@ pub struct ParamInfo {
#[derive(Hash)]
pub(super) struct Closure {
/// The source file where the closure was defined.
pub location: SourceId,
pub location: FileId,
/// The name of the closure.
pub name: Option<Ident>,
/// Captured values from outer scopes.


@ -13,7 +13,6 @@ use crate::geom::{Abs, Dir};
use crate::model::{Content, ElemFunc, Introspector, Label, StyleChain, Styles, Vt};
use crate::syntax::Span;
use crate::util::hash128;
use crate::World;
/// Definition of Typst's standard library.
#[derive(Debug, Clone, Hash)]
@ -66,10 +65,8 @@ pub struct LangItems {
pub reference: fn(target: Label, supplement: Option<Content>) -> Content,
/// The keys contained in the bibliography and short descriptions of them.
#[allow(clippy::type_complexity)]
pub bibliography_keys: fn(
world: Tracked<dyn World + '_>,
introspector: Tracked<Introspector>,
) -> Vec<(EcoString, Option<EcoString>)>,
pub bibliography_keys:
fn(introspector: Tracked<Introspector>) -> Vec<(EcoString, Option<EcoString>)>,
/// A section heading: `= Introduction`.
pub heading: fn(level: NonZeroUsize, body: Content) -> Content,
/// The heading function.


@ -55,27 +55,24 @@ pub use self::value::{Dynamic, Type, Value};
use std::collections::HashSet;
use std::mem;
use std::path::{Path, PathBuf};
use std::path::Path;
use comemo::{Track, Tracked, TrackedMut, Validate};
use ecow::{EcoString, EcoVec};
use unicode_segmentation::UnicodeSegmentation;
use self::func::{CapturesVisitor, Closure};
use crate::diag::{
bail, error, At, SourceError, SourceResult, StrResult, Trace, Tracepoint,
};
use crate::file::{FileId, PackageManifest, PackageSpec};
use crate::model::{
Content, Introspector, Label, Locator, Recipe, ShowableSelector, Styles, Transform,
Unlabellable, Vt,
Content, DelayedErrors, Introspector, Label, Locator, Recipe, ShowableSelector,
Styles, Transform, Unlabellable, Vt,
};
use crate::syntax::ast::AstNode;
use crate::syntax::{
ast, parse_code, Source, SourceId, Span, Spanned, SyntaxKind, SyntaxNode,
};
use crate::util::PathExt;
use crate::syntax::ast::{self, AstNode};
use crate::syntax::{parse_code, Source, Span, Spanned, SyntaxKind, SyntaxNode};
use crate::World;
use crate::{
diag::{bail, error, At, SourceError, SourceResult, StrResult, Trace, Tracepoint},
model::DelayedErrors,
};
const MAX_ITERATIONS: usize = 10_000;
const MAX_CALL_DEPTH: usize = 64;
@ -91,9 +88,8 @@ pub fn eval(
) -> SourceResult<Module> {
// Prevent cyclic evaluation.
let id = source.id();
let path = if id.is_detached() { Path::new("") } else { world.source(id).path() };
if route.contains(id) {
panic!("Tried to cyclicly evaluate {}", path.display());
panic!("Tried to cyclicly evaluate {}", id.path().display());
}
// Hook up the lang items.
@ -130,7 +126,7 @@ pub fn eval(
}
// Assemble the module.
let name = path.file_stem().unwrap_or_default().to_string_lossy();
let name = id.path().file_stem().unwrap_or_default().to_string_lossy();
Ok(Module::new(name).with_scope(vm.scopes.top).with_content(result?))
}
@ -166,7 +162,7 @@ pub fn eval_string(
// Prepare VM.
let route = Route::default();
let id = SourceId::detached();
let id = FileId::detached();
let scopes = Scopes::new(Some(world.library()));
let mut vm = Vm::new(vt, route.track(), id, scopes);
@ -194,7 +190,7 @@ pub struct Vm<'a> {
/// The route of source ids the VM took to reach its current location.
route: Tracked<'a, Route<'a>>,
/// The current location.
location: SourceId,
location: FileId,
/// A control flow event that is currently happening.
flow: Option<FlowEvent>,
/// The stack of scopes.
@ -210,7 +206,7 @@ impl<'a> Vm<'a> {
fn new(
vt: Vt<'a>,
route: Tracked<'a, Route>,
location: SourceId,
location: FileId,
scopes: Scopes<'a>,
) -> Self {
let traced = vt.tracer.span(location);
@ -232,6 +228,11 @@ impl<'a> Vm<'a> {
self.vt.world
}
/// The location relative to which paths are currently resolved.
pub fn location(&self) -> FileId {
self.location
}
/// Define a variable in the current scope.
#[tracing::instrument(skip_all)]
pub fn define(&mut self, var: ast::Ident, value: impl IntoValue) {
@ -241,23 +242,6 @@ impl<'a> Vm<'a> {
}
self.scopes.top.define(var.take(), value);
}
/// Resolve a user-entered path to be relative to the compilation
/// environment's root.
#[tracing::instrument(skip_all)]
pub fn locate(&self, path: &str) -> StrResult<PathBuf> {
if !self.location.is_detached() {
if let Some(path) = path.strip_prefix('/') {
return Ok(self.world().root().join(path).normalize());
}
if let Some(dir) = self.world().source(self.location).path().parent() {
return Ok(dir.join(path).normalize());
}
}
bail!("cannot access file system from here")
}
}
/// A control flow event that occurred during evaluation.
@ -296,12 +280,12 @@ pub struct Route<'a> {
// covariant over the constraint. If it becomes invariant, we're in for a
// world of lifetime pain.
outer: Option<Tracked<'a, Self, <Route<'static> as Validate>::Constraint>>,
id: Option<SourceId>,
id: Option<FileId>,
}
impl<'a> Route<'a> {
/// Create a new route with just one entry.
pub fn new(id: SourceId) -> Self {
pub fn new(id: FileId) -> Self {
Self { id: Some(id), outer: None }
}
@ -309,7 +293,7 @@ impl<'a> Route<'a> {
///
/// You must guarantee that `outer` lives longer than the resulting
/// route is ever used.
pub fn insert(outer: Tracked<'a, Self>, id: SourceId) -> Self {
pub fn insert(outer: Tracked<'a, Self>, id: FileId) -> Self {
Route { outer: Some(outer), id: Some(id) }
}
@ -328,7 +312,7 @@ impl<'a> Route<'a> {
#[comemo::track]
impl<'a> Route<'a> {
/// Whether the given id is part of the route.
fn contains(&self, id: SourceId) -> bool {
fn contains(&self, id: FileId) -> bool {
self.id == Some(id) || self.outer.map_or(false, |outer| outer.contains(id))
}
}
@ -358,8 +342,8 @@ impl Tracer {
#[comemo::track]
impl Tracer {
/// The traced span if it is part of the given source file.
fn span(&self, id: SourceId) -> Option<Span> {
if self.span.map(Span::source) == Some(id) {
fn span(&self, id: FileId) -> Option<Span> {
if self.span.map(Span::id) == Some(id) {
self.span
} else {
None
@ -1764,20 +1748,49 @@ fn import(
}
};
// Handle package and file imports.
let path = path.as_str();
if path.starts_with('@') {
let spec = path.parse::<PackageSpec>().at(span)?;
import_package(vm, spec, span)
} else {
import_file(vm, path, span)
}
}
/// Import an external package.
fn import_package(vm: &mut Vm, spec: PackageSpec, span: Span) -> SourceResult<Module> {
// Evaluate the manifest.
let manifest_id = FileId::new(Some(spec.clone()), Path::new("/typst.toml"));
let bytes = vm.world().file(manifest_id).at(span)?;
let manifest = PackageManifest::parse(&bytes).at(span)?;
manifest.validate(&spec).at(span)?;
// Evaluate the entry point.
let entrypoint = Path::new("/").join(manifest.package.entrypoint.as_str());
let entrypoint_id = FileId::new(Some(spec), &entrypoint);
let source = vm.world().source(entrypoint_id).at(span)?;
let point = || Tracepoint::Import;
Ok(eval(vm.world(), vm.route, TrackedMut::reborrow_mut(&mut vm.vt.tracer), &source)
.trace(vm.world(), point, span)?
.with_name(manifest.package.name))
}
/// Import a file from a path.
fn import_file(vm: &mut Vm, path: &str, span: Span) -> SourceResult<Module> {
// Load the source file.
let world = vm.world();
let full = vm.locate(&path).at(span)?;
let id = world.resolve(&full).at(span)?;
let id = vm.location().join(path).at(span)?;
let source = world.source(id).at(span)?;
// Prevent cyclic importing.
if vm.route.contains(id) {
if vm.route.contains(source.id()) {
bail!(span, "cyclic import");
}
// Evaluate the file.
let source = world.source(id);
let point = || Tracepoint::Import;
eval(world, vm.route, TrackedMut::reborrow_mut(&mut vm.vt.tracer), source)
eval(world, vm.route, TrackedMut::reborrow_mut(&mut vm.vt.tracer), &source)
.trace(world, point, span)
}
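For orientation, a sketch of the user-facing imports that reach these two paths (the package and item names below are hypothetical placeholders):
#import "@preview/example:0.1.0": *  // starts with '@', parsed as a PackageSpec and handled by import_package
#import "utils.typ": helper          // plain path, resolved relative to the current file by import_file
The manifest loaded in import_package is the package's /typst.toml; its package.name and package.entrypoint fields are used here, and validate() checks it against the requested spec.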


@ -7,15 +7,20 @@ use super::{Content, Scope, Value};
use crate::diag::StrResult;
/// An evaluated module, ready for importing or typesetting.
///
/// Values of this type are cheap to clone and hash.
#[derive(Clone, Hash)]
#[allow(clippy::derived_hash_with_manual_eq)]
pub struct Module(Arc<Repr>);
pub struct Module {
/// The module's name.
name: EcoString,
/// The reference-counted inner fields.
inner: Arc<Repr>,
}
/// The internal representation.
#[derive(Clone, Hash)]
struct Repr {
/// The module's name.
name: EcoString,
/// The top-level definitions that were bound in this module.
scope: Scope,
/// The module's layoutable contents.
@ -25,38 +30,43 @@ struct Repr {
impl Module {
/// Create a new module.
pub fn new(name: impl Into<EcoString>) -> Self {
Self(Arc::new(Repr {
Self {
name: name.into(),
scope: Scope::new(),
content: Content::empty(),
}))
inner: Arc::new(Repr { scope: Scope::new(), content: Content::empty() }),
}
}
/// Update the module's name.
pub fn with_name(mut self, name: impl Into<EcoString>) -> Self {
self.name = name.into();
self
}
/// Update the module's scope.
pub fn with_scope(mut self, scope: Scope) -> Self {
Arc::make_mut(&mut self.0).scope = scope;
Arc::make_mut(&mut self.inner).scope = scope;
self
}
/// Update the module's content.
pub fn with_content(mut self, content: Content) -> Self {
Arc::make_mut(&mut self.0).content = content;
Arc::make_mut(&mut self.inner).content = content;
self
}
/// Get the module's name.
pub fn name(&self) -> &EcoString {
&self.0.name
&self.name
}
/// Access the module's scope.
pub fn scope(&self) -> &Scope {
&self.0.scope
&self.inner.scope
}
/// Access the module's scope, mutably.
pub fn scope_mut(&mut self) -> &mut Scope {
&mut Arc::make_mut(&mut self.0).scope
&mut Arc::make_mut(&mut self.inner).scope
}
/// Try to access a definition in the module.
@ -68,7 +78,7 @@ impl Module {
/// Extract the module's content.
pub fn content(self) -> Content {
match Arc::try_unwrap(self.0) {
match Arc::try_unwrap(self.inner) {
Ok(repr) => repr.content,
Err(arc) => arc.content.clone(),
}
@ -83,6 +93,6 @@ impl Debug for Module {
impl PartialEq for Module {
fn eq(&self, other: &Self) -> bool {
Arc::ptr_eq(&self.0, &other.0)
self.name == other.name && Arc::ptr_eq(&self.inner, &other.inner)
}
}
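Not part of the commit, but a minimal usage sketch of the reshaped `Module` API above; the function and the `"calc"` name are purely illustrative.

use typst::eval::{Module, Scope};

fn module_demo() {
    // Build a module with the builder-style methods shown above.
    let module = Module::new("calc").with_scope(Scope::new());
    assert_eq!(module.name(), "calc");

    // Cloning only bumps the inner Arc; scope and content are shared.
    let copy = module.clone();
    assert_eq!(copy.name(), module.name());
}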

View File

@ -15,9 +15,10 @@ use crate::diag::StrResult;
use crate::geom::{Abs, Angle, Color, Em, Fr, Length, Ratio, Rel};
use crate::model::{Label, Styles};
use crate::syntax::{ast, Span};
use crate::util::Bytes;
/// A computational value.
#[derive(Clone, Default)]
#[derive(Default, Clone)]
pub enum Value {
/// The value that indicates the absence of a meaningful value.
#[default]
@ -46,6 +47,8 @@ pub enum Value {
Symbol(Symbol),
/// A string: `"string"`.
Str(Str),
/// Raw bytes.
Bytes(Bytes),
/// A label: `<intro>`.
Label(Label),
/// A content value: `[*Hi* there]`.
@ -103,6 +106,7 @@ impl Value {
Self::Color(_) => Color::TYPE_NAME,
Self::Symbol(_) => Symbol::TYPE_NAME,
Self::Str(_) => Str::TYPE_NAME,
Self::Bytes(_) => Bytes::TYPE_NAME,
Self::Label(_) => Label::TYPE_NAME,
Self::Content(_) => Content::TYPE_NAME,
Self::Styles(_) => Styles::TYPE_NAME,
@ -186,6 +190,7 @@ impl Debug for Value {
Self::Color(v) => Debug::fmt(v, f),
Self::Symbol(v) => Debug::fmt(v, f),
Self::Str(v) => Debug::fmt(v, f),
Self::Bytes(v) => Debug::fmt(v, f),
Self::Label(v) => Debug::fmt(v, f),
Self::Content(v) => Debug::fmt(v, f),
Self::Styles(v) => Debug::fmt(v, f),
@ -228,6 +233,7 @@ impl Hash for Value {
Self::Color(v) => v.hash(state),
Self::Symbol(v) => v.hash(state),
Self::Str(v) => v.hash(state),
Self::Bytes(v) => v.hash(state),
Self::Label(v) => v.hash(state),
Self::Content(v) => v.hash(state),
Self::Styles(v) => v.hash(state),
@ -400,6 +406,7 @@ primitive! {
Str,
Symbol(symbol) => symbol.get().into()
}
primitive! { Bytes: "bytes", Bytes }
primitive! { Label: "label", Label }
primitive! { Content: "content",
Content,

View File

@ -8,7 +8,7 @@ use unicode_general_category::GeneralCategory;
use super::{deflate, EmExt, PdfContext, RefExt};
use crate::font::Font;
use crate::util::{Buffer, SliceExt};
use crate::util::{Bytes, SliceExt};
const CMAP_NAME: Name = Name(b"Custom");
const SYSTEM_INFO: SystemInfo = SystemInfo {
@ -154,7 +154,7 @@ pub fn write_fonts(ctx: &mut PdfContext) {
/// Subset a font to the given glyphs.
#[comemo::memoize]
fn subset_font(font: &Font, glyphs: &[u16]) -> Buffer {
fn subset_font(font: &Font, glyphs: &[u16]) -> Bytes {
let data = font.data();
let profile = subsetter::Profile::pdf(glyphs);
let subsetted = subsetter::subset(data, font.index(), profile);

View File

@ -5,7 +5,7 @@ use pdf_writer::{Filter, Finish};
use super::{deflate, PdfContext, RefExt};
use crate::image::{DecodedImage, Image, RasterFormat};
use crate::util::Buffer;
use crate::util::Bytes;
/// Embed all used images into the PDF.
#[tracing::instrument(skip_all)]
@ -89,7 +89,7 @@ pub fn write_images(ctx: &mut PdfContext) {
/// Skips the alpha channel as that's encoded separately.
#[comemo::memoize]
#[tracing::instrument(skip_all)]
fn encode_image(image: &Image) -> (Buffer, Filter, bool) {
fn encode_image(image: &Image) -> (Bytes, Filter, bool) {
let decoded = image.decoded();
let (dynamic, format) = match decoded.as_ref() {
DecodedImage::Raster(dynamic, _, format) => (dynamic, *format),

285
src/file.rs Normal file
View File

@ -0,0 +1,285 @@
//! File and package management.
use std::collections::HashMap;
use std::fmt::{self, Debug, Display, Formatter};
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::RwLock;
use ecow::{eco_format, EcoString};
use once_cell::sync::Lazy;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use crate::diag::{bail, FileError, StrResult};
use crate::syntax::is_ident;
use crate::util::PathExt;
/// The global package-path interner.
static INTERNER: Lazy<RwLock<Interner>> =
Lazy::new(|| RwLock::new(Interner { to_id: HashMap::new(), from_id: Vec::new() }));
/// A package-path interner.
struct Interner {
to_id: HashMap<Pair, FileId>,
from_id: Vec<Pair>,
}
/// An interned pair of a package specification and a path.
type Pair = &'static (Option<PackageSpec>, PathBuf);
/// Identifies a file.
///
/// This type is interned and thus cheap to clone, compare, and hash.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct FileId(u16);
impl FileId {
/// Create a new interned file specification.
///
/// Normalizes the path before interning.
pub fn new(package: Option<PackageSpec>, path: &Path) -> Self {
let pair = (package, path.normalize());
let mut interner = INTERNER.write().unwrap();
interner.to_id.get(&pair).copied().unwrap_or_else(|| {
let leaked = Box::leak(Box::new(pair));
let len = interner.from_id.len();
if len >= usize::from(u16::MAX) {
panic!("too many file specifications");
}
let id = FileId(len as u16);
interner.to_id.insert(leaked, id);
interner.from_id.push(leaked);
id
})
}
/// Get an id that does not identify any real file.
pub const fn detached() -> Self {
Self(u16::MAX)
}
/// Whether this is the detached id.
pub const fn is_detached(self) -> bool {
self.0 == Self::detached().0
}
/// The package the file resides in, if any.
pub fn package(&self) -> Option<&'static PackageSpec> {
if self.is_detached() {
None
} else {
self.pair().0.as_ref()
}
}
/// The normalized path to the file (within the package if there's a
/// package).
pub fn path(&self) -> &'static Path {
if self.is_detached() {
Path::new("<detached>")
} else {
&self.pair().1
}
}
/// Resolve a file location relative to this file.
pub fn join(self, path: &str) -> StrResult<Self> {
if self.is_detached() {
bail!("cannot access file system from here");
}
let package = self.package().cloned();
let base = self.path();
Ok(if let Some(parent) = base.parent() {
Self::new(package, &parent.join(path))
} else {
Self::new(package, Path::new(path))
})
}
/// Construct from a raw number.
pub(crate) const fn from_u16(v: u16) -> Self {
Self(v)
}
/// Extract the raw underlying number.
pub(crate) const fn as_u16(self) -> u16 {
self.0
}
/// Get the static pair.
fn pair(&self) -> Pair {
INTERNER.read().unwrap().from_id[usize::from(self.0)]
}
}
impl Display for FileId {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let path = self.path().display();
match self.package() {
Some(package) => write!(f, "{package}/{path}"),
None => write!(f, "{path}"),
}
}
}
impl Debug for FileId {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
Display::fmt(self, f)
}
}
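A small sketch (not in the commit) of how the interned `FileId` behaves, using the `new`, `join`, and accessor methods defined above; the `@preview/example:0.1.0` spec and the paths are hypothetical.

use std::path::Path;
use typst::file::{FileId, PackageSpec};

fn file_id_demo() {
    let spec: PackageSpec = "@preview/example:0.1.0".parse().unwrap();

    // Interning: the same (package, path) pair always yields the same id.
    let a = FileId::new(Some(spec.clone()), Path::new("/lib.typ"));
    let b = FileId::new(Some(spec), Path::new("/lib.typ"));
    assert_eq!(a, b);

    // `join` resolves a path relative to the file's directory within the package.
    let helper = a.join("utils/math.typ").unwrap();
    assert_eq!(helper.path(), Path::new("/utils/math.typ"));
    assert_eq!(helper.package(), a.package());
}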
/// Identifies a package.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct PackageSpec {
/// The namespace the package lives in.
pub namespace: EcoString,
/// The name of the package within its namespace.
pub name: EcoString,
/// The package's version.
pub version: Version,
}
impl FromStr for PackageSpec {
type Err = EcoString;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut s = unscanny::Scanner::new(s);
if !s.eat_if('@') {
bail!("package specification must start with '@'");
}
let namespace = s.eat_until('/');
if namespace.is_empty() {
bail!("package specification is missing namespace");
} else if !is_ident(namespace) {
bail!("`{namespace}` is not a valid package namespace");
}
s.eat_if('/');
let name = s.eat_until(':');
if name.is_empty() {
bail!("package specification is missing name");
} else if !is_ident(name) {
bail!("`{name}` is not a valid package name");
}
s.eat_if(':');
let version = s.after();
if version.is_empty() {
bail!("package specification is missing version");
}
Ok(Self {
namespace: namespace.into(),
name: name.into(),
version: version.parse()?,
})
}
}
impl Display for PackageSpec {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "@{}/{}:{}", self.namespace, self.name, self.version)
}
}
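For reference, a sketch (not part of the commit) of how `PackageSpec` round-trips through `FromStr` and `Display`, and which malformed inputs the parser above rejects:

use typst::file::PackageSpec;

fn package_spec_demo() {
    let spec: PackageSpec = "@test/adder:0.1.0".parse().unwrap();
    assert_eq!(spec.namespace, "test");
    assert_eq!(spec.name, "adder");
    assert_eq!(spec.to_string(), "@test/adder:0.1.0");

    // Each missing piece produces its own error message.
    assert!("adder:0.1.0".parse::<PackageSpec>().is_err()); // missing leading '@'
    assert!("@test/adder".parse::<PackageSpec>().is_err()); // missing version
}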
/// A package's version.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct Version {
/// The package's major version.
pub major: u32,
/// The package's minor version.
pub minor: u32,
/// The package's patch version.
pub patch: u32,
}
impl FromStr for Version {
type Err = EcoString;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut parts = s.split('.');
let mut next = |kind| {
let Some(part) = parts.next().filter(|s| !s.is_empty()) else {
bail!("version number is missing {kind} version");
};
part.parse::<u32>()
.map_err(|_| eco_format!("`{part}` is not a valid {kind} version"))
};
let major = next("major")?;
let minor = next("minor")?;
let patch = next("patch")?;
if let Some(rest) = parts.next() {
bail!("version number has unexpected fourth component: `{rest}`");
}
Ok(Self { major, minor, patch })
}
}
impl Display for Version {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}.{}.{}", self.major, self.minor, self.patch)
}
}
impl Serialize for Version {
fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
s.collect_str(self)
}
}
impl<'de> Deserialize<'de> for Version {
fn deserialize<D: Deserializer<'de>>(d: D) -> Result<Self, D::Error> {
let string = EcoString::deserialize(d)?;
string.parse().map_err(serde::de::Error::custom)
}
}
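And a corresponding sketch for the version parser: exactly three numeric components are accepted, nothing more and nothing less.

use typst::file::Version;

fn version_demo() {
    let version: Version = "0.1.0".parse().unwrap();
    assert_eq!(version, Version { major: 0, minor: 1, patch: 0 });
    assert_eq!(version.to_string(), "0.1.0");

    assert!("0.1".parse::<Version>().is_err()); // missing patch version
    assert!("0.1.0.4".parse::<Version>().is_err()); // unexpected fourth component
}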
/// A parsed package manifest.
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub struct PackageManifest {
/// Details about the package itself.
pub package: PackageInfo,
}
impl PackageManifest {
/// Parse the manifest from raw bytes.
pub fn parse(bytes: &[u8]) -> StrResult<Self> {
let string = std::str::from_utf8(bytes).map_err(FileError::from)?;
toml::from_str(string).map_err(|err| {
eco_format!("package manifest is malformed: {}", err.message())
})
}
/// Ensure that this manifest is indeed for the specified package.
pub fn validate(&self, spec: &PackageSpec) -> StrResult<()> {
if self.package.name != spec.name {
bail!("package manifest contains mismatched name `{}`", self.package.name);
}
if self.package.version != spec.version {
bail!(
"package manifest contains mismatched version {}",
self.package.version
);
}
Ok(())
}
}
/// The `package` key in the manifest.
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub struct PackageInfo {
/// The name of the package within its namespace.
pub name: EcoString,
/// The package's version.
pub version: Version,
/// The path of the entrypoint into the package.
pub entrypoint: EcoString,
}
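Putting the manifest pieces together, a sketch (not in the commit) of the parse-then-validate flow that `import_package` performs on `/typst.toml`; the `adder` manifest mirrors the test package added below.

use typst::file::{PackageManifest, PackageSpec};

fn manifest_demo() {
    let raw = b"[package]\nname = \"adder\"\nversion = \"0.1.0\"\nentrypoint = \"lib.typ\"\n";
    let manifest = PackageManifest::parse(raw).unwrap();

    // The manifest must agree with the spec the user imported.
    let spec: PackageSpec = "@test/adder:0.1.0".parse().unwrap();
    manifest.validate(&spec).unwrap();

    // A mismatched version is rejected with a descriptive error.
    let newer: PackageSpec = "@test/adder:0.2.0".parse().unwrap();
    assert!(manifest.validate(&newer).is_err());
}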

View File

@ -15,9 +15,11 @@ use ttf_parser::GlyphId;
use self::book::find_name;
use crate::eval::Cast;
use crate::geom::Em;
use crate::util::Buffer;
use crate::util::Bytes;
/// An OpenType font.
///
/// Values of this type are cheap to clone and hash.
#[derive(Clone)]
pub struct Font(Arc<Repr>);
@ -26,7 +28,7 @@ struct Repr {
/// The raw font data, possibly shared with other fonts from the same
/// collection. The vector's allocation must not move, because `ttf` points
/// into it using unsafe code.
data: Buffer,
data: Bytes,
/// The font's index in the buffer.
index: u32,
/// Metadata about the font.
@ -41,7 +43,7 @@ struct Repr {
impl Font {
/// Parse a font from data and collection index.
pub fn new(data: Buffer, index: u32) -> Option<Self> {
pub fn new(data: Bytes, index: u32) -> Option<Self> {
// Safety:
// - The slice's location is stable in memory:
// - We don't move the underlying vector
@ -60,13 +62,13 @@ impl Font {
}
/// Parse all fonts in the given data.
pub fn iter(data: Buffer) -> impl Iterator<Item = Self> {
pub fn iter(data: Bytes) -> impl Iterator<Item = Self> {
let count = ttf_parser::fonts_in_collection(&data).unwrap_or(1);
(0..count).filter_map(move |index| Self::new(data.clone(), index))
}
/// The underlying buffer.
pub fn data(&self) -> &Buffer {
pub fn data(&self) -> &Bytes {
&self.0.data
}

View File

@ -1,5 +1,3 @@
use std::path::PathBuf;
use comemo::Track;
use ecow::EcoString;
@ -7,7 +5,6 @@ use crate::doc::Frame;
use crate::eval::{eval, Module, Route, Tracer, Value};
use crate::model::{Introspector, Label};
use crate::syntax::{ast, LinkedNode, Source, SyntaxKind};
use crate::util::PathExt;
use crate::World;
/// Try to determine a set of possible values for an expression.
@ -42,7 +39,7 @@ pub fn analyze_expr(world: &(dyn World + 'static), node: &LinkedNode) -> Vec<Val
world.track(),
route.track(),
tracer.track_mut(),
world.main(),
&world.main(),
)
.and_then(|module| {
typst::model::typeset(
@ -66,18 +63,11 @@ pub fn analyze_import(
source: &Source,
path: &str,
) -> Option<Module> {
let full: PathBuf = if let Some(path) = path.strip_prefix('/') {
world.root().join(path).normalize()
} else if let Some(dir) = source.path().parent() {
dir.join(path).normalize()
} else {
path.into()
};
let route = Route::default();
let mut tracer = Tracer::default();
let id = world.resolve(&full).ok()?;
let source = world.source(id);
eval(world.track(), route.track(), tracer.track_mut(), source).ok()
let id = source.id().join(path).ok()?;
let source = world.source(id).ok()?;
eval(world.track(), route.track(), tracer.track_mut(), &source).ok()
}
/// Find all labels and details for them.
@ -112,7 +102,7 @@ pub fn analyze_labels(
let split = output.len();
// Bibliography keys.
for (key, detail) in (items.bibliography_keys)(world.track(), introspector.track()) {
for (key, detail) in (items.bibliography_keys)(introspector.track()) {
output.push((Label(key), detail));
}

View File

@ -3,16 +3,17 @@ use std::num::NonZeroUsize;
use ecow::EcoString;
use crate::doc::{Destination, Frame, FrameItem, Meta, Position};
use crate::file::FileId;
use crate::geom::{Geometry, Point, Size};
use crate::model::Introspector;
use crate::syntax::{LinkedNode, Source, SourceId, Span, SyntaxKind};
use crate::syntax::{LinkedNode, Source, Span, SyntaxKind};
use crate::World;
/// Where to [jump](jump_from_click) to.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Jump {
/// Jump to a position in a source file.
Source(SourceId, usize),
Source(FileId, usize),
/// Jump to an external URL.
Url(EcoString),
/// Jump to a point on a page.
@ -21,9 +22,9 @@ pub enum Jump {
impl Jump {
fn from_span(world: &dyn World, span: Span) -> Option<Self> {
let source = world.source(span.source());
let source = world.source(span.id()).ok()?;
let node = source.find(span)?;
Some(Self::Source(source.id(), node.offset()))
Some(Self::Source(span.id(), node.offset()))
}
}
@ -78,7 +79,7 @@ pub fn jump_from_click(
Size::new(width, text.size),
click,
) {
let source = world.source(span.source());
let source = world.source(span.id()).ok()?;
let node = source.find(span)?;
let pos = if node.kind() == SyntaxKind::Text {
let range = node.range();

View File

@ -18,7 +18,7 @@ use usvg::{TreeParsing, TreeTextToPath};
use crate::diag::{format_xml_like_error, StrResult};
use crate::font::Font;
use crate::geom::Axes;
use crate::util::Buffer;
use crate::util::Bytes;
use crate::World;
/// A raster or vector image.
@ -31,7 +31,7 @@ pub struct Image(Arc<Prehashed<Repr>>);
#[derive(Hash)]
struct Repr {
/// The raw, undecoded image data.
data: Buffer,
data: Bytes,
/// The format of the encoded `buffer`.
format: ImageFormat,
/// The size of the image.
@ -47,7 +47,7 @@ impl Image {
/// Create an image from a buffer and a format.
#[comemo::memoize]
pub fn new(
data: Buffer,
data: Bytes,
format: ImageFormat,
alt: Option<EcoString>,
) -> StrResult<Self> {
@ -71,7 +71,7 @@ impl Image {
/// Create a font-dependent image from a buffer and a format.
#[comemo::memoize]
pub fn with_fonts(
data: Buffer,
data: Bytes,
format: ImageFormat,
world: Tracked<dyn World + '_>,
fallback_family: Option<&str>,
@ -95,7 +95,7 @@ impl Image {
}
/// The raw image data.
pub fn data(&self) -> &Buffer {
pub fn data(&self) -> &Bytes {
&self.0.data
}
@ -234,7 +234,7 @@ pub struct IccProfile(pub Vec<u8>);
/// Decode a raster image.
#[comemo::memoize]
fn decode_raster(data: &Buffer, format: RasterFormat) -> StrResult<Arc<DecodedImage>> {
fn decode_raster(data: &Bytes, format: RasterFormat) -> StrResult<Arc<DecodedImage>> {
fn decode_with<'a, T: ImageDecoder<'a>>(
decoder: ImageResult<T>,
) -> ImageResult<(image::DynamicImage, Option<IccProfile>)> {
@ -259,7 +259,7 @@ fn decode_raster(data: &Buffer, format: RasterFormat) -> StrResult<Arc<DecodedIm
/// Decode an SVG image.
#[comemo::memoize]
fn decode_svg(
data: &Buffer,
data: &Bytes,
loader: Tracked<dyn SvgFontLoader + '_>,
) -> StrResult<Arc<DecodedImage>> {
// Disable usvg's default to "Times New Roman". Instead, we default to

View File

@ -45,6 +45,7 @@ pub mod diag;
pub mod eval;
pub mod doc;
pub mod export;
pub mod file;
pub mod font;
pub mod geom;
pub mod ide;
@ -52,16 +53,15 @@ pub mod image;
pub mod model;
pub mod syntax;
use std::path::Path;
use comemo::{Prehashed, Track, TrackedMut};
use crate::diag::{FileResult, SourceResult};
use crate::doc::Document;
use crate::eval::{Datetime, Library, Route, Tracer};
use crate::file::FileId;
use crate::font::{Font, FontBook};
use crate::syntax::{Source, SourceId};
use crate::util::Buffer;
use crate::syntax::Source;
use crate::util::Bytes;
/// Compile a source file into a fully layouted document.
#[tracing::instrument(skip(world))]
@ -79,7 +79,7 @@ pub fn compile(world: &dyn World) -> SourceResult<Document> {
world,
route.track(),
TrackedMut::reborrow_mut(&mut tracer),
world.main(),
&world.main(),
)?;
// Typeset the module's contents.
@ -87,35 +87,38 @@ pub fn compile(world: &dyn World) -> SourceResult<Document> {
}
/// The environment in which typesetting occurs.
///
/// All loading functions (`main`, `source`, `file`, `font`) should perform
/// internal caching so that they are relatively cheap on repeated invocations
/// with the same argument. [`Source`], [`Bytes`], and [`Font`] are
/// all reference-counted and thus cheap to clone.
///
/// The compiler doesn't do the caching itself because the world has much more
/// information on when something can change. For example, fonts typically don't
/// change and can thus even be cached across multiple compilations (for
/// long-running applications like `typst watch`). Source files on the other
/// hand can change and should thus be cleared after each compilation. Advanced
/// clients like language servers can also retain the source files and
/// [edit](Source::edit) them in-place to benefit from better incremental
/// performance.
#[comemo::track]
pub trait World {
/// The path relative to which absolute paths are resolved.
///
/// Defaults to the empty path.
fn root(&self) -> &Path {
Path::new("")
}
/// The standard library.
fn library(&self) -> &Prehashed<Library>;
/// The main source file.
fn main(&self) -> &Source;
/// Try to resolve the unique id of a source file.
fn resolve(&self, path: &Path) -> FileResult<SourceId>;
/// Access a source file by id.
fn source(&self, id: SourceId) -> &Source;
/// Metadata about all known fonts.
fn book(&self) -> &Prehashed<FontBook>;
/// Try to access the font with the given id.
fn font(&self, id: usize) -> Option<Font>;
/// Access the main source file.
fn main(&self) -> Source;
/// Try to access a file at a path.
fn file(&self, path: &Path) -> FileResult<Buffer>;
/// Try to access the specified source file.
fn source(&self, id: FileId) -> FileResult<Source>;
/// Try to access the specified file.
fn file(&self, id: FileId) -> FileResult<Bytes>;
/// Try to access the font with the given index in the font book.
fn font(&self, index: usize) -> Option<Font>;
/// Get the current date.
///

View File

@ -3,7 +3,7 @@ use unicode_ident::{is_xid_continue, is_xid_start};
use unicode_segmentation::UnicodeSegmentation;
use unscanny::Scanner;
use super::{ErrorPos, SyntaxKind};
use super::SyntaxKind;
/// Splits up a string of source code into tokens.
#[derive(Clone)]
@ -16,7 +16,7 @@ pub(super) struct Lexer<'s> {
/// Whether the last token contained a newline.
newline: bool,
/// An error for the last token.
error: Option<(EcoString, ErrorPos)>,
error: Option<EcoString>,
}
/// What kind of tokens to emit.
@ -69,7 +69,7 @@ impl<'s> Lexer<'s> {
}
/// Take out the last error, if any.
pub fn take_error(&mut self) -> Option<(EcoString, ErrorPos)> {
pub fn take_error(&mut self) -> Option<EcoString> {
self.error.take()
}
}
@ -77,7 +77,7 @@ impl<'s> Lexer<'s> {
impl Lexer<'_> {
/// Construct a full-positioned syntax error.
fn error(&mut self, message: impl Into<EcoString>) -> SyntaxKind {
self.error = Some((message.into(), ErrorPos::Full));
self.error = Some(message.into());
SyntaxKind::Error
}
}

View File

@ -12,9 +12,9 @@ mod span;
pub use self::kind::SyntaxKind;
pub use self::lexer::{is_ident, is_newline};
pub use self::node::{ErrorPos, LinkedChildren, LinkedNode, SyntaxNode};
pub use self::node::{LinkedChildren, LinkedNode, SyntaxNode};
pub use self::parser::{parse, parse_code};
pub use self::source::{Source, SourceId};
pub use self::source::Source;
pub use self::span::{Span, Spanned};
pub(crate) use self::lexer::{is_id_continue, is_id_start};

View File

@ -6,8 +6,9 @@ use std::sync::Arc;
use ecow::EcoString;
use super::ast::AstNode;
use super::{SourceId, Span, SyntaxKind};
use super::{Span, SyntaxKind};
use crate::diag::SourceError;
use crate::file::FileId;
/// A node in the untyped syntax tree.
#[derive(Clone, Eq, PartialEq, Hash)]
@ -36,12 +37,8 @@ impl SyntaxNode {
}
/// Create a new error node.
pub fn error(
message: impl Into<EcoString>,
text: impl Into<EcoString>,
pos: ErrorPos,
) -> Self {
Self(Repr::Error(Arc::new(ErrorNode::new(message, text, pos))))
pub fn error(message: impl Into<EcoString>, text: impl Into<EcoString>) -> Self {
Self(Repr::Error(Arc::new(ErrorNode::new(message, text))))
}
/// The type of the node.
@ -145,7 +142,7 @@ impl SyntaxNode {
}
if let Repr::Error(error) = &self.0 {
vec![SourceError::new(error.span, error.message.clone()).with_pos(error.pos)]
vec![SourceError::new(error.span, error.message.clone())]
} else {
self.children()
.filter(|node| node.erroneous())
@ -186,14 +183,14 @@ impl SyntaxNode {
/// Convert the child to an error.
pub(super) fn convert_to_error(&mut self, message: impl Into<EcoString>) {
let text = std::mem::take(self).into_text();
*self = SyntaxNode::error(message, text, ErrorPos::Full);
*self = SyntaxNode::error(message, text);
}
/// Assign spans to each node.
#[tracing::instrument(skip_all)]
pub(super) fn numberize(
&mut self,
id: SourceId,
id: FileId,
within: Range<u64>,
) -> NumberingResult {
if within.start >= within.end {
@ -285,7 +282,7 @@ impl Debug for SyntaxNode {
impl Default for SyntaxNode {
fn default() -> Self {
Self::error("", "", ErrorPos::Full)
Self::error("", "")
}
}
@ -381,7 +378,7 @@ impl InnerNode {
/// a `range` of its children.
fn numberize(
&mut self,
id: SourceId,
id: FileId,
range: Option<Range<usize>>,
within: Range<u64>,
) -> NumberingResult {
@ -492,7 +489,7 @@ impl InnerNode {
// Try to renumber.
let within = start_number..end_number;
let id = self.span.source();
let id = self.span.id();
if self.numberize(id, Some(renumber), within).is_ok() {
return Ok(());
}
@ -540,23 +537,16 @@ struct ErrorNode {
message: EcoString,
/// The source text of the node.
text: EcoString,
/// Where in the node an error should be annotated.
pos: ErrorPos,
/// The node's span.
span: Span,
}
impl ErrorNode {
/// Create new error node.
fn new(
message: impl Into<EcoString>,
text: impl Into<EcoString>,
pos: ErrorPos,
) -> Self {
fn new(message: impl Into<EcoString>, text: impl Into<EcoString>) -> Self {
Self {
message: message.into(),
text: text.into(),
pos,
span: Span::detached(),
}
}
@ -573,17 +563,6 @@ impl Debug for ErrorNode {
}
}
/// Where in a node an error should be annotated.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum ErrorPos {
/// Over the full width of the node.
Full,
/// At the start of the node.
Start,
/// At the end of the node.
End,
}
/// A syntax node in a context.
///
/// Knows its exact offset in the file and provides access to its

View File

@ -4,7 +4,7 @@ use std::ops::Range;
use ecow::{eco_format, EcoString};
use unicode_math_class::MathClass;
use super::{ast, is_newline, ErrorPos, LexMode, Lexer, SyntaxKind, SyntaxNode};
use super::{ast, is_newline, LexMode, Lexer, SyntaxKind, SyntaxNode};
/// Parse a source file.
pub fn parse(text: &str) -> SyntaxNode {
@ -1560,8 +1560,8 @@ impl<'s> Parser<'s> {
fn save(&mut self) {
let text = self.current_text();
if self.at(SyntaxKind::Error) {
let (message, pos) = self.lexer.take_error().unwrap();
self.nodes.push(SyntaxNode::error(message, text, pos));
let message = self.lexer.take_error().unwrap();
self.nodes.push(SyntaxNode::error(message, text));
} else {
self.nodes.push(SyntaxNode::leaf(self.current, text));
}
@ -1608,14 +1608,14 @@ impl<'s> Parser<'s> {
.map_or(true, |child| child.kind() != SyntaxKind::Error)
{
let message = eco_format!("expected {}", thing);
self.nodes.push(SyntaxNode::error(message, "", ErrorPos::Full));
self.nodes.push(SyntaxNode::error(message, ""));
}
self.skip();
}
fn expected_at(&mut self, m: Marker, thing: &str) {
let message = eco_format!("expected {}", thing);
let error = SyntaxNode::error(message, "", ErrorPos::Full);
let error = SyntaxNode::error(message, "");
self.nodes.insert(m.0, error);
}

View File

@ -19,7 +19,7 @@ pub fn reparse(
replacement_len: usize,
) -> Range<usize> {
try_reparse(text, replaced, replacement_len, None, root, 0).unwrap_or_else(|| {
let id = root.span().source();
let id = root.span().id();
*root = parse(text);
root.numberize(id, Span::FULL).unwrap();
0..text.len()

View File

@ -3,105 +3,107 @@
use std::fmt::{self, Debug, Formatter};
use std::hash::{Hash, Hasher};
use std::ops::Range;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use comemo::Prehashed;
use unscanny::Scanner;
use super::ast::Markup;
use super::reparser::reparse;
use super::{is_newline, parse, LinkedNode, Span, SyntaxNode};
use crate::diag::SourceResult;
use crate::util::{PathExt, StrExt};
use crate::file::FileId;
use crate::util::StrExt;
/// A source file.
///
/// All line and column indices start at zero, just like byte indices. Only for
/// user-facing display, you should add 1 to them.
///
/// Values of this type are cheap to clone and hash.
#[derive(Clone)]
pub struct Source {
id: SourceId,
path: PathBuf,
lines: Vec<Line>,
pub struct Source(Arc<Repr>);
/// The internal representation.
#[derive(Clone)]
struct Repr {
id: FileId,
text: Prehashed<String>,
root: Prehashed<SyntaxNode>,
lines: Vec<Line>,
}
impl Source {
/// Create a new source file.
///
/// The path must be canonical, so that the same source file has the same
/// id even if accessed through different paths.
#[tracing::instrument(skip_all)]
pub fn new(id: SourceId, path: &Path, text: String) -> Self {
pub fn new(id: FileId, text: String) -> Self {
let mut root = parse(&text);
root.numberize(id, Span::FULL).unwrap();
Self {
Self(Arc::new(Repr {
id,
path: path.normalize(),
lines: lines(&text),
text: Prehashed::new(text),
root: Prehashed::new(root),
}
}))
}
/// Create a source file without a real id and path, usually for testing.
pub fn detached(text: impl Into<String>) -> Self {
Self::new(SourceId::detached(), Path::new(""), text.into())
Self::new(FileId::detached(), text.into())
}
/// Create a source file with the same synthetic span for all nodes.
pub fn synthesized(text: String, span: Span) -> Self {
let mut root = parse(&text);
root.synthesize(span);
Self {
id: SourceId::detached(),
path: PathBuf::new(),
Self(Arc::new(Repr {
id: FileId::detached(),
lines: lines(&text),
text: Prehashed::new(text),
root: Prehashed::new(root),
}
}))
}
/// The root node of the file's untyped syntax tree.
pub fn root(&self) -> &SyntaxNode {
&self.root
&self.0.root
}
/// The root node of the file's typed abstract syntax tree.
pub fn ast(&self) -> SourceResult<Markup> {
let errors = self.root.errors();
let errors = self.root().errors();
if errors.is_empty() {
Ok(self.root.cast().expect("root node must be markup"))
Ok(self.root().cast().expect("root node must be markup"))
} else {
Err(Box::new(errors))
}
}
/// The id of the source file.
pub fn id(&self) -> SourceId {
self.id
}
/// The normalized path to the source file.
pub fn path(&self) -> &Path {
&self.path
pub fn id(&self) -> FileId {
self.0.id
}
/// The whole source as a string slice.
pub fn text(&self) -> &str {
&self.text
&self.0.text
}
/// Slice out the part of the source code enclosed by the range.
pub fn get(&self, range: Range<usize>) -> Option<&str> {
self.text.get(range)
self.text().get(range)
}
/// Fully replace the source text.
pub fn replace(&mut self, text: String) {
self.text = Prehashed::new(text);
self.lines = lines(&self.text);
let mut root = parse(&self.text);
root.numberize(self.id, Span::FULL).unwrap();
self.root = Prehashed::new(root);
let inner = Arc::make_mut(&mut self.0);
inner.text = Prehashed::new(text);
inner.lines = lines(&inner.text);
let mut root = parse(&inner.text);
root.numberize(inner.id, Span::FULL).unwrap();
inner.root = Prehashed::new(root);
}
/// Edit the source file by replacing the given range.
@ -112,72 +114,70 @@ impl Source {
#[track_caller]
pub fn edit(&mut self, replace: Range<usize>, with: &str) -> Range<usize> {
let start_byte = replace.start;
let start_utf16 = self.byte_to_utf16(replace.start).unwrap();
self.text.update(|text| text.replace_range(replace.clone(), with));
let start_utf16 = self.byte_to_utf16(start_byte).unwrap();
let line = self.byte_to_line(start_byte).unwrap();
let inner = Arc::make_mut(&mut self.0);
// Update the text itself.
inner.text.update(|text| text.replace_range(replace.clone(), with));
// Remove invalidated line starts.
let line = self.byte_to_line(start_byte).unwrap();
self.lines.truncate(line + 1);
inner.lines.truncate(line + 1);
// Handle adjoining of \r and \n.
if self.text[..start_byte].ends_with('\r') && with.starts_with('\n') {
self.lines.pop();
if inner.text[..start_byte].ends_with('\r') && with.starts_with('\n') {
inner.lines.pop();
}
// Recalculate the line starts after the edit.
self.lines
.extend(lines_from(start_byte, start_utf16, &self.text[start_byte..]));
inner.lines.extend(lines_from(
start_byte,
start_utf16,
&inner.text[start_byte..],
));
// Incrementally reparse the replaced range.
self.root
.update(|root| reparse(root, &self.text, replace, with.len()))
inner
.root
.update(|root| reparse(root, &inner.text, replace, with.len()))
}
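A quick sketch (not part of the commit) of what an in-place edit on a detached source looks like from the outside; the byte range and replacement text are only illustrative.

use typst::syntax::Source;

fn edit_demo() {
    let mut source = Source::detached("Hello *world*!");

    // Splice in replacement text; the line table and syntax tree are
    // repaired incrementally and the reparsed range is returned.
    let _reparsed = source.edit(6..13, "_there_");
    assert_eq!(source.text(), "Hello _there_!");
}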
/// Get the length of the file in UTF-8 encoded bytes.
pub fn len_bytes(&self) -> usize {
self.text.len()
self.text().len()
}
/// Get the length of the file in UTF-16 code units.
pub fn len_utf16(&self) -> usize {
let last = self.lines.last().unwrap();
last.utf16_idx + self.text[last.byte_idx..].len_utf16()
let last = self.0.lines.last().unwrap();
last.utf16_idx + self.0.text[last.byte_idx..].len_utf16()
}
/// Get the length of the file in lines.
pub fn len_lines(&self) -> usize {
self.lines.len()
self.0.lines.len()
}
/// Find the node with the given span.
///
/// Returns `None` if the span does not point into this source file.
pub fn find(&self, span: Span) -> Option<LinkedNode<'_>> {
LinkedNode::new(&self.root).find(span)
}
/// Map a span that points into this source file to a byte range.
///
/// Panics if the span does not point into this source file.
#[track_caller]
pub fn range(&self, span: Span) -> Range<usize> {
self.find(span)
.expect("span does not point into this source file")
.range()
LinkedNode::new(self.root()).find(span)
}
/// Return the index of the UTF-16 code unit at the byte index.
pub fn byte_to_utf16(&self, byte_idx: usize) -> Option<usize> {
let line_idx = self.byte_to_line(byte_idx)?;
let line = self.lines.get(line_idx)?;
let head = self.text.get(line.byte_idx..byte_idx)?;
let line = self.0.lines.get(line_idx)?;
let head = self.0.text.get(line.byte_idx..byte_idx)?;
Some(line.utf16_idx + head.len_utf16())
}
/// Return the index of the line that contains the given byte index.
pub fn byte_to_line(&self, byte_idx: usize) -> Option<usize> {
(byte_idx <= self.text.len()).then(|| {
match self.lines.binary_search_by_key(&byte_idx, |line| line.byte_idx) {
(byte_idx <= self.0.text.len()).then(|| {
match self.0.lines.binary_search_by_key(&byte_idx, |line| line.byte_idx) {
Ok(i) => i,
Err(i) => i - 1,
}
@ -197,33 +197,33 @@ impl Source {
/// Return the byte index at the UTF-16 code unit.
pub fn utf16_to_byte(&self, utf16_idx: usize) -> Option<usize> {
let line = self.lines.get(
match self.lines.binary_search_by_key(&utf16_idx, |line| line.utf16_idx) {
let line = self.0.lines.get(
match self.0.lines.binary_search_by_key(&utf16_idx, |line| line.utf16_idx) {
Ok(i) => i,
Err(i) => i - 1,
},
)?;
let mut k = line.utf16_idx;
for (i, c) in self.text[line.byte_idx..].char_indices() {
for (i, c) in self.0.text[line.byte_idx..].char_indices() {
if k >= utf16_idx {
return Some(line.byte_idx + i);
}
k += c.len_utf16();
}
(k == utf16_idx).then_some(self.text.len())
(k == utf16_idx).then_some(self.0.text.len())
}
/// Return the byte position at which the given line starts.
pub fn line_to_byte(&self, line_idx: usize) -> Option<usize> {
self.lines.get(line_idx).map(|line| line.byte_idx)
self.0.lines.get(line_idx).map(|line| line.byte_idx)
}
/// Return the range which encloses the given line.
pub fn line_to_range(&self, line_idx: usize) -> Option<Range<usize>> {
let start = self.line_to_byte(line_idx)?;
let end = self.line_to_byte(line_idx + 1).unwrap_or(self.text.len());
let end = self.line_to_byte(line_idx + 1).unwrap_or(self.0.text.len());
Some(start..end)
}
@ -248,42 +248,21 @@ impl Source {
impl Debug for Source {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "Source({})", self.path.display())
write!(f, "Source({})", self.id().path().display())
}
}
impl Hash for Source {
fn hash<H: Hasher>(&self, state: &mut H) {
self.id.hash(state);
self.path.hash(state);
self.text.hash(state);
self.root.hash(state);
self.0.id.hash(state);
self.0.text.hash(state);
self.0.root.hash(state);
}
}
/// A unique identifier for a loaded source file.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct SourceId(u16);
impl SourceId {
/// Create a new source id for a file that is not part of the world.
pub const fn detached() -> Self {
Self(u16::MAX)
}
/// Whether the source id is the detached one.
pub const fn is_detached(self) -> bool {
self.0 == Self::detached().0
}
/// Create a source id from a number.
pub const fn from_u16(v: u16) -> Self {
Self(v)
}
/// Extract the underlying number.
pub const fn as_u16(self) -> u16 {
self.0
impl AsRef<str> for Source {
fn as_ref(&self) -> &str {
self.text()
}
}
@ -309,7 +288,7 @@ fn lines_from(
utf16_offset: usize,
text: &str,
) -> impl Iterator<Item = Line> + '_ {
let mut s = Scanner::new(text);
let mut s = unscanny::Scanner::new(text);
let mut utf16_idx = utf16_offset;
std::iter::from_fn(move || {
@ -340,7 +319,7 @@ mod tests {
fn test_source_file_new() {
let source = Source::detached(TEST);
assert_eq!(
source.lines,
source.0.lines,
[
Line { byte_idx: 0, utf16_idx: 0 },
Line { byte_idx: 7, utf16_idx: 6 },
@ -421,8 +400,8 @@ mod tests {
let mut source = Source::detached(prev);
let result = Source::detached(after);
source.edit(range, with);
assert_eq!(source.text, result.text);
assert_eq!(source.lines, result.lines);
assert_eq!(source.text(), result.text());
assert_eq!(source.0.lines, result.0.lines);
}
// Test inserting at the beginning.

View File

@ -2,13 +2,15 @@ use std::fmt::{self, Debug, Formatter};
use std::num::NonZeroU64;
use std::ops::Range;
use super::SourceId;
use super::Source;
use crate::file::FileId;
use crate::World;
/// A unique identifier for a syntax node.
///
/// This is used throughout the compiler to track which source section an error
/// or element stems from. Can be [mapped back](super::Source::range) to a byte
/// range for user facing display.
/// or element stems from. Can be [mapped back](Self::range) to a byte range for
/// user facing display.
///
/// During editing, the span values stay mostly stable, even for nodes behind an
/// insertion. This is not true for simple ranges as they would shift. Spans can
@ -39,7 +41,7 @@ impl Span {
///
/// Panics if the `number` is not contained in `FULL`.
#[track_caller]
pub const fn new(id: SourceId, number: u64) -> Self {
pub const fn new(id: FileId, number: u64) -> Self {
assert!(
Self::FULL.start <= number && number < Self::FULL.end,
"span number outside valid range"
@ -50,12 +52,12 @@ impl Span {
/// A span that does not point into any source file.
pub const fn detached() -> Self {
Self::pack(SourceId::detached(), Self::DETACHED)
Self::pack(FileId::detached(), Self::DETACHED)
}
/// Pack the components into a span.
#[track_caller]
const fn pack(id: SourceId, number: u64) -> Span {
const fn pack(id: FileId, number: u64) -> Span {
let bits = ((id.as_u16() as u64) << Self::BITS) | number;
match NonZeroU64::new(bits) {
Some(v) => Self(v),
@ -63,20 +65,38 @@ impl Span {
}
}
/// Whether the span is detached.
pub const fn is_detached(self) -> bool {
self.source().is_detached()
}
/// The id of the source file the span points into.
pub const fn source(self) -> SourceId {
SourceId::from_u16((self.0.get() >> Self::BITS) as u16)
pub const fn id(self) -> FileId {
FileId::from_u16((self.0.get() >> Self::BITS) as u16)
}
/// The unique number of the span within its source file.
pub const fn number(self) -> u64 {
self.0.get() & ((1 << Self::BITS) - 1)
}
/// Whether the span is detached.
pub const fn is_detached(self) -> bool {
self.id().is_detached()
}
/// Get the byte range for this span.
#[track_caller]
pub fn range(self, world: &dyn World) -> Range<usize> {
let source = world
.source(self.id())
.expect("span does not point into any source file");
self.range_in(&source)
}
/// Get the byte range for this span in the given source file.
#[track_caller]
pub fn range_in(self, source: &Source) -> Range<usize> {
source
.find(self)
.expect("span does not point into this source file")
.range()
}
}
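A short sketch (not in the commit) of the new span-to-range mapping on a detached source; it assumes `find` locates a node by its own span, as the reparse tests below rely on.

use typst::syntax::Source;

fn span_range_demo() {
    let source = Source::detached("= Intro");

    // The root node's span maps back to the full byte range of the file.
    let root = source.root();
    assert_eq!(root.span().range_in(&source), 0..7);
}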
/// A value with a span locating it in the source code.
@ -116,13 +136,13 @@ impl<T: Debug> Debug for Spanned<T> {
#[cfg(test)]
mod tests {
use super::{SourceId, Span};
use super::{FileId, Span};
#[test]
fn test_span_encoding() {
let id = SourceId::from_u16(5);
let id = FileId::from_u16(5);
let span = Span::new(id, 10);
assert_eq!(span.source(), id);
assert_eq!(span.id(), id);
assert_eq!(span.number(), 10);
}
}

View File

@ -5,11 +5,11 @@ use std::sync::Arc;
use comemo::Prehashed;
/// A shared buffer that is cheap to clone and hash.
/// A shared byte buffer that is cheap to clone and hash.
#[derive(Clone, Hash, Eq, PartialEq)]
pub struct Buffer(Arc<Prehashed<Cow<'static, [u8]>>>);
pub struct Bytes(Arc<Prehashed<Cow<'static, [u8]>>>);
impl Buffer {
impl Bytes {
/// Create a buffer from a static byte slice.
pub fn from_static(slice: &'static [u8]) -> Self {
Self(Arc::new(Prehashed::new(Cow::Borrowed(slice))))
@ -26,19 +26,19 @@ impl Buffer {
}
}
impl From<&[u8]> for Buffer {
impl From<&[u8]> for Bytes {
fn from(slice: &[u8]) -> Self {
Self(Arc::new(Prehashed::new(slice.to_vec().into())))
}
}
impl From<Vec<u8>> for Buffer {
impl From<Vec<u8>> for Bytes {
fn from(vec: Vec<u8>) -> Self {
Self(Arc::new(Prehashed::new(vec.into())))
}
}
impl Deref for Buffer {
impl Deref for Bytes {
type Target = [u8];
fn deref(&self) -> &Self::Target {
@ -46,14 +46,14 @@ impl Deref for Buffer {
}
}
impl AsRef<[u8]> for Buffer {
impl AsRef<[u8]> for Bytes {
fn as_ref(&self) -> &[u8] {
self
}
}
impl Debug for Buffer {
impl Debug for Bytes {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
f.pad("Buffer(..)")
write!(f, "bytes({})", self.len())
}
}
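A tiny sketch (not in the commit) of the renamed `Bytes` type in use:

use typst::util::Bytes;

fn bytes_demo() {
    let bytes = Bytes::from(vec![1u8, 2, 3]);

    // Cloning only bumps a reference count; the data and its hash are shared.
    let copy = bytes.clone();
    assert_eq!(bytes, copy);

    // Derefs to a byte slice and reports its length in `Debug` output.
    assert_eq!(&bytes[..], &[1u8, 2, 3][..]);
    assert_eq!(format!("{bytes:?}"), "bytes(3)");
}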

View File

@ -2,9 +2,9 @@
pub mod fat;
mod buffer;
mod bytes;
pub use buffer::Buffer;
pub use bytes::Bytes;
use std::fmt::{self, Debug, Formatter};
use std::hash::Hash;
@ -125,26 +125,60 @@ where
pub trait PathExt {
/// Lexically normalize a path.
fn normalize(&self) -> PathBuf;
/// Treat `self` as a virtual root relative to which the `path` is resolved.
///
/// Returns `None` if the path lexically escapes the root. The path
/// might still escape through symlinks.
fn join_rooted(&self, path: &Path) -> Option<PathBuf>;
}
impl PathExt for Path {
#[tracing::instrument(skip_all)]
fn normalize(&self) -> PathBuf {
let mut out = PathBuf::new();
for component in self.components() {
match component {
Component::CurDir => {}
Component::ParentDir => match out.components().next_back() {
Some(Component::RootDir) => {}
Some(Component::Normal(_)) => {
out.pop();
}
_ => out.push(component),
},
_ => out.push(component),
Component::Prefix(_) | Component::RootDir | Component::Normal(_) => {
out.push(component)
}
}
}
if out.as_os_str().is_empty() {
out.push(Component::CurDir);
}
out
}
fn join_rooted(&self, path: &Path) -> Option<PathBuf> {
let mut parts: Vec<_> = self.components().collect();
let root = parts.len();
for component in path.components() {
match component {
Component::Prefix(_) => return None,
Component::RootDir => parts.truncate(root),
Component::CurDir => {}
Component::ParentDir => {
if parts.len() <= root {
return None;
}
parts.pop();
}
Component::Normal(_) => parts.push(component),
}
}
if parts.len() < root {
return None;
}
Some(parts.into_iter().collect())
}
}
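A short sketch (not in the commit) of how `join_rooted` confines lookups to a virtual root; the package directory and file names are purely illustrative.

use std::path::{Path, PathBuf};
use typst::util::PathExt;

fn join_rooted_demo() {
    let root = Path::new("packages/adder-0.1.0");

    // Absolute paths are re-rooted inside the virtual root.
    let ok = root.join_rooted(Path::new("/lib.typ"));
    assert_eq!(ok, Some(PathBuf::from("packages/adder-0.1.0/lib.typ")));

    // Paths that lexically escape the root are rejected.
    assert_eq!(root.join_rooted(Path::new("../secret.typ")), None);
}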
/// Format pieces separated with commas and a final "and" or "or".

View File

@ -10,7 +10,6 @@ publish = false
typst = { path = ".." }
typst-library = { path = "../library" }
comemo = "0.3"
elsa = "1.8"
iai = { git = "https://github.com/reknih/iai" }
once_cell = "1"
oxipng = "8.0.0"

1
tests/packages/adder-0.1.0/lib.typ vendored Normal file
View File

@ -0,0 +1 @@
#let add(x, y) = x + y

4
tests/packages/adder-0.1.0/typst.toml vendored Normal file
View File

@ -0,0 +1,4 @@
[package]
name = "adder"
version = "0.1.0"
entrypoint = "lib.typ"

View File

@ -1,13 +1,12 @@
use std::path::Path;
use comemo::{Prehashed, Track, Tracked};
use iai::{black_box, main, Iai};
use typst::diag::{FileError, FileResult};
use typst::diag::FileResult;
use typst::eval::{Datetime, Library};
use typst::file::FileId;
use typst::font::{Font, FontBook};
use typst::geom::Color;
use typst::syntax::{Source, SourceId};
use typst::util::Buffer;
use typst::syntax::Source;
use typst::util::Bytes;
use typst::World;
use unscanny::Scanner;
@ -124,31 +123,27 @@ impl World for BenchWorld {
&self.library
}
fn main(&self) -> &Source {
&self.source
}
fn resolve(&self, path: &Path) -> FileResult<SourceId> {
Err(FileError::NotFound(path.into()))
}
fn source(&self, _: SourceId) -> &Source {
&self.source
}
fn book(&self) -> &Prehashed<FontBook> {
&self.book
}
fn main(&self) -> Source {
self.source.clone()
}
fn source(&self, _: FileId) -> FileResult<Source> {
unimplemented!()
}
fn file(&self, _: FileId) -> FileResult<Bytes> {
unimplemented!()
}
fn font(&self, _: usize) -> Option<Font> {
Some(self.font.clone())
}
fn file(&self, path: &Path) -> FileResult<Buffer> {
Err(FileError::NotFound(path.into()))
}
fn today(&self, _: Option<i64>) -> Option<Datetime> {
Some(Datetime::from_ymd(1970, 1, 1).unwrap())
unimplemented!()
}
}

View File

@ -13,11 +13,11 @@ use std::path::{Path, PathBuf};
use clap::Parser;
use comemo::{Prehashed, Track};
use elsa::FrozenVec;
use oxipng::{InFile, Options, OutFile};
use rayon::iter::{ParallelBridge, ParallelIterator};
use std::cell::OnceCell;
use tiny_skia as sk;
use typst::file::FileId;
use unscanny::Scanner;
use walkdir::WalkDir;
@ -26,8 +26,8 @@ use typst::doc::{Document, Frame, FrameItem, Meta};
use typst::eval::{eco_format, func, Datetime, Library, NoneValue, Value};
use typst::font::{Font, FontBook};
use typst::geom::{Abs, Color, RgbaColor, Smart};
use typst::syntax::{Source, SourceId, Span, SyntaxNode};
use typst::util::{Buffer, PathExt};
use typst::syntax::{Source, Span, SyntaxNode};
use typst::util::{Bytes, PathExt};
use typst::World;
use typst_library::layout::{Margin, PageElem};
use typst_library::text::{TextElem, TextSize};
@ -197,34 +197,21 @@ fn library() -> Library {
}
/// A world that provides access to the tests environment.
#[derive(Clone)]
struct TestWorld {
print: PrintConfig,
main: FileId,
library: Prehashed<Library>,
book: Prehashed<FontBook>,
fonts: Vec<Font>,
paths: RefCell<HashMap<PathBuf, PathSlot>>,
sources: FrozenVec<Box<Source>>,
main: SourceId,
}
impl Clone for TestWorld {
fn clone(&self) -> Self {
Self {
print: self.print,
library: self.library.clone(),
book: self.book.clone(),
fonts: self.fonts.clone(),
paths: self.paths.clone(),
sources: FrozenVec::from_iter(self.sources.iter().cloned().map(Box::new)),
main: self.main,
}
}
}
#[derive(Default, Clone)]
#[derive(Clone)]
struct PathSlot {
source: OnceCell<FileResult<SourceId>>,
buffer: OnceCell<FileResult<Buffer>>,
system_path: PathBuf,
source: OnceCell<FileResult<Source>>,
buffer: OnceCell<FileResult<Bytes>>,
}
impl TestWorld {
@ -243,92 +230,81 @@ impl TestWorld {
Self {
print,
main: FileId::detached(),
library: Prehashed::new(library()),
book: Prehashed::new(FontBook::from_fonts(&fonts)),
fonts,
paths: RefCell::default(),
sources: FrozenVec::new(),
main: SourceId::detached(),
}
}
}
impl World for TestWorld {
fn root(&self) -> &Path {
Path::new(FILE_DIR)
}
fn library(&self) -> &Prehashed<Library> {
&self.library
}
fn main(&self) -> &Source {
self.source(self.main)
}
fn resolve(&self, path: &Path) -> FileResult<SourceId> {
self.slot(path)
.source
.get_or_init(|| {
let buf = read(path)?;
let text = String::from_utf8(buf)?;
Ok(self.insert(path, text))
})
.clone()
}
fn source(&self, id: SourceId) -> &Source {
&self.sources[id.as_u16() as usize]
}
fn book(&self) -> &Prehashed<FontBook> {
&self.book
}
fn font(&self, id: usize) -> Option<Font> {
Some(self.fonts[id].clone())
fn main(&self) -> Source {
self.source(self.main).unwrap()
}
fn file(&self, path: &Path) -> FileResult<Buffer> {
self.slot(path)
.buffer
.get_or_init(|| read(path).map(Buffer::from))
fn source(&self, id: FileId) -> FileResult<Source> {
let slot = self.slot(id)?;
slot.source
.get_or_init(|| {
let buf = read(&slot.system_path)?;
let text = String::from_utf8(buf)?;
Ok(Source::new(id, text))
})
.clone()
}
fn file(&self, id: FileId) -> FileResult<Bytes> {
let slot = self.slot(id)?;
slot.buffer
.get_or_init(|| read(&slot.system_path).map(Bytes::from))
.clone()
}
fn font(&self, id: usize) -> Option<Font> {
Some(self.fonts[id].clone())
}
fn today(&self, _: Option<i64>) -> Option<Datetime> {
Some(Datetime::from_ymd(1970, 1, 1).unwrap())
}
}
impl TestWorld {
fn set(&mut self, path: &Path, text: String) -> SourceId {
let slot = self.slot(path);
let id = if let Some(&Ok(id)) = slot.source.get() {
drop(slot);
self.sources.as_mut()[id.as_u16() as usize].replace(text);
id
} else {
let id = self.insert(path, text);
slot.source.set(Ok(id)).unwrap();
drop(slot);
id
fn set(&mut self, path: &Path, text: String) -> Source {
self.main = FileId::new(None, path);
let mut slot = self.slot(self.main).unwrap();
let source = Source::new(self.main, text);
slot.source = OnceCell::from(Ok(source.clone()));
source
}
fn slot(&self, id: FileId) -> FileResult<RefMut<PathSlot>> {
let path = id.path();
let root: PathBuf = match id.package() {
Some(spec) => format!("packages/{}-{}", spec.name, spec.version).into(),
None if path.is_relative() => PathBuf::new(),
None => FILE_DIR.into(),
};
self.main = id;
id
}
fn slot(&self, path: &Path) -> RefMut<PathSlot> {
RefMut::map(self.paths.borrow_mut(), |paths| {
paths.entry(path.normalize()).or_default()
})
}
let system_path = root.join_rooted(id.path()).ok_or(FileError::AccessDenied)?;
fn insert(&self, path: &Path, text: String) -> SourceId {
let id = SourceId::from_u16(self.sources.len() as u16);
let source = Source::new(id, path, text);
self.sources.push(Box::new(source));
id
Ok(RefMut::map(self.paths.borrow_mut(), |paths| {
paths.entry(system_path.clone()).or_insert_with(|| PathSlot {
system_path,
source: OnceCell::new(),
buffer: OnceCell::new(),
})
}))
}
}
@ -522,26 +498,25 @@ fn test_part(
) -> (bool, bool, Vec<Frame>) {
let mut ok = true;
let id = world.set(src_path, text);
let source = world.source(id);
let source = world.set(src_path, text);
if world.print.syntax {
writeln!(output, "Syntax Tree:\n{:#?}\n", source.root()).unwrap();
}
let metadata = parse_part_metadata(source);
let metadata = parse_part_metadata(&source);
let compare_ref = metadata.part_configuration.compare_ref.unwrap_or(compare_ref);
let validate_hints =
metadata.part_configuration.validate_hints.unwrap_or(validate_hints);
ok &= test_spans(output, source.root());
ok &= test_reparse(output, world.source(id).text(), i, rng);
ok &= test_reparse(output, source.text(), i, rng);
if world.print.model {
let world = (world as &dyn World).track();
let route = typst::eval::Route::default();
let mut tracer = typst::eval::Tracer::default();
let module =
typst::eval::eval(world, route.track(), tracer.track_mut(), source).unwrap();
typst::eval::eval(world, route.track(), tracer.track_mut(), &source).unwrap();
writeln!(output, "Model:\n{:#?}\n", module.content()).unwrap();
}
@ -563,15 +538,17 @@ fn test_part(
// however, as the line of the hint is still verified.
let actual_errors_and_hints: HashSet<UserOutput> = errors
.into_iter()
.filter(|error| error.span.source() == id)
.inspect(|error| assert!(!error.span.is_detached()))
.filter(|error| error.span.id() == source.id())
.flat_map(|error| {
let range = error.span.range(world);
let output_error =
UserOutput::Error(error.range(world), error.message.replace('\\', "/"));
UserOutput::Error(range.clone(), error.message.replace('\\', "/"));
let hints = error
.hints
.iter()
.filter(|_| validate_hints) // No unexpected hints should be verified if disabled.
.map(|hint| UserOutput::Hint(error.range(world), hint.to_string()));
.map(|hint| UserOutput::Hint(range.clone(), hint.to_string()));
iter::once(output_error).chain(hints).collect::<Vec<_>>()
})
.collect();
@ -596,12 +573,12 @@ fn test_part(
for unexpected in unexpected_outputs {
write!(output, " Not annotated | ").unwrap();
print_user_output(output, source, line, unexpected)
print_user_output(output, &source, line, unexpected)
}
for missing in missing_outputs {
write!(output, " Not emitted | ").unwrap();
print_user_output(output, source, line, missing)
print_user_output(output, &source, line, missing)
}
}
@ -820,7 +797,7 @@ fn test_reparse(
let source = Source::detached(text);
let leafs = leafs(source.root());
let start = source.range(leafs[pick(0..leafs.len())].span()).start;
let start = source.find(leafs[pick(0..leafs.len())].span()).unwrap().offset();
let supplement = supplements[pick(0..supplements.len())];
ok &= apply(start..start, supplement);

View File

@ -1,4 +1,4 @@
// Test diagnostics.
// Test hints on diagnostics.
// Ref: false
---
@ -23,13 +23,17 @@
---
= Heading <intro>
// Error: 1:20-1:26 cannot reference heading without numbering
// Hint: 1:20-1:26 did you mean to use `#set heading(numbering: "1.")`?
// Hint: 1:20-1:26 you can enable heading numbering with `#set heading(numbering: "1.")`
Can not be used as @intro
---
// This test is more of a tooling test. It checks if hint annotation validation
// can be turned off.
// Hints: false
// This test is more of a tooling test. It checks if hint annotation validation can be turned off.
= Heading <intro>
// Error: 1:20-1:26 cannot reference heading without numbering
Can not be used as @intro

View File

@ -0,0 +1,64 @@
// Test package imports
// Ref: false
---
// Test import without items.
#import "@test/adder:0.1.0"
#test(adder.add(2, 8), 10)
---
// Test import with items.
#import "@test/adder:0.1.0": add
#test(add(2, 8), 10)
---
// Error: 9-13 `@` is not a valid package namespace
#import "@@": *
---
// Error: 9-16 package specification is missing name
#import "@heya": *
---
// Error: 9-15 `123` is not a valid package namespace
#import "@123": *
---
// Error: 9-17 package specification is missing name
#import "@test/": *
---
// Error: 9-22 package specification is missing version
#import "@test/mypkg": *
---
// Error: 9-20 `$$$` is not a valid package name
#import "@test/$$$": *
---
// Error: 9-23 package specification is missing version
#import "@test/mypkg:": *
---
// Error: 9-24 version number is missing minor version
#import "@test/mypkg:0": *
---
// Error: 9-29 `latest` is not a valid major version
#import "@test/mypkg:latest": *
---
// Error: 9-29 `-3` is not a valid major version
#import "@test/mypkg:-3.0.0": *
---
// Error: 9-26 version number is missing patch version
#import "@test/mypkg:0.3": *
---
// Error: 9-27 version number is missing patch version
#import "@test/mypkg:0.3.": *
---
// Error: 9-28 file not found (searched at typ/compiler/#test/mypkg:1.0.0)
#import "#test/mypkg:1.0.0": *

View File

@ -54,9 +54,9 @@ A #box(image("/tiger.jpg", height: 1cm, width: 80%)) B
#image("path/does/not/exist")
---
// Error: 8-21 unknown image format
// Error: 2-22 unknown image format
#image("./image.typ")
---
// Error: 8-18 failed to parse svg: found closing tag 'g' instead of 'style' in line 4
// Error: 2-19 failed to parse svg: found closing tag 'g' instead of 'style' in line 4
#image("/bad.svg")