Mirror of https://github.com/typst/typst (synced 2025-05-14 17:15:28 +08:00)

Commit 3932bb2cb9: New source loading architecture
Parent: 3c92bad9a7
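This commit replaces the loader-level FileId plumbing with per-kind stores owned by the compilation context: source files go through a SourceStore that hands out SourceId values, fonts through a FontStore, and images through an ImageStore. A minimal sketch of the resulting call flow, distilled from the benchmark changes below; the font directory and the input path are placeholders and error handling is reduced to unwrap:

    use std::cell::RefCell;
    use std::path::Path;
    use std::rc::Rc;

    use typst::export::pdf;
    use typst::loading::FsLoader;
    use typst::Context;

    fn compile(path: &Path) -> Vec<u8> {
        // File-system loader; the font search path is a placeholder.
        let loader = FsLoader::new().with_path("fonts").wrap();
        let ctx = Rc::new(RefCell::new(Context::new(loader)));

        // Loading registers the file in the context's SourceStore and
        // returns a lightweight SourceId that later passes refer to.
        let id = ctx.borrow_mut().sources.load(path).unwrap();

        // Parsing, evaluation, layouting and export now take the id
        // instead of a SourceFile value or a loader-level FileId.
        let frames = ctx.borrow_mut().typeset(id).unwrap();
        pdf(&ctx.borrow(), &frames)
    }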
@@ -11,7 +11,7 @@ use typst::export::pdf;
 use typst::layout::{layout, Frame, LayoutTree};
 use typst::loading::FsLoader;
 use typst::parse::parse;
-use typst::source::SourceFile;
+use typst::source::SourceId;
 use typst::syntax::SyntaxTree;
 use typst::Context;
@@ -21,15 +21,13 @@ const CASES: &[&str] = &["coma.typ", "text/basic.typ"];
 
 fn benchmarks(c: &mut Criterion) {
     let loader = FsLoader::new().with_path(FONT_DIR).wrap();
-    let ctx = Rc::new(RefCell::new(Context::new(loader.clone())));
+    let ctx = Rc::new(RefCell::new(Context::new(loader)));
 
     for case in CASES {
         let path = Path::new(TYP_DIR).join(case);
         let name = path.file_stem().unwrap().to_string_lossy();
-        let file = loader.resolve(&path).unwrap();
-        let src = std::fs::read_to_string(&path).unwrap();
-        let source = SourceFile::new(file, src);
-        let case = Case::new(ctx.clone(), source);
+        let id = ctx.borrow_mut().sources.load(&path).unwrap();
+        let case = Case::new(ctx.clone(), id);
 
         macro_rules! bench {
             ($step:literal, setup = |$ctx:ident| $setup:expr, code = $code:expr $(,)?) => {
@@ -82,7 +80,7 @@ fn benchmarks(c: &mut Criterion) {
 /// A test case with prepared intermediate results.
 struct Case {
     ctx: Rc<RefCell<Context>>,
-    source: SourceFile,
+    id: SourceId,
     ast: Rc<SyntaxTree>,
     module: Module,
     tree: LayoutTree,
@@ -90,26 +88,23 @@ struct Case {
 }
 
 impl Case {
-    fn new(ctx: Rc<RefCell<Context>>, source: SourceFile) -> Self {
+    fn new(ctx: Rc<RefCell<Context>>, id: SourceId) -> Self {
         let mut borrowed = ctx.borrow_mut();
-        let ast = Rc::new(parse(&source).unwrap());
-        let module = eval(&mut borrowed, source.file(), Rc::clone(&ast)).unwrap();
+        let source = borrowed.sources.get(id);
+        let ast = Rc::new(parse(source).unwrap());
+        let module = eval(&mut borrowed, id, Rc::clone(&ast)).unwrap();
         let tree = exec(&mut borrowed, &module.template);
         let frames = layout(&mut borrowed, &tree);
         drop(borrowed);
-        Self { ctx, source, ast, module, tree, frames }
+        Self { ctx, id, ast, module, tree, frames }
     }
 
     fn parse(&self) -> SyntaxTree {
-        parse(&self.source).unwrap()
+        parse(self.ctx.borrow().sources.get(self.id)).unwrap()
     }
 
     fn eval(&self) -> TypResult<Module> {
-        eval(
-            &mut self.ctx.borrow_mut(),
-            self.source.file(),
-            Rc::clone(&self.ast),
-        )
+        eval(&mut self.ctx.borrow_mut(), self.id, Rc::clone(&self.ast))
     }
 
     fn exec(&self) -> LayoutTree {
@@ -121,7 +116,7 @@ impl Case {
     }
 
     fn typeset(&self) -> TypResult<Vec<Rc<Frame>>> {
-        self.ctx.borrow_mut().typeset(&self.source)
+        self.ctx.borrow_mut().typeset(self.id)
     }
 
     fn pdf(&self) -> Vec<u8> {

@@ -1,7 +1,6 @@
 use iai::{black_box, main};
 
 use typst::diag::TypResult;
-use typst::loading::FileId;
 use typst::parse::{parse, Scanner, TokenMode, Tokens};
 use typst::source::SourceFile;
 use typst::syntax::SyntaxTree;
@@ -33,8 +32,7 @@ fn bench_tokenize() -> usize {
 }
 
 fn bench_parse() -> TypResult<SyntaxTree> {
-    let source = SourceFile::new(FileId::from_raw(0), black_box(SRC).into());
-    parse(&source)
+    parse(&SourceFile::detached(black_box(SRC)))
 }
 
 main!(bench_decode, bench_scan, bench_tokenize, bench_parse);

src/diag.rs (32 changed lines)

@@ -2,7 +2,7 @@
 
 use serde::{Deserialize, Serialize};
 
-use crate::loading::FileId;
+use crate::source::SourceId;
 use crate::syntax::Span;
 
 /// The result type for typesetting and all its subpasses.
@@ -14,14 +14,14 @@ pub type StrResult<T> = Result<T, String>;
 /// An error in a source file.
 #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)]
 pub struct Error {
-    /// The file that contains the error.
-    pub file: FileId,
+    /// The id of the source file that contains the error.
+    pub source: SourceId,
     /// The erroneous location in the source code.
     pub span: Span,
     /// A diagnostic message describing the problem.
     pub message: String,
     /// The trace of function calls leading to the error.
-    pub trace: Vec<(FileId, Span, Tracepoint)>,
+    pub trace: Vec<(SourceId, Span, Tracepoint)>,
 }
 
 /// A part of an error's [trace](Error::trace).
@@ -35,9 +35,13 @@ pub enum Tracepoint {
 
 impl Error {
     /// Create a new, bare error.
-    pub fn new(file: FileId, span: impl Into<Span>, message: impl Into<String>) -> Self {
+    pub fn new(
+        source: SourceId,
+        span: impl Into<Span>,
+        message: impl Into<String>,
+    ) -> Self {
         Self {
-            file,
+            source,
             span: span.into(),
             trace: vec![],
             message: message.into(),
@@ -47,11 +51,11 @@ impl Error {
     /// Create a boxed vector containing one error. The return value is suitable
     /// as the `Err` variant of a [`TypResult`].
     pub fn boxed(
-        file: FileId,
+        source: SourceId,
         span: impl Into<Span>,
         message: impl Into<String>,
     ) -> Box<Vec<Self>> {
-        Box::new(vec![Self::new(file, span, message)])
+        Box::new(vec![Self::new(source, span, message)])
     }
 
     /// Partially build a vec-boxed error, returning a function that just needs
@@ -60,23 +64,23 @@ impl Error {
     /// This is useful in to convert from [`StrResult`] to a [`TypResult`] using
     /// [`map_err`](Result::map_err).
     pub fn partial(
-        file: FileId,
+        source: SourceId,
         span: impl Into<Span>,
     ) -> impl FnOnce(String) -> Box<Vec<Self>> {
-        move |message| Self::boxed(file, span, message)
+        move |message| Self::boxed(source, span, message)
     }
 }
 
 /// Early-return with a vec-boxed [`Error`].
 #[macro_export]
 macro_rules! bail {
-    ($file:expr, $span:expr, $message:expr $(,)?) => {
+    ($source:expr, $span:expr, $message:expr $(,)?) => {
         return Err(Box::new(vec![$crate::diag::Error::new(
-            $file, $span, $message,
+            $source, $span, $message,
         )]));
     };
 
-    ($file:expr, $span:expr, $fmt:expr, $($arg:expr),+ $(,)?) => {
+    ($source:expr, $span:expr, $fmt:expr, $($arg:expr),+ $(,)?) => {
-        $crate::bail!($file, $span, format!($fmt, $($arg),+));
+        $crate::bail!($source, $span, format!($fmt, $($arg),+));
     };
 }
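With these changes an error is tied to a SourceId rather than a loader-level FileId. A small sketch of producing the boxed error list that TypResult expects under the new signatures; the helper name is illustrative, not part of this commit:

    use typst::diag::{Error, TypResult};
    use typst::source::SourceId;
    use typst::syntax::Span;

    // Turns a plain message into the Err payload of a TypResult, pointing
    // at the offending source file by id.
    fn fail<T>(source: SourceId, span: Span, message: &str) -> TypResult<T> {
        Err(Error::boxed(source, span, message))
    }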

@@ -4,7 +4,7 @@ use std::rc::Rc;
 
 use super::{Cast, EvalContext, Value};
 use crate::diag::{Error, TypResult};
-use crate::loading::FileId;
+use crate::source::SourceId;
 use crate::syntax::{Span, Spanned};
 use crate::util::EcoString;
 
@@ -59,8 +59,8 @@ impl PartialEq for Function {
 /// Evaluated arguments to a function.
 #[derive(Debug, Clone, PartialEq)]
 pub struct FuncArgs {
-    /// The file in which the function was called.
-    pub file: FileId,
+    /// The id of the source file in which the function was called.
+    pub source: SourceId,
     /// The span of the whole argument list.
     pub span: Span,
     /// The positional arguments.
@@ -103,7 +103,7 @@ impl FuncArgs {
     {
         match self.eat() {
             Some(found) => Ok(found),
-            None => bail!(self.file, self.span, "missing argument: {}", what),
+            None => bail!(self.source, self.span, "missing argument: {}", what),
         }
     }
 
@@ -134,14 +134,14 @@ impl FuncArgs {
         let value = self.items.remove(index).value;
         let span = value.span;
 
-        T::cast(value).map(Some).map_err(Error::partial(self.file, span))
+        T::cast(value).map(Some).map_err(Error::partial(self.source, span))
     }
 
     /// Return an "unexpected argument" error if there is any remaining
     /// argument.
     pub fn finish(self) -> TypResult<()> {
         if let Some(arg) = self.items.first() {
-            bail!(self.file, arg.span, "unexpected argument");
+            bail!(self.source, arg.span, "unexpected argument");
         }
         Ok(())
     }

src/eval/mod.rs (152 changed lines)

@@ -21,30 +21,35 @@ pub use template::*;
 pub use value::*;
 
 use std::collections::HashMap;
+use std::io;
 use std::mem;
-use std::path::Path;
+use std::path::{Path, PathBuf};
 use std::rc::Rc;
 
 use crate::diag::{Error, StrResult, Tracepoint, TypResult};
 use crate::geom::{Angle, Fractional, Length, Relative};
-use crate::image::ImageCache;
-use crate::loading::{FileId, Loader};
+use crate::image::ImageStore;
+use crate::loading::Loader;
 use crate::parse::parse;
-use crate::source::{SourceFile, SourceMap};
+use crate::source::{SourceId, SourceStore};
 use crate::syntax::visit::Visit;
 use crate::syntax::*;
 use crate::util::EcoString;
 use crate::Context;
 
 /// Evaluate a parsed source file into a module.
-pub fn eval(ctx: &mut Context, file: FileId, ast: Rc<SyntaxTree>) -> TypResult<Module> {
-    let mut ctx = EvalContext::new(ctx, file);
+pub fn eval(
+    ctx: &mut Context,
+    source: SourceId,
+    ast: Rc<SyntaxTree>,
+) -> TypResult<Module> {
+    let mut ctx = EvalContext::new(ctx, source);
     let template = ast.eval(&mut ctx)?;
     Ok(Module { scope: ctx.scopes.top, template })
 }
 
 /// Caches evaluated modules.
-pub type ModuleCache = HashMap<FileId, Module>;
+pub type ModuleCache = HashMap<SourceId, Module>;
 
 /// An evaluated module, ready for importing or execution.
 #[derive(Debug, Clone, PartialEq)]
@@ -68,100 +73,99 @@ pub trait Eval {
 pub struct EvalContext<'a> {
     /// The loader from which resources (files and images) are loaded.
     pub loader: &'a dyn Loader,
-    /// The store for source files.
-    pub sources: &'a mut SourceMap,
-    /// The cache for decoded images.
-    pub images: &'a mut ImageCache,
-    /// The cache for loaded modules.
+    /// Stores loaded source files.
+    pub sources: &'a mut SourceStore,
+    /// Stores decoded images.
+    pub images: &'a mut ImageStore,
+    /// Caches evaluated modules.
     pub modules: &'a mut ModuleCache,
     /// The active scopes.
     pub scopes: Scopes<'a>,
-    /// The currently evaluated file.
-    pub file: FileId,
+    /// The id of the currently evaluated source file.
+    pub source: SourceId,
     /// The stack of imported files that led to evaluation of the current file.
-    pub route: Vec<FileId>,
+    pub route: Vec<SourceId>,
     /// The expression map for the currently built template.
     pub map: ExprMap,
 }
 
 impl<'a> EvalContext<'a> {
     /// Create a new evaluation context.
-    pub fn new(ctx: &'a mut Context, file: FileId) -> Self {
+    pub fn new(ctx: &'a mut Context, source: SourceId) -> Self {
         Self {
             loader: ctx.loader.as_ref(),
             sources: &mut ctx.sources,
             images: &mut ctx.images,
             modules: &mut ctx.modules,
            scopes: Scopes::new(Some(&ctx.std)),
-            file,
+            source,
            route: vec![],
            map: ExprMap::new(),
        }
    }
 
-    /// Resolve a path relative to the current file.
-    ///
-    /// Returns an error if the file is not found.
-    pub fn resolve(&mut self, path: &str, span: Span) -> TypResult<FileId> {
-        self.loader
-            .resolve_from(self.file, Path::new(path))
-            .map_err(|_| Error::boxed(self.file, span, "file not found"))
-    }
-
     /// Process an import of a module relative to the current location.
-    pub fn import(&mut self, path: &str, span: Span) -> TypResult<FileId> {
-        let file = self.resolve(path, span)?;
+    pub fn import(&mut self, path: &str, span: Span) -> TypResult<SourceId> {
+        // Load the source file.
+        let full = self.relpath(path);
+        let id = self.sources.load(&full).map_err(|err| {
+            Error::boxed(self.source, span, match err.kind() {
+                io::ErrorKind::NotFound => "file not found".into(),
+                _ => format!("failed to load source file ({})", err),
+            })
+        })?;
 
         // Prevent cyclic importing.
-        if self.file == file || self.route.contains(&file) {
-            bail!(self.file, span, "cyclic import");
+        if self.source == id || self.route.contains(&id) {
+            bail!(self.source, span, "cyclic import");
         }
 
         // Check whether the module was already loaded.
-        if self.modules.get(&file).is_some() {
-            return Ok(file);
+        if self.modules.get(&id).is_some() {
+            return Ok(id);
         }
 
-        // Load the source file.
-        let buffer = self
-            .loader
-            .load_file(file)
-            .map_err(|_| Error::boxed(self.file, span, "failed to load file"))?;
-
-        // Decode UTF-8.
-        let string = String::from_utf8(buffer)
-            .map_err(|_| Error::boxed(self.file, span, "file is not valid utf-8"))?;
-
         // Parse the file.
-        let source = self.sources.insert(SourceFile::new(file, string));
+        let source = self.sources.get(id);
         let ast = parse(&source)?;
 
         // Prepare the new context.
         let new_scopes = Scopes::new(self.scopes.base);
         let old_scopes = mem::replace(&mut self.scopes, new_scopes);
-        self.route.push(self.file);
-        self.file = file;
+        self.route.push(self.source);
+        self.source = id;
 
         // Evaluate the module.
         let result = Rc::new(ast).eval(self);
 
         // Restore the old context.
         let new_scopes = mem::replace(&mut self.scopes, old_scopes);
-        self.file = self.route.pop().unwrap();
+        self.source = self.route.pop().unwrap();
 
         // Add a tracepoint to the errors.
         let template = result.map_err(|mut errors| {
             for error in errors.iter_mut() {
-                error.trace.push((self.file, span, Tracepoint::Import));
+                error.trace.push((self.source, span, Tracepoint::Import));
             }
             errors
         })?;
 
         // Save the evaluated module.
         let module = Module { scope: new_scopes.top, template };
-        self.modules.insert(file, module);
+        self.modules.insert(id, module);
 
-        Ok(file)
+        Ok(id)
+    }
+
+    /// Complete a path that is relative to the current file to be relative to
+    /// the environment's current directory.
+    pub fn relpath(&self, path: impl AsRef<Path>) -> PathBuf {
+        self.sources
+            .get(self.source)
+            .path()
+            .parent()
+            .expect("is a file")
+            .join(path)
     }
 }
 
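The new import resolution anchors a relative import at the directory of the importing file (relpath above) and only then hits the SourceStore. The same path arithmetic in a standalone sketch; the file names are made up:

    use std::path::{Path, PathBuf};

    // Mirrors relpath: join the import string onto the parent directory
    // of the currently evaluated source file.
    fn relpath(current_source: &Path, import: &str) -> PathBuf {
        current_source.parent().expect("is a file").join(import)
    }

    fn main() {
        let current = Path::new("project/chapters/intro.typ");
        let resolved = relpath(current, "lib/utils.typ");
        assert_eq!(resolved, Path::new("project/chapters/lib/utils.typ"));
    }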
@@ -231,7 +235,7 @@ impl Eval for Expr {
             Self::Str(_, ref v) => Value::Str(v.clone()),
             Self::Ident(ref v) => match ctx.scopes.get(&v) {
                 Some(slot) => slot.borrow().clone(),
-                None => bail!(ctx.file, v.span, "unknown variable"),
+                None => bail!(ctx.source, v.span, "unknown variable"),
             },
             Self::Array(ref v) => Value::Array(v.eval(ctx)?),
             Self::Dict(ref v) => Value::Dict(v.eval(ctx)?),
@@ -300,7 +304,7 @@ impl Eval for BlockExpr {
         for expr in &self.exprs {
             let value = expr.eval(ctx)?;
             output = ops::join(output, value)
-                .map_err(Error::partial(ctx.file, expr.span()))?;
+                .map_err(Error::partial(ctx.source, expr.span()))?;
         }
 
         if self.scoping {
@@ -321,7 +325,7 @@ impl Eval for UnaryExpr {
             UnOp::Neg => ops::neg(value),
             UnOp::Not => ops::not(value),
         };
-        result.map_err(Error::partial(ctx.file, self.span))
+        result.map_err(Error::partial(ctx.source, self.span))
     }
 }
 
@@ -368,7 +372,7 @@ impl BinaryExpr {
         }
 
         let rhs = self.rhs.eval(ctx)?;
-        op(lhs, rhs).map_err(Error::partial(ctx.file, self.span))
+        op(lhs, rhs).map_err(Error::partial(ctx.source, self.span))
     }
 
     /// Apply an assignment operation.
@@ -380,22 +384,22 @@ impl BinaryExpr {
         let slot = if let Expr::Ident(id) = self.lhs.as_ref() {
             match ctx.scopes.get(id) {
                 Some(slot) => Rc::clone(slot),
-                None => bail!(ctx.file, lspan, "unknown variable"),
+                None => bail!(ctx.source, lspan, "unknown variable"),
             }
         } else {
-            bail!(ctx.file, lspan, "cannot assign to this expression",);
+            bail!(ctx.source, lspan, "cannot assign to this expression",);
         };
 
         let rhs = self.rhs.eval(ctx)?;
         let mut mutable = match slot.try_borrow_mut() {
             Ok(mutable) => mutable,
             Err(_) => {
-                bail!(ctx.file, lspan, "cannot assign to a constant",);
+                bail!(ctx.source, lspan, "cannot assign to a constant",);
             }
         };
 
         let lhs = mem::take(&mut *mutable);
-        *mutable = op(lhs, rhs).map_err(Error::partial(ctx.file, self.span))?;
+        *mutable = op(lhs, rhs).map_err(Error::partial(ctx.source, self.span))?;
 
         Ok(Value::None)
     }
@@ -409,18 +413,18 @@ impl Eval for CallExpr {
             .callee
             .eval(ctx)?
             .cast::<Function>()
-            .map_err(Error::partial(ctx.file, self.callee.span()))?;
+            .map_err(Error::partial(ctx.source, self.callee.span()))?;
 
         let mut args = self.args.eval(ctx)?;
         let returned = callee(ctx, &mut args).map_err(|mut errors| {
             for error in errors.iter_mut() {
                 // Skip errors directly related to arguments.
-                if error.file == ctx.file && self.span.contains(error.span) {
+                if error.source == ctx.source && self.span.contains(error.span) {
                     continue;
                 }
 
                 error.trace.push((
-                    ctx.file,
+                    ctx.source,
                     self.span,
                     Tracepoint::Call(callee.name().map(Into::into)),
                 ));
@@ -439,7 +443,7 @@ impl Eval for CallArgs {
 
     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
         Ok(FuncArgs {
-            file: ctx.file,
+            source: ctx.source,
             span: self.span,
             items: self
                 .items
@@ -473,7 +477,7 @@ impl Eval for ClosureExpr {
     type Output = Value;
 
     fn eval(&self, ctx: &mut EvalContext) -> TypResult<Self::Output> {
-        let file = ctx.file;
+        let file = ctx.source;
         let params = Rc::clone(&self.params);
         let body = Rc::clone(&self.body);
 
@@ -489,7 +493,7 @@ impl Eval for ClosureExpr {
             // Don't leak the scopes from the call site. Instead, we use the
             // scope of captured variables we collected earlier.
             let prev_scopes = mem::take(&mut ctx.scopes);
-            let prev_file = mem::replace(&mut ctx.file, file);
+            let prev_file = mem::replace(&mut ctx.source, file);
             ctx.scopes.top = captured.clone();
 
             for param in params.iter() {
@@ -499,7 +503,7 @@ impl Eval for ClosureExpr {
 
             let result = body.eval(ctx);
             ctx.scopes = prev_scopes;
-            ctx.file = prev_file;
+            ctx.source = prev_file;
             result
         });
 
@@ -515,7 +519,7 @@ impl Eval for WithExpr {
             .callee
             .eval(ctx)?
             .cast::<Function>()
-            .map_err(Error::partial(ctx.file, self.callee.span()))?;
+            .map_err(Error::partial(ctx.source, self.callee.span()))?;
 
         let applied = self.args.eval(ctx)?;
 
@@ -565,7 +569,7 @@ impl Eval for IfExpr {
             .condition
             .eval(ctx)?
             .cast::<bool>()
-            .map_err(Error::partial(ctx.file, self.condition.span()))?;
+            .map_err(Error::partial(ctx.source, self.condition.span()))?;
 
         if condition {
             self.if_body.eval(ctx)
@@ -587,11 +591,11 @@ impl Eval for WhileExpr {
             .condition
             .eval(ctx)?
             .cast::<bool>()
-            .map_err(Error::partial(ctx.file, self.condition.span()))?
+            .map_err(Error::partial(ctx.source, self.condition.span()))?
         {
             let value = self.body.eval(ctx)?;
             output = ops::join(output, value)
-                .map_err(Error::partial(ctx.file, self.body.span()))?;
+                .map_err(Error::partial(ctx.source, self.body.span()))?;
         }
 
         Ok(output)
@@ -613,7 +617,7 @@ impl Eval for ForExpr {
 
                 let value = self.body.eval(ctx)?;
                 output = ops::join(output, value)
-                    .map_err(Error::partial(ctx.file, self.body.span()))?;
+                    .map_err(Error::partial(ctx.source, self.body.span()))?;
             }
 
             ctx.scopes.exit();
@@ -639,10 +643,10 @@ impl Eval for ForExpr {
                 iter!(for (k => key, v => value) in dict.into_iter())
             }
             (ForPattern::KeyValue(_, _), Value::Str(_)) => {
-                bail!(ctx.file, self.pattern.span(), "mismatched pattern");
+                bail!(ctx.source, self.pattern.span(), "mismatched pattern");
             }
             (_, iter) => bail!(
-                ctx.file,
+                ctx.source,
                 self.iter.span(),
                 "cannot loop over {}",
                 iter.type_name(),
@@ -659,7 +663,7 @@ impl Eval for ImportExpr {
             .path
             .eval(ctx)?
             .cast::<EcoString>()
-            .map_err(Error::partial(ctx.file, self.path.span()))?;
+            .map_err(Error::partial(ctx.source, self.path.span()))?;
 
         let file = ctx.import(&path, self.path.span())?;
         let module = &ctx.modules[&file];
@@ -675,7 +679,7 @@ impl Eval for ImportExpr {
                 if let Some(slot) = module.scope.get(&ident) {
                     ctx.scopes.def_mut(ident.as_str(), slot.borrow().clone());
                 } else {
-                    bail!(ctx.file, ident.span, "unresolved import");
+                    bail!(ctx.source, ident.span, "unresolved import");
                }
            }
        }
@@ -693,7 +697,7 @@ impl Eval for IncludeExpr {
             .path
             .eval(ctx)?
             .cast::<EcoString>()
-            .map_err(Error::partial(ctx.file, self.path.span()))?;
+            .map_err(Error::partial(ctx.source, self.path.span()))?;
 
         let file = ctx.import(&path, self.path.span())?;
         let module = &ctx.modules[&file];

@@ -14,17 +14,17 @@ use pdf_writer::{
 use ttf_parser::{name_id, GlyphId};
 
 use crate::color::Color;
-use crate::font::{Em, FaceId, FontCache};
+use crate::font::{Em, FaceId, FontStore};
 use crate::geom::{self, Length, Size};
-use crate::image::{Image, ImageCache, ImageId};
+use crate::image::{Image, ImageId, ImageStore};
 use crate::layout::{Element, Frame, Geometry, Paint};
 use crate::Context;
 
 /// Export a collection of frames into a PDF document.
 ///
 /// This creates one page per frame. In addition to the frames, you need to pass
-/// in the cache used during compilation such that things like fonts and images
-/// can be included in the PDF.
+/// in the context used during compilation such that things like fonts and
+/// images can be included in the PDF.
 ///
 /// Returns the raw bytes making up the PDF document.
 pub fn pdf(ctx: &Context, frames: &[Rc<Frame>]) -> Vec<u8> {
@@ -33,19 +33,16 @@ pub fn pdf(ctx: &Context, frames: &[Rc<Frame>]) -> Vec<u8> {
 
 struct PdfExporter<'a> {
     writer: PdfWriter,
-    frames: &'a [Rc<Frame>],
-    fonts: &'a FontCache,
-    font_map: Remapper<FaceId>,
-    images: &'a ImageCache,
-    image_map: Remapper<ImageId>,
     refs: Refs,
+    frames: &'a [Rc<Frame>],
+    fonts: &'a FontStore,
+    images: &'a ImageStore,
+    font_map: Remapper<FaceId>,
+    image_map: Remapper<ImageId>,
 }
 
 impl<'a> PdfExporter<'a> {
     fn new(ctx: &'a Context, frames: &'a [Rc<Frame>]) -> Self {
-        let mut writer = PdfWriter::new(1, 7);
-        writer.set_indent(2);
-
         let mut font_map = Remapper::new();
         let mut image_map = Remapper::new();
         let mut alpha_masks = 0;
@@ -66,14 +63,15 @@ impl<'a> PdfExporter<'a> {
             }
         }
 
-        let refs = Refs::new(frames.len(), font_map.len(), image_map.len(), alpha_masks);
+        let mut writer = PdfWriter::new(1, 7);
+        writer.set_indent(2);
 
         Self {
             writer,
+            refs: Refs::new(frames.len(), font_map.len(), image_map.len(), alpha_masks),
             frames,
             fonts: &ctx.fonts,
             images: &ctx.images,
-            refs,
             font_map,
             image_map,
         }

src/font.rs (340 changed lines)

@@ -3,13 +3,151 @@
 use std::collections::{hash_map::Entry, HashMap};
 use std::fmt::{self, Debug, Display, Formatter};
 use std::ops::Add;
+use std::path::PathBuf;
 use std::rc::Rc;
 
 use decorum::N64;
 use serde::{Deserialize, Serialize};
 
 use crate::geom::Length;
-use crate::loading::{FileId, Loader};
+use crate::loading::{FileHash, Loader};
 
+/// A unique identifier for a loaded font face.
+#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
+#[derive(Serialize, Deserialize)]
+pub struct FaceId(u32);
+
+impl FaceId {
+    /// Create a face id from the raw underlying value.
+    ///
+    /// This should only be called with values returned by
+    /// [`into_raw`](Self::into_raw).
+    pub const fn from_raw(v: u32) -> Self {
+        Self(v)
+    }
+
+    /// Convert into the raw underlying value.
+    pub const fn into_raw(self) -> u32 {
+        self.0
+    }
+}
+
+/// Storage for loaded and parsed font faces.
+pub struct FontStore {
+    loader: Rc<dyn Loader>,
+    faces: Vec<Option<Face>>,
+    families: HashMap<String, Vec<FaceId>>,
+    buffers: HashMap<FileHash, Rc<Vec<u8>>>,
+    on_load: Option<Box<dyn Fn(FaceId, &Face)>>,
+}
+
+impl FontStore {
+    /// Create a new, empty font store.
+    pub fn new(loader: Rc<dyn Loader>) -> Self {
+        let mut faces = vec![];
+        let mut families = HashMap::<String, Vec<FaceId>>::new();
+
+        for (i, info) in loader.faces().iter().enumerate() {
+            let id = FaceId(i as u32);
+            faces.push(None);
+            families
+                .entry(info.family.to_lowercase())
+                .and_modify(|vec| vec.push(id))
+                .or_insert_with(|| vec![id]);
+        }
+
+        Self {
+            loader,
+            faces,
+            families,
+            buffers: HashMap::new(),
+            on_load: None,
+        }
+    }
+
+    /// Register a callback which is invoked each time a font face is loaded.
+    pub fn on_load<F>(&mut self, f: F)
+    where
+        F: Fn(FaceId, &Face) + 'static,
+    {
+        self.on_load = Some(Box::new(f));
+    }
+
+    /// Query for and load the font face from the given `family` that most
+    /// closely matches the given `variant`.
+    pub fn select(&mut self, family: &str, variant: FontVariant) -> Option<FaceId> {
+        // Check whether a family with this name exists.
+        let ids = self.families.get(family)?;
+        let infos = self.loader.faces();
+
+        let mut best = None;
+        let mut best_key = None;
+
+        // Find the best matching variant of this font.
+        for &id in ids {
+            let current = infos[id.0 as usize].variant;
+
+            // This is a perfect match, no need to search further.
+            if current == variant {
+                best = Some(id);
+                break;
+            }
+
+            // If this is not a perfect match, we compute a key that we want to
+            // minimize among all variants. This key prioritizes style, then
+            // stretch distance and then weight distance.
+            let key = (
+                current.style != variant.style,
+                current.stretch.distance(variant.stretch),
+                current.weight.distance(variant.weight),
+            );
+
+            if best_key.map_or(true, |b| key < b) {
+                best = Some(id);
+                best_key = Some(key);
+            }
+        }
+
+        let id = best?;
+
+        // Load the face if it's not already loaded.
+        let idx = id.0 as usize;
+        let slot = &mut self.faces[idx];
+        if slot.is_none() {
+            let FaceInfo { ref path, index, .. } = infos[idx];
+
+            // Check the buffer cache since multiple faces may
+            // refer to the same data (font collection).
+            let hash = self.loader.resolve(path).ok()?;
+            let buffer = match self.buffers.entry(hash) {
+                Entry::Occupied(entry) => entry.into_mut(),
+                Entry::Vacant(entry) => {
+                    let buffer = self.loader.load(path).ok()?;
+                    entry.insert(Rc::new(buffer))
+                }
+            };
+
+            let face = Face::new(Rc::clone(buffer), index)?;
+            if let Some(callback) = &self.on_load {
+                callback(id, &face);
+            }
+
+            *slot = Some(face);
+        }
+
+        Some(id)
+    }
+
+    /// Get a reference to a loaded face.
+    ///
+    /// This panics if no face with this id was loaded. This function should
+    /// only be called with ids returned by this store's
+    /// [`select()`](Self::select) method.
+    #[track_caller]
+    pub fn get(&self, id: FaceId) -> &Face {
+        self.faces[id.0 as usize].as_ref().expect("font face was not loaded")
+    }
+}
+
 /// A font face.
 pub struct Face {
@@ -53,18 +191,20 @@ impl Face {
         let cap_height = ttf.capital_height().filter(|&h| h > 0).map_or(ascender, to_em);
         let x_height = ttf.x_height().filter(|&h| h > 0).map_or(ascender, to_em);
         let descender = to_em(ttf.typographic_descender().unwrap_or(ttf.descender()));
 
         let strikeout = ttf.strikeout_metrics();
         let underline = ttf.underline_metrics();
-        let default = Em::new(0.06);
 
         let strikethrough = LineMetrics {
-            strength: strikeout.or(underline).map_or(default, |s| to_em(s.thickness)),
+            strength: strikeout
+                .or(underline)
+                .map_or(Em::new(0.06), |s| to_em(s.thickness)),
             position: strikeout.map_or(Em::new(0.25), |s| to_em(s.position)),
         };
 
         let underline = LineMetrics {
-            strength: underline.or(strikeout).map_or(default, |s| to_em(s.thickness)),
+            strength: underline
+                .or(strikeout)
+                .map_or(Em::new(0.06), |s| to_em(s.thickness)),
             position: underline.map_or(Em::new(-0.2), |s| to_em(s.position)),
         };
 
@@ -127,39 +267,6 @@ impl Face {
     }
 }
 
-/// Identifies a vertical metric of a font.
-#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
-pub enum VerticalFontMetric {
-    /// The distance from the baseline to the typographic ascender.
-    ///
-    /// Corresponds to the typographic ascender from the `OS/2` table if present
-    /// and falls back to the ascender from the `hhea` table otherwise.
-    Ascender,
-    /// The approximate height of uppercase letters.
-    CapHeight,
-    /// The approximate height of non-ascending lowercase letters.
-    XHeight,
-    /// The baseline on which the letters rest.
-    Baseline,
-    /// The distance from the baseline to the typographic descender.
-    ///
-    /// Corresponds to the typographic descender from the `OS/2` table if
-    /// present and falls back to the descender from the `hhea` table otherwise.
-    Descender,
-}
-
-impl Display for VerticalFontMetric {
-    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
-        f.pad(match self {
-            Self::Ascender => "ascender",
-            Self::CapHeight => "cap-height",
-            Self::XHeight => "x-height",
-            Self::Baseline => "baseline",
-            Self::Descender => "descender",
-        })
-    }
-}
-
 /// A length in em units.
 ///
 /// `1em` is the same as the font size.
@@ -201,137 +308,36 @@ impl Add for Em {
     }
 }
 
-/// Caches parsed font faces.
-pub struct FontCache {
-    loader: Rc<dyn Loader>,
-    faces: Vec<Option<Face>>,
-    families: HashMap<String, Vec<FaceId>>,
-    buffers: HashMap<FileId, Rc<Vec<u8>>>,
-    on_load: Option<Box<dyn Fn(FaceId, &Face)>>,
-}
-
-impl FontCache {
-    /// Create a new, empty font cache.
-    pub fn new(loader: Rc<dyn Loader>) -> Self {
-        let mut faces = vec![];
-        let mut families = HashMap::<String, Vec<FaceId>>::new();
-
-        for (i, info) in loader.faces().iter().enumerate() {
-            let id = FaceId(i as u64);
-            faces.push(None);
-            families
-                .entry(info.family.to_lowercase())
-                .and_modify(|vec| vec.push(id))
-                .or_insert_with(|| vec![id]);
-        }
-
-        Self {
-            loader,
-            faces,
-            families,
-            buffers: HashMap::new(),
-            on_load: None,
-        }
-    }
-
-    /// Query for and load the font face from the given `family` that most
-    /// closely matches the given `variant`.
-    pub fn select(&mut self, family: &str, variant: FontVariant) -> Option<FaceId> {
-        // Check whether a family with this name exists.
-        let ids = self.families.get(family)?;
-        let infos = self.loader.faces();
-
-        let mut best = None;
-        let mut best_key = None;
-
-        // Find the best matching variant of this font.
-        for &id in ids {
-            let current = infos[id.0 as usize].variant;
-
-            // This is a perfect match, no need to search further.
-            if current == variant {
-                best = Some(id);
-                break;
-            }
-
-            // If this is not a perfect match, we compute a key that we want to
-            // minimize among all variants. This key prioritizes style, then
-            // stretch distance and then weight distance.
-            let key = (
-                current.style != variant.style,
-                current.stretch.distance(variant.stretch),
-                current.weight.distance(variant.weight),
-            );
-
-            if best_key.map_or(true, |b| key < b) {
-                best = Some(id);
-                best_key = Some(key);
-            }
-        }
-
-        // Load the face if it's not already loaded.
-        let id = best?;
-        let idx = id.0 as usize;
-        let slot = &mut self.faces[idx];
-        if slot.is_none() {
-            let FaceInfo { file, index, .. } = infos[idx];
-
-            // Check the buffer cache since multiple faces may
-            // refer to the same data (font collection).
-            let buffer = match self.buffers.entry(file) {
-                Entry::Occupied(entry) => entry.into_mut(),
-                Entry::Vacant(entry) => {
-                    let buffer = self.loader.load_file(file).ok()?;
-                    entry.insert(Rc::new(buffer))
-                }
-            };
-
-            let face = Face::new(Rc::clone(buffer), index)?;
-            if let Some(callback) = &self.on_load {
-                callback(id, &face);
-            }
-
-            *slot = Some(face);
-        }
-
-        best
-    }
-
-    /// Get a reference to a loaded face.
-    ///
-    /// This panics if no face with this id was loaded. This function should
-    /// only be called with ids returned by [`select()`](Self::select).
-    #[track_caller]
-    pub fn get(&self, id: FaceId) -> &Face {
-        self.faces[id.0 as usize].as_ref().expect("font face was not loaded")
-    }
-
-    /// Register a callback which is invoked each time a font face is loaded.
-    pub fn on_load<F>(&mut self, f: F)
-    where
-        F: Fn(FaceId, &Face) + 'static,
-    {
-        self.on_load = Some(Box::new(f));
-    }
-}
-
-/// A unique identifier for a loaded font face.
-#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
-#[derive(Serialize, Deserialize)]
-pub struct FaceId(u64);
-
-impl FaceId {
-    /// Create a face id from the raw underlying value.
-    ///
-    /// This should only be called with values returned by
-    /// [`into_raw`](Self::into_raw).
-    pub const fn from_raw(v: u64) -> Self {
-        Self(v)
-    }
-
-    /// Convert into the raw underlying value.
-    pub const fn into_raw(self) -> u64 {
-        self.0
-    }
-}
+/// Identifies a vertical metric of a font.
+#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
+pub enum VerticalFontMetric {
+    /// The distance from the baseline to the typographic ascender.
+    ///
+    /// Corresponds to the typographic ascender from the `OS/2` table if present
+    /// and falls back to the ascender from the `hhea` table otherwise.
+    Ascender,
+    /// The approximate height of uppercase letters.
+    CapHeight,
+    /// The approximate height of non-ascending lowercase letters.
+    XHeight,
+    /// The baseline on which the letters rest.
+    Baseline,
+    /// The distance from the baseline to the typographic descender.
+    ///
+    /// Corresponds to the typographic descender from the `OS/2` table if
+    /// present and falls back to the descender from the `hhea` table otherwise.
+    Descender,
+}
+
+impl Display for VerticalFontMetric {
+    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
+        f.pad(match self {
+            Self::Ascender => "ascender",
+            Self::CapHeight => "cap-height",
+            Self::XHeight => "x-height",
+            Self::Baseline => "baseline",
+            Self::Descender => "descender",
+        })
+    }
+}
 
@@ -358,8 +364,8 @@ impl Display for FontFamily {
 /// Properties of a single font face.
 #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
 pub struct FaceInfo {
-    /// The font file.
-    pub file: FileId,
+    /// The path to the font file.
+    pub path: PathBuf,
     /// The collection index in the font file.
     pub index: u32,
     /// The typographic font family this face is part of.
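FontStore::select ranks candidate faces by a lexicographically ordered tuple key: style mismatch first, then stretch distance, then weight distance, with a perfect match short-circuiting the search. The ranking idea in a standalone sketch with invented distance values:

    fn main() {
        // (style differs?, stretch distance, weight distance) per candidate.
        let candidates = [
            (true, 0u16, 0u16),  // exact metrics but the wrong style
            (false, 2, 300),     // right style, weight far off
            (false, 2, 100),     // right style, weight closer
        ];

        // Tuples compare field by field, so a style match always beats a
        // closer stretch or weight, just like the key computed in select.
        let mut best = None;
        let mut best_key = None;
        for (i, &key) in candidates.iter().enumerate() {
            if best_key.map_or(true, |b| key < b) {
                best = Some(i);
                best_key = Some(key);
            }
        }

        assert_eq!(best, Some(2));
    }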

src/image.rs (169 changed lines)

@@ -2,14 +2,91 @@
 
 use std::collections::{hash_map::Entry, HashMap};
 use std::fmt::{self, Debug, Formatter};
-use std::io::Cursor;
+use std::io;
+use std::path::Path;
 use std::rc::Rc;
 
 use image::io::Reader as ImageReader;
 use image::{DynamicImage, GenericImageView, ImageFormat};
 use serde::{Deserialize, Serialize};
 
-use crate::loading::{FileId, Loader};
+use crate::loading::{FileHash, Loader};
 
+/// A unique identifier for a loaded image.
+#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
+#[derive(Serialize, Deserialize)]
+pub struct ImageId(u32);
+
+impl ImageId {
+    /// Create an image id from the raw underlying value.
+    ///
+    /// This should only be called with values returned by
+    /// [`into_raw`](Self::into_raw).
+    pub const fn from_raw(v: u32) -> Self {
+        Self(v)
+    }
+
+    /// Convert into the raw underlying value.
+    pub const fn into_raw(self) -> u32 {
+        self.0
+    }
+}
+
+/// Storage for loaded and decoded images.
+pub struct ImageStore {
+    loader: Rc<dyn Loader>,
+    files: HashMap<FileHash, ImageId>,
+    images: Vec<Image>,
+    on_load: Option<Box<dyn Fn(ImageId, &Image)>>,
+}
+
+impl ImageStore {
+    /// Create a new, empty image store.
+    pub fn new(loader: Rc<dyn Loader>) -> Self {
+        Self {
+            loader,
+            files: HashMap::new(),
+            images: vec![],
+            on_load: None,
+        }
+    }
+
+    /// Register a callback which is invoked each time an image is loaded.
+    pub fn on_load<F>(&mut self, f: F)
+    where
+        F: Fn(ImageId, &Image) + 'static,
+    {
+        self.on_load = Some(Box::new(f));
+    }
+
+    /// Load and decode an image file from a path.
+    pub fn load(&mut self, path: &Path) -> io::Result<ImageId> {
+        let hash = self.loader.resolve(path)?;
+        Ok(*match self.files.entry(hash) {
+            Entry::Occupied(entry) => entry.into_mut(),
+            Entry::Vacant(entry) => {
+                let buffer = self.loader.load(path)?;
+                let image = Image::parse(&buffer)?;
+                let id = ImageId(self.images.len() as u32);
+                if let Some(callback) = &self.on_load {
+                    callback(id, &image);
+                }
+                self.images.push(image);
+                entry.insert(id)
+            }
+        })
+    }
+
+    /// Get a reference to a loaded image.
+    ///
+    /// This panics if no image with this id was loaded. This function should
+    /// only be called with ids returned by this store's [`load()`](Self::load)
+    /// method.
+    #[track_caller]
+    pub fn get(&self, id: ImageId) -> &Image {
+        &self.images[id.0 as usize]
+    }
+}
+
 /// A loaded image.
 pub struct Image {
@@ -23,12 +100,19 @@ impl Image {
     /// Parse an image from raw data in a supported format (PNG or JPEG).
     ///
     /// The image format is determined automatically.
-    pub fn parse(data: &[u8]) -> Option<Self> {
-        let cursor = Cursor::new(data);
-        let reader = ImageReader::new(cursor).with_guessed_format().ok()?;
-        let format = reader.format()?;
-        let buf = reader.decode().ok()?;
-        Some(Self { format, buf })
+    pub fn parse(data: &[u8]) -> io::Result<Self> {
+        let cursor = io::Cursor::new(data);
+        let reader = ImageReader::new(cursor).with_guessed_format()?;
+
+        let format = reader.format().ok_or_else(|| {
+            io::Error::new(io::ErrorKind::InvalidData, "unknown image format")
+        })?;
+
+        let buf = reader
+            .decode()
+            .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
+
+        Ok(Self { format, buf })
     }
 
     /// The width of the image.
@@ -52,72 +136,3 @@ impl Debug for Image {
             .finish()
     }
 }
-
-/// Caches decoded images.
-pub struct ImageCache {
-    loader: Rc<dyn Loader>,
-    images: HashMap<ImageId, Image>,
-    on_load: Option<Box<dyn Fn(ImageId, &Image)>>,
-}
-
-impl ImageCache {
-    /// Create a new, empty image cache.
-    pub fn new(loader: Rc<dyn Loader>) -> Self {
-        Self {
-            loader,
-            images: HashMap::new(),
-            on_load: None,
-        }
-    }
-
-    /// Load and decode an image file from a path.
-    pub fn load(&mut self, file: FileId) -> Option<ImageId> {
-        let id = ImageId(file.into_raw());
-        if let Entry::Vacant(entry) = self.images.entry(id) {
-            let buffer = self.loader.load_file(file).ok()?;
-            let image = Image::parse(&buffer)?;
-            if let Some(callback) = &self.on_load {
-                callback(id, &image);
-            }
-            entry.insert(image);
-        }
-        Some(id)
-    }
-
-    /// Get a reference to a loaded image.
-    ///
-    /// This panics if no image with this id was loaded. This function should
-    /// only be called with ids returned by [`load()`](Self::load).
-    #[track_caller]
-    pub fn get(&self, id: ImageId) -> &Image {
-        &self.images[&id]
-    }
-
-    /// Register a callback which is invoked each time an image is loaded.
-    pub fn on_load<F>(&mut self, f: F)
-    where
-        F: Fn(ImageId, &Image) + 'static,
-    {
-        self.on_load = Some(Box::new(f));
-    }
-}
-
-/// A unique identifier for a loaded image.
-#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
-#[derive(Serialize, Deserialize)]
-pub struct ImageId(u64);
-
-impl ImageId {
-    /// Create an image id from the raw underlying value.
-    ///
-    /// This should only be called with values returned by
-    /// [`into_raw`](Self::into_raw).
-    pub const fn from_raw(v: u64) -> Self {
-        Self(v)
-    }
-
-    /// Convert into the raw underlying value.
-    pub const fn into_raw(self) -> u64 {
-        self.0
-    }
-}
@ -1,5 +1,5 @@
 #[cfg(feature = "layout-cache")]
-use std::collections::{hash_map::Entry, HashMap};
+use std::collections::HashMap;
 use std::ops::Deref;

 use super::*;
@ -68,13 +68,10 @@ impl LayoutCache {
         frames: Vec<Constrained<Rc<Frame>>>,
         level: usize,
     ) {
-        let entry = FramesEntry::new(frames, level);
-        match self.frames.entry(hash) {
-            Entry::Occupied(occupied) => occupied.into_mut().push(entry),
-            Entry::Vacant(vacant) => {
-                vacant.insert(vec![entry]);
-            }
-        }
+        self.frames
+            .entry(hash)
+            .or_default()
+            .push(FramesEntry::new(frames, level));
     }

     /// Clear the cache.
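The cache switches to the standard `entry().or_default()` idiom; a small sketch of the same pattern on a plain map (types here are placeholders, not the cache's real ones):

    use std::collections::HashMap;

    // Equivalent of the old Occupied/Vacant match: `or_default` inserts an
    // empty Vec on first use, so pushing becomes a single call chain.
    fn push(frames: &mut HashMap<u64, Vec<&'static str>>, hash: u64, entry: &'static str) {
        frames.entry(hash).or_default().push(entry);
    }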
@ -29,9 +29,9 @@ use std::hash::Hash;
 use std::hash::Hasher;
 use std::rc::Rc;

-use crate::font::FontCache;
+use crate::font::FontStore;
 use crate::geom::*;
-use crate::image::ImageCache;
+use crate::image::ImageStore;
 use crate::util::OptionExt;
 use crate::Context;

@ -53,11 +53,11 @@ pub trait Layout {

 /// The context for layouting.
 pub struct LayoutContext<'a> {
-    /// The cache for parsed font faces.
-    pub fonts: &'a mut FontCache,
-    /// The cache for decoded imges.
-    pub images: &'a mut ImageCache,
-    /// The cache for layouting artifacts.
+    /// Stores parsed font faces.
+    pub fonts: &'a mut FontStore,
+    /// Stores decoded images.
+    pub images: &'a mut ImageStore,
+    /// Caches layouting artifacts.
     #[cfg(feature = "layout-cache")]
     pub layouts: &'a mut LayoutCache,
     /// How deeply nested the current layout tree position is.
src/lib.rs
@ -53,26 +53,26 @@ use std::rc::Rc;
 use crate::diag::TypResult;
 use crate::eval::{ModuleCache, Scope};
 use crate::exec::State;
-use crate::font::FontCache;
+use crate::font::FontStore;
-use crate::image::ImageCache;
+use crate::image::ImageStore;
 use crate::layout::Frame;
 #[cfg(feature = "layout-cache")]
 use crate::layout::LayoutCache;
 use crate::loading::Loader;
-use crate::source::{SourceFile, SourceMap};
+use crate::source::{SourceId, SourceStore};

 /// The core context which holds the loader, configuration and cached artifacts.
 pub struct Context {
     /// The loader the context was created with.
     pub loader: Rc<dyn Loader>,
     /// Stores loaded source files.
-    pub sources: SourceMap,
+    pub sources: SourceStore,
+    /// Stores parsed font faces.
+    pub fonts: FontStore,
+    /// Stores decoded images.
+    pub images: ImageStore,
     /// Caches evaluated modules.
     pub modules: ModuleCache,
-    /// Caches parsed font faces.
-    pub fonts: FontCache,
-    /// Caches decoded images.
-    pub images: ImageCache,
     /// Caches layouting artifacts.
     #[cfg(feature = "layout-cache")]
     pub layouts: LayoutCache,
@ -93,24 +93,25 @@ impl Context {
         ContextBuilder::default()
     }

-    /// Garbage-collect caches.
-    pub fn turnaround(&mut self) {
-        #[cfg(feature = "layout-cache")]
-        self.layouts.turnaround();
-    }
-
     /// Typeset a source file into a collection of layouted frames.
     ///
     /// Returns either a vector of frames representing individual pages or
     /// diagnostics in the form of a vector of error message with file and span
     /// information.
-    pub fn typeset(&mut self, source: &SourceFile) -> TypResult<Vec<Rc<Frame>>> {
+    pub fn typeset(&mut self, id: SourceId) -> TypResult<Vec<Rc<Frame>>> {
+        let source = self.sources.get(id);
         let ast = parse::parse(source)?;
-        let module = eval::eval(self, source.file(), Rc::new(ast))?;
+        let module = eval::eval(self, id, Rc::new(ast))?;
         let tree = exec::exec(self, &module.template);
         let frames = layout::layout(self, &tree);
         Ok(frames)
     }
+
+    /// Garbage-collect caches.
+    pub fn turnaround(&mut self) {
+        #[cfg(feature = "layout-cache")]
+        self.layouts.turnaround();
+    }
 }

 /// A builder for a [`Context`].
@ -140,10 +141,10 @@ impl ContextBuilder {
     /// fonts, images, source files and other resources.
     pub fn build(self, loader: Rc<dyn Loader>) -> Context {
         Context {
-            loader: Rc::clone(&loader),
-            sources: SourceMap::new(),
-            fonts: FontCache::new(Rc::clone(&loader)),
-            images: ImageCache::new(loader),
+            sources: SourceStore::new(Rc::clone(&loader)),
+            fonts: FontStore::new(Rc::clone(&loader)),
+            images: ImageStore::new(Rc::clone(&loader)),
+            loader,
             modules: ModuleCache::new(),
             #[cfg(feature = "layout-cache")]
             layouts: LayoutCache::new(),
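A sketch of how the id-based API fits together end to end, mirroring what the CLI below does (the `compile` helper is hypothetical; error handling via anyhow is only one option, and the diagnostics are flattened into a single message here):

    use std::path::Path;

    fn compile(path: &Path) -> anyhow::Result<Vec<u8>> {
        // System font discovery as in the CLI; the input path comes from the caller.
        let loader = typst::loading::FsLoader::new().with_system().wrap();
        let mut ctx = typst::Context::new(loader);

        // Loading hands back a SourceId; typesetting works on ids from now on.
        let id = ctx.sources.load(path)?;
        let frames = ctx
            .typeset(id)
            .map_err(|errors| anyhow::anyhow!("typesetting failed with {} error(s)", errors.len()))?;

        Ok(typst::export::pdf(&ctx, &frames))
    }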
@ -1,8 +1,10 @@
 use std::f64::consts::SQRT_2;
+use std::io;

 use decorum::N64;

 use super::*;
+use crate::diag::Error;
 use crate::layout::{
     BackgroundNode, BackgroundShape, FixedNode, ImageNode, PadNode, Paint,
 };
@ -13,13 +15,17 @@ pub fn image(ctx: &mut EvalContext, args: &mut FuncArgs) -> TypResult<Value> {
     let width = args.named("width")?;
     let height = args.named("height")?;

-    let file = ctx.resolve(&path.v, path.span)?;
-    let node = match ctx.images.load(file) {
-        Some(id) => ImageNode { id, width, height },
-        None => bail!(args.file, path.span, "failed to load image"),
-    };
+    let full = ctx.relpath(path.v.as_str());
+    let id = ctx.images.load(&full).map_err(|err| {
+        Error::boxed(args.source, path.span, match err.kind() {
+            io::ErrorKind::NotFound => "file not found".into(),
+            _ => format!("failed to load image ({})", err),
+        })
+    })?;

-    Ok(Value::template(move |ctx| ctx.push_into_par(node)))
+    Ok(Value::template(move |ctx| {
+        ctx.push_into_par(ImageNode { id, width, height })
+    }))
 }

 /// `rect`: A rectangle with optional content.
@ -6,7 +6,7 @@ use crate::paper::{Paper, PaperClass};
 pub fn page(_: &mut EvalContext, args: &mut FuncArgs) -> TypResult<Value> {
     let paper = match args.eat::<Spanned<EcoString>>() {
         Some(name) => match Paper::from_name(&name.v) {
-            None => bail!(args.file, name.span, "invalid paper name"),
+            None => bail!(args.source, name.span, "invalid paper name"),
             paper => paper,
         },
         None => None,
@ -132,7 +132,7 @@ pub fn lang(_: &mut EvalContext, args: &mut FuncArgs) -> TypResult<Value> {
         if dir.v.axis() == SpecAxis::Horizontal {
             Some(dir.v)
         } else {
-            bail!(args.file, dir.span, "must be horizontal");
+            bail!(args.source, dir.span, "must be horizontal");
         }
     } else {
         iso.as_deref().map(lang_dir)
@ -25,7 +25,7 @@ pub fn len(_: &mut EvalContext, args: &mut FuncArgs) -> TypResult<Value> {
         Value::Str(v) => Value::Int(v.len() as i64),
         Value::Array(v) => Value::Int(v.len() as i64),
         Value::Dict(v) => Value::Int(v.len() as i64),
-        _ => bail!(args.file, span, "expected string, array or dictionary"),
+        _ => bail!(args.source, span, "expected string, array or dictionary"),
     })
 }

@ -35,7 +35,7 @@ pub fn rgb(_: &mut EvalContext, args: &mut FuncArgs) -> TypResult<Value> {
     if let Some(string) = args.eat::<Spanned<EcoString>>() {
         match RgbaColor::from_str(&string.v) {
             Ok(color) => color,
-            Err(_) => bail!(args.file, string.span, "invalid color"),
+            Err(_) => bail!(args.source, string.span, "invalid color"),
         }
     } else {
         let r = args.expect("red component")?;
@ -60,7 +60,7 @@ pub fn max(_: &mut EvalContext, args: &mut FuncArgs) -> TypResult<Value> {

 /// Find the minimum or maximum of a sequence of values.
 fn minmax(args: &mut FuncArgs, goal: Ordering) -> TypResult<Value> {
-    let &mut FuncArgs { file, span, .. } = args;
+    let &mut FuncArgs { source, span, .. } = args;

     let mut extremum = args.expect::<Value>("value")?;
     for value in args.all::<Value>() {
@ -71,7 +71,7 @@ fn minmax(args: &mut FuncArgs, goal: Ordering) -> TypResult<Value> {
             }
         }
             None => bail!(
-                file,
+                source,
                 span,
                 "cannot compare {} with {}",
                 extremum.type_name(),
@ -1,8 +1,6 @@
-use std::cell::{Ref, RefCell};
-use std::collections::HashMap;
 use std::fs::{self, File};
 use std::io;
-use std::path::{Path, PathBuf};
+use std::path::Path;
 use std::rc::Rc;

 use memmap2::Mmap;
@ -10,9 +8,8 @@ use same_file::Handle;
 use ttf_parser::{name_id, Face};
 use walkdir::WalkDir;

-use super::{FileId, Loader};
+use super::{FileHash, Loader};
 use crate::font::{FaceInfo, FontStretch, FontStyle, FontVariant, FontWeight};
-use crate::util::PathExt;

 /// Loads fonts and images from the local file system.
 ///
@ -20,13 +17,12 @@ use crate::util::PathExt;
 #[derive(Debug, Default, Clone)]
 pub struct FsLoader {
     faces: Vec<FaceInfo>,
-    paths: RefCell<HashMap<FileId, PathBuf>>,
 }

 impl FsLoader {
     /// Create a new loader without any fonts.
     pub fn new() -> Self {
-        Self { faces: vec![], paths: RefCell::default() }
+        Self { faces: vec![] }
     }

     /// Builder-style variant of `search_system`.
@ -52,51 +48,6 @@ impl FsLoader {
         self.search_system_impl();
     }

-    /// Search for all fonts at a path.
-    ///
-    /// If the path is a directory, all contained fonts will be searched for
-    /// recursively.
-    pub fn search_path(&mut self, dir: impl AsRef<Path>) {
-        let walk = WalkDir::new(dir)
-            .follow_links(true)
-            .sort_by(|a, b| a.file_name().cmp(b.file_name()))
-            .into_iter()
-            .filter_map(|e| e.ok());
-
-        for entry in walk {
-            let path = entry.path();
-            if let Some(ext) = path.extension().and_then(|s| s.to_str()) {
-                match ext {
-                    #[rustfmt::skip]
-                    "ttf" | "otf" | "TTF" | "OTF" |
-                    "ttc" | "otc" | "TTC" | "OTC" => {
-                        self.search_file(path).ok();
-                    }
-                    _ => {}
-                }
-            }
-        }
-    }
-
-    /// Resolve a file id for a path.
-    pub fn resolve(&self, path: &Path) -> io::Result<FileId> {
-        let file = File::open(path)?;
-        let meta = file.metadata()?;
-        if meta.is_file() {
-            let handle = Handle::from_file(file)?;
-            let id = FileId(fxhash::hash64(&handle));
-            self.paths.borrow_mut().insert(id, path.normalize());
-            Ok(id)
-        } else {
-            Err(io::Error::new(io::ErrorKind::Other, "not a file"))
-        }
-    }
-
-    /// Return the path of a resolved file.
-    pub fn path(&self, id: FileId) -> Ref<Path> {
-        Ref::map(self.paths.borrow(), |paths| paths[&id].as_path())
-    }
-
     #[cfg(all(unix, not(target_os = "macos")))]
     fn search_system_impl(&mut self) {
         self.search_path("/usr/share/fonts");
@ -134,6 +85,32 @@ impl FsLoader {
         }
     }

+    /// Search for all fonts at a path.
+    ///
+    /// If the path is a directory, all contained fonts will be searched for
+    /// recursively.
+    pub fn search_path(&mut self, dir: impl AsRef<Path>) {
+        let walk = WalkDir::new(dir)
+            .follow_links(true)
+            .sort_by(|a, b| a.file_name().cmp(b.file_name()))
+            .into_iter()
+            .filter_map(|e| e.ok());
+
+        for entry in walk {
+            let path = entry.path();
+            if let Some(ext) = path.extension().and_then(|s| s.to_str()) {
+                match ext {
+                    #[rustfmt::skip]
+                    "ttf" | "otf" | "TTF" | "OTF" |
+                    "ttc" | "otc" | "TTC" | "OTC" => {
+                        self.search_file(path).ok();
+                    }
+                    _ => {}
+                }
+            }
+        }
+    }
+
     /// Index the font faces in the file at the given path.
     ///
     /// The file may form a font collection and contain multiple font faces,
@ -180,8 +157,12 @@ impl FsLoader {
             stretch: FontStretch::from_number(face.width().to_number()),
         };

-        let file = self.resolve(path)?;
-        self.faces.push(FaceInfo { file, index, family, variant });
+        self.faces.push(FaceInfo {
+            path: path.to_owned(),
+            index,
+            family,
+            variant,
+        });

         Ok(())
     }
@ -192,16 +173,19 @@ impl Loader for FsLoader {
         &self.faces
     }

-    fn resolve_from(&self, base: FileId, path: &Path) -> io::Result<FileId> {
-        let full = self.paths.borrow()[&base]
-            .parent()
-            .expect("base is a file")
-            .join(path);
-        self.resolve(&full)
+    fn resolve(&self, path: &Path) -> io::Result<FileHash> {
+        let file = File::open(path)?;
+        let meta = file.metadata()?;
+        if meta.is_file() {
+            let handle = Handle::from_file(file)?;
+            Ok(FileHash(fxhash::hash64(&handle)))
+        } else {
+            Err(io::Error::new(io::ErrorKind::Other, "not a file"))
+        }
     }

-    fn load_file(&self, id: FileId) -> io::Result<Vec<u8>> {
-        fs::read(&self.paths.borrow()[&id])
+    fn load(&self, path: &Path) -> io::Result<Vec<u8>> {
+        fs::read(path)
     }
 }

@ -211,8 +195,8 @@ mod tests {

     #[test]
     fn test_index_font_dir() {
-        let map = FsLoader::new().with_path("fonts").paths.into_inner();
-        let mut paths: Vec<_> = map.into_iter().map(|p| p.1).collect();
+        let faces = FsLoader::new().with_path("fonts").faces;
+        let mut paths: Vec<_> = faces.into_iter().map(|info| info.path).collect();
         paths.sort();

         assert_eq!(paths, [
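A small sketch of the property `resolve` now exposes directly: two spellings of the same on-disk file should yield an equal FileHash (the font file name is a placeholder, and this helper is not part of the commit):

    use std::io;
    use std::path::Path;
    use typst::loading::{FsLoader, Loader};

    // Hypothetical font file; the point is only that both paths must resolve
    // to the same hash because they point at the same file.
    fn hashes_agree(loader: &FsLoader) -> io::Result<bool> {
        let a = loader.resolve(Path::new("fonts/Example-Regular.ttf"))?;
        let b = loader.resolve(Path::new("./fonts/../fonts/Example-Regular.ttf"))?;
        Ok(a == b)
    }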
@ -13,41 +13,24 @@ use serde::{Deserialize, Serialize};

 use crate::font::FaceInfo;

+/// A hash that identifies a file.
+///
+/// Such a hash can be [resolved](Loader::resolve) from a path.
+#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
+#[derive(Serialize, Deserialize)]
+pub struct FileHash(pub u64);
+
 /// Loads resources from a local or remote source.
 pub trait Loader {
     /// Descriptions of all font faces this loader serves.
     fn faces(&self) -> &[FaceInfo];

-    /// Resolve a `path` relative to a `base` file.
-    ///
-    /// This should return the same id for all paths pointing to the same file
-    /// and `None` if the file does not exist.
-    fn resolve_from(&self, base: FileId, path: &Path) -> io::Result<FileId>;
+    /// Resolve a hash that is the same for this and all other paths pointing to
+    /// the same file.
+    fn resolve(&self, path: &Path) -> io::Result<FileHash>;

-    /// Load a file by id.
-    ///
-    /// This must only be called with an `id` returned by a call to this
-    /// loader's `resolve_from` method.
-    fn load_file(&self, id: FileId) -> io::Result<Vec<u8>>;
-}
-
-/// A file id that can be [resolved](Loader::resolve_from) from a path.
-///
-/// Should be the same for all paths pointing to the same file.
-#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
-#[derive(Serialize, Deserialize)]
-pub struct FileId(u64);
-
-impl FileId {
-    /// Create a file id from a raw value.
-    pub const fn from_raw(v: u64) -> Self {
-        Self(v)
-    }
-
-    /// Convert into the raw underlying value.
-    pub const fn into_raw(self) -> u64 {
-        self.0
-    }
+    /// Load a file from a path.
+    fn load(&self, path: &Path) -> io::Result<Vec<u8>>;
 }

 /// A loader which serves nothing.
@ -58,11 +41,11 @@ impl Loader for BlankLoader {
         &[]
     }

-    fn resolve_from(&self, _: FileId, _: &Path) -> io::Result<FileId> {
+    fn resolve(&self, _: &Path) -> io::Result<FileHash> {
         Err(io::ErrorKind::NotFound.into())
     }

-    fn load_file(&self, _: FileId) -> io::Result<Vec<u8>> {
-        panic!("resolve_from never returns an id")
+    fn load(&self, _: &Path) -> io::Result<Vec<u8>> {
+        Err(io::ErrorKind::NotFound.into())
     }
 }
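Since the trait is now path-based, a custom backend only has to answer three questions: which faces it serves, what hash a path resolves to, and what bytes live at a path. A purely illustrative in-memory loader under those assumptions (the `MemLoader` type, its field names, and the import paths `typst::font::FaceInfo` / `typst::loading` are assumptions, not part of the commit; it hashes the path itself, unlike FsLoader, which hashes file identity):

    use std::collections::HashMap;
    use std::io;
    use std::path::{Path, PathBuf};

    use typst::font::FaceInfo;
    use typst::loading::{FileHash, Loader};

    // Sketch only: serves no fonts and keeps file contents in memory.
    struct MemLoader {
        files: HashMap<PathBuf, Vec<u8>>,
    }

    impl Loader for MemLoader {
        fn faces(&self) -> &[FaceInfo] {
            &[]
        }

        fn resolve(&self, path: &Path) -> io::Result<FileHash> {
            // Hashing the path is enough here because every path is its own file.
            if self.files.contains_key(path) {
                Ok(FileHash(fxhash::hash64(path)))
            } else {
                Err(io::ErrorKind::NotFound.into())
            }
        }

        fn load(&self, path: &Path) -> io::Result<Vec<u8>> {
            self.files
                .get(path)
                .cloned()
                .ok_or_else(|| io::ErrorKind::NotFound.into())
        }
    }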
src/main.rs
@ -1,19 +1,16 @@
 use std::fs;
 use std::io::{self, Write};
-use std::ops::Range;
-use std::path::{Path, PathBuf};
+use std::path::Path;
 use std::process;

-use anyhow::{anyhow, bail, Context};
+use anyhow::Context as _;
 use codespan_reporting::diagnostic::{Diagnostic, Label};
-use codespan_reporting::files::{self, Files};
 use codespan_reporting::term::{self, termcolor, Config, Styles};
 use same_file::is_same_file;
 use termcolor::{ColorChoice, StandardStream, WriteColor};

 use typst::diag::{Error, Tracepoint};
-use typst::loading::{FileId, FsLoader};
-use typst::source::{SourceFile, SourceMap};
+use typst::source::SourceStore;

 fn main() {
     if let Err(error) = try_main() {
@ -32,19 +29,17 @@ fn try_main() -> anyhow::Result<()> {

     // Determine source and destination path.
     let src_path = Path::new(&args[1]);
-    let dest_path = if let Some(arg) = args.get(2) {
-        PathBuf::from(arg)
-    } else {
-        let name = src_path
-            .file_name()
-            .ok_or_else(|| anyhow!("source path is not a file"))?;
-
-        Path::new(name).with_extension("pdf")
+    let dest_path = match args.get(2) {
+        Some(path) => path.into(),
+        None => {
+            let name = src_path.file_name().context("source path is not a file")?;
+            Path::new(name).with_extension("pdf")
+        }
     };

     // Ensure that the source file is not overwritten.
     if is_same_file(src_path, &dest_path).unwrap_or(false) {
-        bail!("source and destination files are the same");
+        anyhow::bail!("source and destination files are the same");
     }

     // Create a loader for fonts and files.
@ -53,14 +48,15 @@ fn try_main() -> anyhow::Result<()> {
         .with_system()
         .wrap();

-    // Resolve the file id of the source file and read the file.
-    let file = loader.resolve(src_path).context("source file not found")?;
-    let string = fs::read_to_string(&src_path).context("failed to read source file")?;
-    let source = SourceFile::new(file, string);
+    // Create the context which holds loaded source files, fonts, images and
+    // cached artifacts.
+    let mut ctx = typst::Context::new(loader);
+
+    // Load the source file.
+    let id = ctx.sources.load(&src_path).context("source file not found")?;

     // Typeset.
-    let mut ctx = typst::Context::new(loader.clone());
-    match ctx.typeset(&source) {
+    match ctx.typeset(id) {
         // Export the PDF.
         Ok(document) => {
             let buffer = typst::export::pdf(&ctx, &document);
@ -69,8 +65,7 @@ fn try_main() -> anyhow::Result<()> {

         // Print diagnostics.
         Err(errors) => {
-            ctx.sources.insert(source);
-            print_diagnostics(&loader, &ctx.sources, *errors)
+            print_diagnostics(&ctx.sources, *errors)
                 .context("failed to print diagnostics")?;
         }
     }
@ -110,21 +105,19 @@ fn print_error(error: anyhow::Error) -> io::Result<()> {

 /// Print diagnostics messages to the terminal.
 fn print_diagnostics(
-    loader: &FsLoader,
-    sources: &SourceMap,
+    sources: &SourceStore,
     errors: Vec<Error>,
-) -> Result<(), files::Error> {
+) -> Result<(), codespan_reporting::files::Error> {
     let mut writer = StandardStream::stderr(ColorChoice::Always);
     let config = Config { tab_width: 2, ..Default::default() };
-    let files = FilesImpl(loader, sources);

     for error in errors {
         // The main diagnostic.
         let main = Diagnostic::error()
             .with_message(error.message)
-            .with_labels(vec![Label::primary(error.file, error.span.to_range())]);
+            .with_labels(vec![Label::primary(error.source, error.span.to_range())]);

-        term::emit(&mut writer, &config, &files, &main)?;
+        term::emit(&mut writer, &config, sources, &main)?;

         // Stacktrace-like helper diagnostics.
         for (file, span, point) in error.trace {
@ -140,61 +133,9 @@ fn print_diagnostics(
             .with_message(message)
             .with_labels(vec![Label::primary(file, span.to_range())]);

-            term::emit(&mut writer, &config, &files, &help)?;
+            term::emit(&mut writer, &config, sources, &help)?;
         }
     }

     Ok(())
 }

-/// Required for error message formatting with codespan-reporting.
-struct FilesImpl<'a>(&'a FsLoader, &'a SourceMap);
-
-impl FilesImpl<'_> {
-    fn source(&self, id: FileId) -> Result<&SourceFile, files::Error> {
-        self.1.get(id).ok_or(files::Error::FileMissing)
-    }
-}
-
-impl<'a> Files<'a> for FilesImpl<'a> {
-    type FileId = FileId;
-    type Name = String;
-    type Source = &'a str;
-
-    fn name(&'a self, id: FileId) -> Result<Self::Name, files::Error> {
-        Ok(self.0.path(id).display().to_string())
-    }
-
-    fn source(&'a self, id: FileId) -> Result<Self::Source, files::Error> {
-        Ok(self.source(id)?.src())
-    }
-
-    fn line_index(
-        &'a self,
-        id: FileId,
-        byte_index: usize,
-    ) -> Result<usize, files::Error> {
-        let source = self.source(id)?;
-        source.pos_to_line(byte_index.into()).ok_or_else(|| {
-            let (given, max) = (byte_index, source.len_bytes());
-            if given <= max {
-                files::Error::InvalidCharBoundary { given }
-            } else {
-                files::Error::IndexTooLarge { given, max }
-            }
-        })
-    }
-
-    fn line_range(
-        &'a self,
-        id: FileId,
-        line_index: usize,
-    ) -> Result<Range<usize>, files::Error> {
-        let source = self.source(id)?;
-        let span = source.line_to_span(line_index).ok_or(files::Error::LineTooLarge {
-            given: line_index,
-            max: source.len_lines(),
-        })?;
-        Ok(span.to_range())
-    }
-}
@ -82,7 +82,7 @@ impl<'s> Parser<'s> {

     /// Add an error with location and message.
     pub fn error(&mut self, span: impl Into<Span>, message: impl Into<String>) {
-        self.errors.push(Error::new(self.source.file(), span, message));
+        self.errors.push(Error::new(self.source.id(), span, message));
     }

     /// Eat the next token and add an error that it is not the expected `thing`.
@ -608,7 +608,6 @@ pretty_display! {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::loading::FileId;
     use crate::parse::parse;
     use crate::source::SourceFile;

@ -619,7 +618,7 @@ mod tests {

     #[track_caller]
     fn test_parse(src: &str, exp: &str) {
-        let source = SourceFile::new(FileId::from_raw(0), src.into());
+        let source = SourceFile::detached(src);
         let ast = parse(&source).unwrap();
         let found = pretty(&ast);
         if exp != found {
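A detached file carries a dummy id and an empty path, so tests can drive the parser without a loader or store; a minimal sketch of that pattern (the `assert_parses` helper is hypothetical, mirroring the updated test above):

    use typst::parse::parse;
    use typst::source::SourceFile;

    // No loader, no SourceStore: detached files are enough for parser tests.
    fn assert_parses(src: &str) {
        let source = SourceFile::detached(src);
        assert!(parse(&source).is_ok());
    }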
src/source.rs
@ -1,55 +1,126 @@
 //! Source files.

-use std::collections::{hash_map::Entry, HashMap};
-
-use crate::loading::FileId;
+use std::collections::HashMap;
+use std::io;
+use std::path::{Path, PathBuf};
+use std::rc::Rc;
+
+#[cfg(feature = "codespan-reporting")]
+use codespan_reporting::files::{self, Files};
+use serde::{Deserialize, Serialize};
+
+use crate::loading::{FileHash, Loader};
 use crate::parse::{is_newline, Scanner};
 use crate::syntax::{Pos, Span};
+use crate::util::PathExt;

-/// A store for loaded source files.
-#[derive(Default)]
-pub struct SourceMap {
-    sources: HashMap<FileId, SourceFile>,
-}
-
-impl SourceMap {
-    /// Create a new, empty source map
-    pub fn new() -> Self {
-        Self::default()
-    }
-
-    /// Get a source file by id.
-    pub fn get(&self, file: FileId) -> Option<&SourceFile> {
-        self.sources.get(&file)
-    }
-
-    /// Insert a sources.
-    pub fn insert(&mut self, source: SourceFile) -> &SourceFile {
-        match self.sources.entry(source.file) {
-            Entry::Occupied(mut entry) => {
-                entry.insert(source);
-                entry.into_mut()
-            }
-            Entry::Vacant(entry) => entry.insert(source),
-        }
-    }
-
-    /// Remove all sources.
-    pub fn clear(&mut self) {
-        self.sources.clear();
-    }
-}
+/// A unique identifier for a loaded source file.
+#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
+#[derive(Serialize, Deserialize)]
+pub struct SourceId(u32);
+
+impl SourceId {
+    /// Create a source id from the raw underlying value.
+    ///
+    /// This should only be called with values returned by
+    /// [`into_raw`](Self::into_raw).
+    pub const fn from_raw(v: u32) -> Self {
+        Self(v)
+    }
+
+    /// Convert into the raw underlying value.
+    pub const fn into_raw(self) -> u32 {
+        self.0
+    }
+}
+
+/// Storage for loaded source files.
+pub struct SourceStore {
+    loader: Rc<dyn Loader>,
+    files: HashMap<FileHash, SourceId>,
+    sources: Vec<SourceFile>,
+}
+
+impl SourceStore {
+    /// Create a new, empty source store.
+    pub fn new(loader: Rc<dyn Loader>) -> Self {
+        Self {
+            loader,
+            files: HashMap::new(),
+            sources: vec![],
+        }
+    }
+
+    /// Load a source file from a path using the `loader`.
+    pub fn load(&mut self, path: &Path) -> io::Result<SourceId> {
+        let hash = self.loader.resolve(path)?;
+        if let Some(&id) = self.files.get(&hash) {
+            return Ok(id);
+        }
+
+        let data = self.loader.load(path)?;
+        let src = String::from_utf8(data).map_err(|_| {
+            io::Error::new(io::ErrorKind::InvalidData, "file is not valid utf-8")
+        })?;
+
+        Ok(self.insert(Some(hash), path, src))
+    }
+
+    /// Directly provide a source file.
+    ///
+    /// The `path` does not need to be [resolvable](Loader::resolve) through the
+    /// `loader`. If it is though, imports that resolve to the same file hash
+    /// will use the inserted file instead of going through [`Loader::load`].
+    ///
+    /// If the path is resolvable and points to an existing source file, it is
+    /// overwritten.
+    pub fn provide(&mut self, path: &Path, src: String) -> SourceId {
+        if let Ok(hash) = self.loader.resolve(path) {
+            if let Some(&id) = self.files.get(&hash) {
+                // Already loaded, so we replace it.
+                self.sources[id.0 as usize] = SourceFile::new(id, path, src);
+                id
+            } else {
+                // Not loaded yet.
+                self.insert(Some(hash), path, src)
+            }
+        } else {
+            // Not known to the loader.
+            self.insert(None, path, src)
+        }
+    }
+
+    /// Insert a new source file.
+    fn insert(&mut self, hash: Option<FileHash>, path: &Path, src: String) -> SourceId {
+        let id = SourceId(self.sources.len() as u32);
+        if let Some(hash) = hash {
+            self.files.insert(hash, id);
+        }
+        self.sources.push(SourceFile::new(id, path, src));
+        id
+    }
+
+    /// Get a reference to a loaded source file.
+    ///
+    /// This panics if no source file with this id was loaded. This function
+    /// should only be called with ids returned by this store's
+    /// [`load()`](Self::load) and [`provide()`](Self::provide) methods.
+    #[track_caller]
+    pub fn get(&self, id: SourceId) -> &SourceFile {
+        &self.sources[id.0 as usize]
+    }
+}

 /// A single source file.
 pub struct SourceFile {
-    file: FileId,
+    id: SourceId,
+    path: PathBuf,
     src: String,
     line_starts: Vec<Pos>,
 }

 impl SourceFile {
-    /// Create a new source file from string.
-    pub fn new(file: FileId, src: String) -> Self {
+    fn new(id: SourceId, path: &Path, src: String) -> Self {
         let mut line_starts = vec![Pos::ZERO];
         let mut s = Scanner::new(&src);

@ -62,12 +133,27 @@ impl SourceFile {
             }
         }

-        Self { file, src, line_starts }
+        Self {
+            id,
+            path: path.normalize(),
+            src,
+            line_starts,
+        }
     }

-    /// The file id.
-    pub fn file(&self) -> FileId {
-        self.file
+    /// Create a source file without a real id and path, usually for testing.
+    pub fn detached(src: impl Into<String>) -> Self {
+        Self::new(SourceId(0), Path::new(""), src.into())
+    }
+
+    /// The id of the source file.
+    pub fn id(&self) -> SourceId {
+        self.id
+    }
+
+    /// The path to the source file.
+    pub fn path(&self) -> &Path {
+        &self.path
     }

     /// The whole source as a string slice.
@ -150,22 +236,73 @@ fn width(c: char) -> usize {
     if c == '\t' { 2 } else { 1 }
 }

+impl AsRef<str> for SourceFile {
+    fn as_ref(&self) -> &str {
+        &self.src
+    }
+}
+
+#[cfg(feature = "codespan-reporting")]
+impl<'a> Files<'a> for SourceStore {
+    type FileId = SourceId;
+    type Name = std::path::Display<'a>;
+    type Source = &'a SourceFile;
+
+    fn name(&'a self, id: SourceId) -> Result<Self::Name, files::Error> {
+        Ok(self.get(id).path().display())
+    }
+
+    fn source(&'a self, id: SourceId) -> Result<Self::Source, files::Error> {
+        Ok(self.get(id))
+    }
+
+    fn line_index(
+        &'a self,
+        id: SourceId,
+        byte_index: usize,
+    ) -> Result<usize, files::Error> {
+        let source = self.get(id);
+        source.pos_to_line(byte_index.into()).ok_or_else(|| {
+            let (given, max) = (byte_index, source.len_bytes());
+            if given <= max {
+                files::Error::InvalidCharBoundary { given }
+            } else {
+                files::Error::IndexTooLarge { given, max }
+            }
+        })
+    }
+
+    fn line_range(
+        &'a self,
+        id: SourceId,
+        line_index: usize,
+    ) -> Result<std::ops::Range<usize>, files::Error> {
+        let source = self.get(id);
+        match source.line_to_span(line_index) {
+            Some(span) => Ok(span.to_range()),
+            None => Err(files::Error::LineTooLarge {
+                given: line_index,
+                max: source.len_lines(),
+            }),
+        }
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;

-    const ID: FileId = FileId::from_raw(0);
     const TEST: &str = "äbcde\nf💛g\r\nhi\rjkl";

     #[test]
     fn test_source_file_new() {
-        let source = SourceFile::new(ID, TEST.into());
+        let source = SourceFile::detached(TEST);
         assert_eq!(source.line_starts, vec![Pos(0), Pos(7), Pos(15), Pos(18)]);
     }

     #[test]
     fn test_source_file_pos_to_line() {
-        let source = SourceFile::new(ID, TEST.into());
+        let source = SourceFile::detached(TEST);
         assert_eq!(source.pos_to_line(Pos(0)), Some(0));
         assert_eq!(source.pos_to_line(Pos(2)), Some(0));
         assert_eq!(source.pos_to_line(Pos(6)), Some(0));
@ -186,7 +323,7 @@ mod tests {
         assert_eq!(result, byte_pos);
     }

-        let source = SourceFile::new(ID, TEST.into());
+        let source = SourceFile::detached(TEST);
         roundtrip(&source, Pos(0));
         roundtrip(&source, Pos(7));
         roundtrip(&source, Pos(12));
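For illustration, a sketch of the deduplication and pre-seeding behavior the store documents above (the file names are placeholders, and the `demo` helper is not part of the commit; both assertions restate what `load` and `provide` already promise):

    use std::io;
    use std::path::Path;

    fn demo(ctx: &mut typst::Context) -> io::Result<()> {
        // Two spellings of the same file resolve to one hash and thus one id.
        let first = ctx.sources.load(Path::new("main.typ"))?;
        let again = ctx.sources.load(Path::new("./main.typ"))?;
        assert_eq!(first, again);

        // `provide` bypasses the loader and pre-seeds (or replaces) a file.
        let id = ctx.sources.provide(Path::new("generated.typ"), String::from("Hello"));
        assert_eq!(ctx.sources.get(id).id(), id);
        Ok(())
    }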
@ -49,7 +49,7 @@

 ---
 // Some non-text stuff.
-// Error: 16-37 file is not valid utf-8
+// Error: 16-37 failed to load source file (file is not valid utf-8)
 #import * from "../../res/rhino.png"

 ---
@ -36,5 +36,5 @@
 #image("path/does/not/exist")

 ---
-// Error: 8-21 failed to load image
+// Error: 8-21 failed to load image (unknown image format)
 #image("./image.typ")
@ -16,9 +16,9 @@ use typst::exec::{exec, State};
 use typst::geom::{self, Length, PathElement, Point, Sides, Size};
 use typst::image::ImageId;
 use typst::layout::{layout, Element, Frame, Geometry, LayoutTree, Paint, Text};
-use typst::loading::{FileId, FsLoader};
+use typst::loading::FsLoader;
 use typst::parse::{parse, Scanner};
-use typst::source::SourceFile;
+use typst::source::{SourceFile, SourceId};
 use typst::syntax::Pos;
 use typst::Context;

@ -71,7 +71,7 @@ fn main() {
     let rhs = args.expect::<Value>("right-hand side")?;
     if lhs != rhs {
         typst::bail!(
-            args.file,
+            args.source,
             args.span,
             "Assertion failed: {:?} != {:?}",
             lhs,
@ -83,7 +83,7 @@ fn main() {

     // Create loader and context.
     let loader = FsLoader::new().with_path(FONT_DIR).wrap();
-    let mut ctx = Context::builder().std(std).state(state).build(loader.clone());
+    let mut ctx = Context::builder().std(std).state(state).build(loader);

     // Run all the tests.
     let mut ok = true;
@ -96,7 +96,6 @@ fn main() {

     ok &= test(
         &mut ctx,
-        loader.as_ref(),
         &src_path,
         &png_path,
         &ref_path,
@ -144,7 +143,6 @@ impl Args {

 fn test(
     ctx: &mut Context,
-    loader: &FsLoader,
     src_path: &Path,
     png_path: &Path,
     ref_path: &Path,
@ -153,7 +151,6 @@ fn test(
     let name = src_path.strip_prefix(TYP_DIR).unwrap_or(src_path);
     println!("Testing {}", name.display());

-    let file = loader.resolve(src_path).unwrap();
     let src = fs::read_to_string(src_path).unwrap();

     let mut ok = true;
@ -178,7 +175,7 @@ fn test(
         }
     } else {
         let (part_ok, compare_here, part_frames) =
-            test_part(ctx, file, part, i, compare_ref, line);
+            test_part(ctx, src_path, part.into(), i, compare_ref, line);
         ok &= part_ok;
         compare_ever |= compare_here;
         frames.extend(part_frames);
@ -218,19 +215,21 @@ fn test(

 fn test_part(
     ctx: &mut Context,
-    file: FileId,
-    src: &str,
+    src_path: &Path,
+    src: String,
     i: usize,
     compare_ref: bool,
     line: usize,
 ) -> (bool, bool, Vec<Rc<Frame>>) {
-    let source = SourceFile::new(file, src.into());
+    let id = ctx.sources.provide(src_path, src);
+    let source = ctx.sources.get(id);

     let (local_compare_ref, mut ref_errors) = parse_metadata(&source);
     let compare_ref = local_compare_ref.unwrap_or(compare_ref);

     let mut ok = true;

-    let result = typeset(ctx, &source);
+    let result = typeset(ctx, id);
     let (frames, mut errors) = match result {
         #[allow(unused_variables)]
         Ok((tree, mut frames)) => {
@ -247,7 +246,7 @@ fn test_part(
     };

     // TODO: Also handle errors from other files.
-    errors.retain(|error| error.file == source.file());
+    errors.retain(|error| error.source == id);
     for error in &mut errors {
         error.trace.clear();
     }
@ -259,8 +258,9 @@ fn test_part(
         println!(" Subtest {} does not match expected errors. ❌", i);
         ok = false;

+        let source = ctx.sources.get(id);
         for error in errors.iter() {
-            if error.file == file && !ref_errors.contains(error) {
+            if error.source == id && !ref_errors.contains(error) {
                 print!(" Not annotated | ");
                 print_error(&source, line, error);
             }
@ -277,6 +277,15 @@ fn test_part(
     (ok, compare_ref, frames)
 }

+fn typeset(ctx: &mut Context, id: SourceId) -> TypResult<(LayoutTree, Vec<Rc<Frame>>)> {
+    let source = ctx.sources.get(id);
+    let ast = parse(source)?;
+    let module = eval(ctx, id, Rc::new(ast))?;
+    let tree = exec(ctx, &module.template);
+    let frames = layout(ctx, &tree);
+    Ok((tree, frames))
+}
+
 #[cfg(feature = "layout-cache")]
 fn test_incremental(
     ctx: &mut Context,
@ -362,28 +371,17 @@ fn parse_metadata(source: &SourceFile) -> (Option<bool>, Vec<Error>) {
         let start = pos(&mut s);
         let end = if s.eat_if('-') { pos(&mut s) } else { start };

-        errors.push(Error::new(source.file(), start .. end, s.rest().trim()));
+        errors.push(Error::new(source.id(), start .. end, s.rest().trim()));
     }

     (compare_ref, errors)
 }

-fn typeset(
-    ctx: &mut Context,
-    source: &SourceFile,
-) -> TypResult<(LayoutTree, Vec<Rc<Frame>>)> {
-    let ast = parse(source)?;
-    let module = eval(ctx, source.file(), Rc::new(ast))?;
-    let tree = exec(ctx, &module.template);
-    let frames = layout(ctx, &tree);
-    Ok((tree, frames))
-}
-
 fn print_error(source: &SourceFile, line: usize, error: &Error) {
-    let start_line = line + source.pos_to_line(error.span.start).unwrap();
-    let start_col = source.pos_to_column(error.span.start).unwrap();
-    let end_line = line + source.pos_to_line(error.span.end).unwrap();
-    let end_col = source.pos_to_column(error.span.end).unwrap();
+    let start_line = 1 + line + source.pos_to_line(error.span.start).unwrap();
+    let start_col = 1 + source.pos_to_column(error.span.start).unwrap();
+    let end_line = 1 + line + source.pos_to_line(error.span.end).unwrap();
+    let end_col = 1 + source.pos_to_column(error.span.end).unwrap();
     println!(
         "Error: {}:{}-{}:{}: {}",
         start_line, start_col, end_line, end_col, error.message