mirror of https://github.com/typst/typst, synced 2025-05-13 20:46:23 +08:00
Reformat some things 🧺
This commit is contained in:
parent e2d17aa9d9
commit 5c04185892
@@ -29,7 +29,9 @@ fn run() -> Result<(), Box<dyn Error>> {
    // Compute the output filename from the input filename by replacing the extension.
    let dest_path = if args.len() <= 2 {
        let stem = source_path.file_stem().ok_or_else(|| "missing destation file name")?;

        let base = source_path.parent().ok_or_else(|| "missing destation folder")?;

        base.join(format!("{}.pdf", stem.to_string_lossy()))
    } else {
        PathBuf::from(&args[2])
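For readers skimming the hunk, the same destination-path logic reads as the standalone sketch below. The `dest_path` helper, the `main` wrapper, and the `document.typ` input name are illustrative assumptions, not code from this commit.

    use std::path::{Path, PathBuf};

    /// Derive the output PDF path from the source path, unless an explicit
    /// destination was passed as the second CLI argument.
    fn dest_path(source_path: &Path, args: &[String]) -> Result<PathBuf, &'static str> {
        if args.len() <= 2 {
            // Swap the source file's extension for `.pdf`, keeping its folder.
            let stem = source_path.file_stem().ok_or("missing destination file name")?;
            let base = source_path.parent().ok_or("missing destination folder")?;
            Ok(base.join(format!("{}.pdf", stem.to_string_lossy())))
        } else {
            Ok(PathBuf::from(&args[2]))
        }
    }

    fn main() {
        // `document.typ` stands in for whatever source file was passed on the CLI.
        let args: Vec<String> = std::env::args().collect();
        match dest_path(Path::new("document.typ"), &args) {
            Ok(path) => println!("writing to {}", path.display()),
            Err(msg) => eprintln!("error: {}", msg),
        }
    }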
@@ -5,6 +5,7 @@ use std::collections::HashMap;
use std::fmt::{self, Debug, Formatter};

use toddle::query::FontClass;

use crate::layout::{Layout, MultiLayout, LayoutContext, LayoutResult};
use crate::parsing::{ParseContext, ParseResult};
use crate::syntax::{SyntaxTree, FuncHeader};
@@ -1,7 +1,8 @@
//! Drawing and cofiguration actions used by layouts.
//! Drawing and cofiguration actions composing layouts.

use std::fmt::{self, Display, Formatter};
use std::io::{self, Write};

use crate::size::Size2D;
use super::Layout;
use LayoutAction::*;
@@ -1,10 +1,6 @@
//! Flexible and lazy layouting of boxes.

use crate::size::{Size, Size2D};
use super::*;

/// Finishes a flex layout by justifying the positions of the individual boxes.
#[derive(Debug)]
pub struct FlexLayouter {
@@ -1,4 +1,4 @@
//! The layouting engine.
//! The core layouting engine.

use std::borrow::Cow;
use std::io::{self, Write};
@@ -12,7 +12,6 @@ use crate::size::{Size, Size2D, SizeBox};
use crate::syntax::{SyntaxTree, Node, FuncCall};
use crate::style::TextStyle;

mod text;
mod stacked;
mod flex;
@@ -1,5 +1,3 @@
//! Layouting of text into boxes.

use toddle::query::{FontQuery, SharedFontLoader};
use toddle::tables::{Header, CharMap, HorizontalMetrics};
10 src/lib.rs
@@ -2,12 +2,11 @@
//!
//! # Steps
//! - **Parsing:** The parsing step first transforms a plain string into an [iterator of
//! tokens](crate::parsing::Tokens). Then the [parser](crate::parsing::Parser) operates on that to
//! construct a syntax tree. The structures describing the tree can be found in the [syntax]
//! module.
//! tokens](crate::parsing::Tokens). Then parser constructs a syntax tree from the token stream.
//! The structures describing the tree can be found in the [syntax]. Dynamic functions parse
//! their own bodies themselves.
//! - **Layouting:** The next step is to transform the syntax tree into a portable representation of
//! the typesetted document. Types for these can be found in the [doc] and [layout] modules. This
//! representation contains already the finished layout.
//! the typesetted document. Types for these can be found in the [layout] module.
//! - **Exporting:** The finished document can then be exported into supported formats. Submodules
//! for the supported formats are located in the [export] module. Currently the only supported
//! format is _PDF_.
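As an aside, the three steps this doc comment describes can be pictured as a small data-flow sketch. Every name below (`SyntaxTree`, `Layout`, `Pdf`, `parse`, `layout`, `export_pdf`) is a stand-in chosen for illustration, not the crate's actual API at this commit.

    // Stand-in types sketching the parse -> layout -> export flow described above.
    struct SyntaxTree;      // result of the parsing step (tokens -> tree)
    struct Layout;          // portable representation produced by layouting
    struct Pdf(Vec<u8>);    // exported bytes; PDF is the only supported format so far

    fn parse(_source: &str) -> SyntaxTree { SyntaxTree }
    fn layout(_tree: &SyntaxTree) -> Layout { Layout }
    fn export_pdf(_layout: &Layout) -> Pdf { Pdf(Vec::new()) }

    fn main() {
        // Plain string -> syntax tree -> layout -> PDF bytes.
        let tree = parse("Hello World!");
        let doc = layout(&tree);
        let _pdf = export_pdf(&doc);
    }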
@@ -15,6 +14,7 @@
pub extern crate toddle;

use std::cell::RefCell;

use toddle::query::{FontLoader, SharedFontLoader, FontProvider};

use crate::func::Scope;
@@ -1,5 +1,3 @@
//! Alignment function.

use super::prelude::*;
use crate::layout::Alignment;
@@ -1,7 +1,6 @@
//! Basic style functions: bold, italic, monospace.
use toddle::query::FontClass;

use super::prelude::*;
use toddle::query::FontClass;

macro_rules! style_func {
@@ -1,17 +1,10 @@
//! Error handling.

/// Create an error type.
macro_rules! error_type {
    ( // The variable used instead of self in functions
    // followed by the error type things are happening on.
    (
        $var:ident: $err:ident,
        // Optionally the name of a result type to generate.
        $(res: $res:ident,)*
        // A `Display` and `Debug` implementation.
        show: $f:ident => $show:expr,
        // Optionally a `source` function for the `std::error::Error` trait.
        $(source: $source:expr,)*
        // Any number of `From` implementations.
        $(from: ($from:path, $conv:expr),)*
    ) => {
        // Possibly create a result type.
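To make the documented arms concrete, an invocation shaped like this pattern might look as follows. `LoadError`, its `message` field, and `LoadResult` are invented names for illustration, and the snippet only compiles once the full macro body (cut off in this hunk) is in scope.

    // Hypothetical use of `error_type!`, matching the arms documented above.
    struct LoadError {
        message: String,
    }

    error_type! {
        err: LoadError,     // `$var: $err`: `err` stands in for `self` in the bodies below
        res: LoadResult,    // optional result type to generate
        show: f => write!(f, "load error: {}", err.message),
        from: (std::io::Error, LoadError { message: err.to_string() }),
    }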
@@ -1,6 +1,7 @@
//! Parsing of source code into token streams an syntax trees.
//! Parsing of source code into token streams and syntax trees.

use std::collections::HashMap;

use unicode_xid::UnicodeXID;

use crate::func::{Function, Scope};
@@ -8,6 +9,7 @@ use crate::syntax::*;
use crate::size::Size;

mod tokens;

pub use tokens::{tokenize, Tokens};
@@ -539,6 +541,7 @@ mod tests {

    /// Parse the basic cases.
    #[test]
    #[rustfmt::skip]
    fn parse_base() {
        test("", tree! []);
        test("Hello World!", tree! [ T("Hello"), S, T("World!") ]);
@@ -546,6 +549,7 @@ mod tests {

    /// Test whether newlines generate the correct whitespace.
    #[test]
    #[rustfmt::skip]
    fn parse_newlines_whitespace() {
        test("Hello\nWorld", tree! [ T("Hello"), S, T("World") ]);
        test("Hello \n World", tree! [ T("Hello"), S, T("World") ]);
@@ -558,6 +562,7 @@ mod tests {

    /// Parse things dealing with functions.
    #[test]
    #[rustfmt::skip]
    fn parse_functions() {
        let mut scope = Scope::new();
        scope.add::<BodylessFn>("test");
@@ -590,6 +595,7 @@ mod tests {

    /// Parse functions with arguments.
    #[test]
    #[rustfmt::skip]
    fn parse_function_args() {
        use Expression::{Number as N, Size as Z, Bool as B};
@@ -599,14 +605,16 @@ mod tests {
        fn I(string: &str) -> Expression { Expression::Ident(string.to_owned()) }

        fn func(name: &str, args: Vec<Expression>) -> SyntaxTree {
            tree! [ F(FuncCall {
                header: FuncHeader {
                    name: name.to_string(),
                    args,
                    kwargs: HashMap::new(),
                },
                body: Box::new(BodylessFn)
            }) ]
            tree! [
                F(FuncCall {
                    header: FuncHeader {
                        name: name.to_string(),
                        args,
                        kwargs: HashMap::new(),
                    },
                    body: Box::new(BodylessFn)
                })
            ]
        }

        let mut scope = Scope::new();
@@ -626,6 +634,7 @@ mod tests {

    /// Parse comments (line and block).
    #[test]
    #[rustfmt::skip]
    fn parse_comments() {
        let mut scope = Scope::new();
        scope.add::<BodylessFn>("test");
@@ -641,6 +650,7 @@ mod tests {

    /// Test if escaped, but unbalanced parens are correctly parsed.
    #[test]
    #[rustfmt::skip]
    fn parse_unbalanced_body_parens() {
        let mut scope = Scope::new();
        scope.add::<TreeFn>("code");
@@ -667,6 +677,7 @@ mod tests {

    /// Tests if the parser handles non-ASCII stuff correctly.
    #[test]
    #[rustfmt::skip]
    fn parse_unicode() {
        let mut scope = Scope::new();
        scope.add::<BodylessFn>("func");
@@ -689,6 +700,7 @@ mod tests {

    /// Tests whether errors get reported correctly.
    #[test]
    #[rustfmt::skip]
    fn parse_errors() {
        let mut scope = Scope::new();
        scope.add::<TreeFn>("hello");
@@ -1,7 +1,7 @@
//! Tokenization of text.

use std::str::CharIndices;

use smallvec::SmallVec;

use crate::syntax::*;
@@ -350,6 +350,7 @@ mod tests {

    /// Tokenizes the basic building blocks.
    #[test]
    #[rustfmt::skip]
    fn tokenize_base() {
        test("", vec![]);
        test("Hallo", vec![T("Hallo")]);
@@ -363,17 +364,20 @@ mod tests {

    /// This test looks if LF- and CRLF-style newlines get both identified correctly.
    #[test]
    #[rustfmt::skip]
    fn tokenize_whitespace_newlines() {
        test(" \t", vec![S]);
        test("First line\r\nSecond line\nThird line\n",
             vec![T("First"), S, T("line"), N, T("Second"), S, T("line"), N,
                  T("Third"), S, T("line"), N]);
        test("First line\r\nSecond line\nThird line\n", vec![
            T("First"), S, T("line"), N, T("Second"), S, T("line"), N,
            T("Third"), S, T("line"), N
        ]);
        test("Hello \n ", vec![T("Hello"), S, N, S]);
        test("Dense\nTimes", vec![T("Dense"), N, T("Times")]);
    }

    /// Tests if escaping with backslash works as it should.
    #[test]
    #[rustfmt::skip]
    fn tokenize_escape() {
        test(r"\[", vec![T("[")]);
        test(r"\]", vec![T("]")]);
@@ -386,12 +390,14 @@ mod tests {

    /// Tests if escaped strings work.
    #[test]
    #[rustfmt::skip]
    fn tokenize_quoted() {
        test(r#"[align: "hello\"world"]"#, vec![L, T("align"), C, S, Q(r#"hello\"world"#), R]);
    }

    /// Tokenizes some more realistic examples.
    #[test]
    #[rustfmt::skip]
    fn tokenize_examples() {
        test(r"
            [function][
@@ -418,29 +424,30 @@ mod tests {
    /// This test checks whether the colon and equals symbols get parsed correctly depending on the
    /// context: Either in a function header or in a body.
    #[test]
    #[rustfmt::skip]
    fn tokenize_symbols_context() {
        test("[func: key=value][Answer: 7]",
             vec![L, T("func"), C, S, T("key"), E, T("value"), R, L,
                  T("Answer:"), S, T("7"), R]);
        test("[[n: k=v]:x][:[=]]:=",
             vec![L, L, T("n"), C, S, T("k"), E, T("v"), R, C, T("x"), R,
                  L, T(":"), L, E, R, R, T(":=")]);
        test("[hi: k=[func][body] v=1][hello]",
             vec![L, T("hi"), C, S, T("k"), E, L, T("func"), R, L, T("body"), R, S,
                  T("v"), E, T("1"), R, L, T("hello"), R]);
        test("[func: __key__=value]",
             vec![L, T("func"), C, S, T("__key__"), E, T("value"), R]);
        test("The /*[*/ answer: 7.",
             vec![T("The"), S, BC("["), S, T("answer:"), S, T("7.")]);
        test("[func: key=value][Answer: 7]", vec![
            L, T("func"), C, S, T("key"), E, T("value"), R, L,
            T("Answer:"), S, T("7"), R
        ]);
        test("[[n: k=v]:x][:[=]]:=", vec![
            L, L, T("n"), C, S, T("k"), E, T("v"), R, C, T("x"), R,
            L, T(":"), L, E, R, R, T(":=")
        ]);
        test("[hi: k=[func][body] v=1][hello]", vec![
            L, T("hi"), C, S, T("k"), E, L, T("func"), R, L, T("body"), R, S,
            T("v"), E, T("1"), R, L, T("hello"), R
        ]);
        test("[func: __key__=value]", vec![L, T("func"), C, S, T("__key__"), E, T("value"), R]);
        test("The /*[*/ answer: 7.", vec![T("The"), S, BC("["), S, T("answer:"), S, T("7.")]);
    }

    /// Test if block and line comments get tokenized as expected.
    #[test]
    #[rustfmt::skip]
    fn tokenize_comments() {
        test("These // Line comments.",
             vec![T("These"), S, LC(" Line comments.")]);
        test("This /* is */ a comment.",
             vec![T("This"), S, BC(" is "), S, T("a"), S, T("comment.")]);
        test("These // Line comments.", vec![T("These"), S, LC(" Line comments.")]);
        test("This /* is */ a comment.", vec![T("This"), S, BC(" is "), S, T("a"), S, T("comment.")]);
        test("[Head/*of*/][Body]", vec![L, T("Head"), BC("of"), R, L, T("Body"), R]);
        test("/* Hey */ */", vec![BC(" Hey "), S, SS]);
        test("Hey\n// Yoo /*\n*/", vec![T("Hey"), N, LC(" Yoo /*"), N, SS]);
@@ -449,6 +456,7 @@ mod tests {

    /// This test has a special look at the underscore syntax.
    #[test]
    #[rustfmt::skip]
    fn tokenize_underscores() {
        test("he_llo_world_ __ Now this_ is_ special!",
             vec![T("he"), TU, T("llo"), TU, T("world"), TU, S, TU, TU, S, T("Now"), S,
@@ -457,9 +465,9 @@ mod tests {

    /// This test is for checking if non-ASCII characters get parsed correctly.
    #[test]
    #[rustfmt::skip]
    fn tokenize_unicode() {
        test("[document][Hello 🌍!]",
             vec![L, T("document"), R, L, T("Hello"), S, T("🌍!"), R]);
        test("[document][Hello 🌍!]", vec![L, T("document"), R, L, T("Hello"), S, T("🌍!"), R]);
        test("[f]⺐.", vec![L, T("f"), R, T("⺐.")]);
    }
}
@@ -1,4 +1,4 @@
//! General spacing types.
//! Different-dimensional spacing types.

use std::cmp::Ordering;
use std::fmt::{self, Display, Formatter};
@@ -1,6 +1,7 @@
//! Styles for layouting.
//! Layouting styles.

use toddle::query::FontClass;

use crate::size::{Size, Size2D, SizeBox};
@@ -2,6 +2,7 @@

use std::collections::HashMap;
use std::fmt::{self, Display, Formatter};

use crate::func::Function;
use crate::size::Size;