Many fixes

Laurenz 2022-06-13 23:16:40 +02:00
parent 891af17260
commit c81e2a5f56
74 changed files with 593 additions and 675 deletions


@ -116,7 +116,7 @@ fn process_const(
) -> Result<(Property, syn::ItemMod)> { ) -> Result<(Property, syn::ItemMod)> {
let property = parse_property(item)?; let property = parse_property(item)?;
// The display name, e.g. `TextNode::STRONG`. // The display name, e.g. `TextNode::BOLD`.
let name = format!("{}::{}", self_name, &item.ident); let name = format!("{}::{}", self_name, &item.ident);
// The type of the property's value is what the user of our macro wrote // The type of the property's value is what the user of our macro wrote
@ -134,7 +134,7 @@ fn process_const(
value_ty.clone() value_ty.clone()
}; };
// ... but the real type of the const becomes this.. // ... but the real type of the const becomes this ...
let key = quote! { Key<#value_ty, #self_args> }; let key = quote! { Key<#value_ty, #self_args> };
let phantom_args = self_args.iter().filter(|arg| match arg { let phantom_args = self_args.iter().filter(|arg| match arg {
syn::GenericArgument::Type(syn::Type::Path(path)) => { syn::GenericArgument::Type(syn::Type::Path(path)) => {
@ -148,9 +148,11 @@ fn process_const(
let default = &item.expr; let default = &item.expr;
// Ensure that the type is either `Copy` or that the property is referenced // Ensure that the type is
// or that the property isn't copy but can't be referenced because it needs // - either `Copy`, or
// folding. // - that the property is referenced, or
// - that the property isn't copy but can't be referenced because it needs
// folding.
let get; let get;
let mut copy = None; let mut copy = None;


@ -137,7 +137,7 @@ impl Array {
} }
/// Return a new array with only those elements for which the function /// Return a new array with only those elements for which the function
/// return true. /// returns true.
pub fn filter(&self, vm: &mut Machine, f: Spanned<Func>) -> TypResult<Self> { pub fn filter(&self, vm: &mut Machine, f: Spanned<Func>) -> TypResult<Self> {
let mut kept = vec![]; let mut kept = vec![];
for item in self.iter() { for item in self.iter() {
@ -154,7 +154,7 @@ impl Array {
/// Return a new array with all items from this and nested arrays. /// Return a new array with all items from this and nested arrays.
pub fn flatten(&self) -> Self { pub fn flatten(&self) -> Self {
let mut flat = vec![]; let mut flat = Vec::with_capacity(self.0.len());
for item in self.iter() { for item in self.iter() {
if let Value::Array(nested) = item { if let Value::Array(nested) = item {
flat.extend(nested.flatten().into_iter()); flat.extend(nested.flatten().into_iter());
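Note: `flatten` recurses into nested arrays, so flattening `(1, (2, (3,)), 4)` yields `(1, 2, 3, 4)`; `Vec::with_capacity(self.0.len())` preallocates only for the top-level items, which is a lower bound on the final length.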


@ -43,8 +43,8 @@ impl<'a> CapturesVisitor<'a> {
match node.cast() { match node.cast() {
// Every identifier is a potential variable that we need to capture. // Every identifier is a potential variable that we need to capture.
// Identifiers that shouldn't count as captures because they // Identifiers that shouldn't count as captures because they
// actually bind a new name are handled further below (individually // actually bind a new name are handled below (individually through
// through the expressions that contain them). // the expressions that contain them).
Some(Expr::Ident(ident)) => self.capture(ident), Some(Expr::Ident(ident)) => self.capture(ident),
// Code and content blocks create a scope. // Code and content blocks create a scope.
@ -179,7 +179,7 @@ mod tests {
test("#import x, y from z", &["z"]); test("#import x, y from z", &["z"]);
test("#import x, y, z from x + y", &["x", "y"]); test("#import x, y, z from x + y", &["x", "y"]);
// Scoping. // Blocks.
test("{ let x = 1; { let y = 2; y }; x + y }", &["y"]); test("{ let x = 1; { let y = 2; y }; x + y }", &["y"]);
test("[#let x = 1]#x", &["x"]); test("[#let x = 1]#x", &["x"]);
} }


@ -46,7 +46,7 @@ impl Dict {
} }
/// Borrow the value the given `key` maps to. /// Borrow the value the given `key` maps to.
pub fn get(&self, key: &EcoString) -> StrResult<&Value> { pub fn get(&self, key: &str) -> StrResult<&Value> {
self.0.get(key).ok_or_else(|| missing_key(key)) self.0.get(key).ok_or_else(|| missing_key(key))
} }
@ -59,7 +59,7 @@ impl Dict {
} }
/// Whether the dictionary contains a specific key. /// Whether the dictionary contains a specific key.
pub fn contains(&self, key: &EcoString) -> bool { pub fn contains(&self, key: &str) -> bool {
self.0.contains_key(key) self.0.contains_key(key)
} }
@ -69,7 +69,7 @@ impl Dict {
} }
/// Remove a mapping by `key`. /// Remove a mapping by `key`.
pub fn remove(&mut self, key: &EcoString) -> StrResult<()> { pub fn remove(&mut self, key: &str) -> StrResult<()> {
match Arc::make_mut(&mut self.0).remove(key) { match Arc::make_mut(&mut self.0).remove(key) {
Some(_) => Ok(()), Some(_) => Ok(()),
None => Err(missing_key(key)), None => Err(missing_key(key)),
@ -87,12 +87,12 @@ impl Dict {
/// Return the keys of the dictionary as an array. /// Return the keys of the dictionary as an array.
pub fn keys(&self) -> Array { pub fn keys(&self) -> Array {
self.iter().map(|(key, _)| Value::Str(key.clone())).collect() self.0.keys().cloned().map(Value::Str).collect()
} }
/// Return the values of the dictionary as an array. /// Return the values of the dictionary as an array.
pub fn values(&self) -> Array { pub fn values(&self) -> Array {
self.iter().map(|(_, value)| value.clone()).collect() self.0.values().cloned().collect()
} }
/// Transform each pair in the array with a function. /// Transform each pair in the array with a function.
@ -114,8 +114,8 @@ impl Dict {
/// The missing key access error message. /// The missing key access error message.
#[cold] #[cold]
fn missing_key(key: &EcoString) -> String { fn missing_key(key: &str) -> String {
format!("dictionary does not contain key {:?}", key) format!("dictionary does not contain key {:?}", EcoString::from(key))
} }
impl Debug for Dict { impl Debug for Dict {
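Taking `&str` keys means lookups no longer need an owned `EcoString` on the caller side. A minimal sketch (the helper name is made up; `Dict`, `Value` and `StrResult` are the crate's own types):

    fn side_lookup(dict: &Dict) -> StrResult<&Value> {
        // previously: dict.get(&"left".into()).or_else(|_| dict.get(&"x".into()))
        dict.get("left").or(dict.get("x"))
    }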


@ -105,7 +105,7 @@ impl Func {
self.call(&mut vm, args) self.call(&mut vm, args)
} }
/// Execute the function's set rule. /// Execute the function's set rule and return the resulting style map.
pub fn set(&self, mut args: Args) -> TypResult<StyleMap> { pub fn set(&self, mut args: Args) -> TypResult<StyleMap> {
let styles = match self.0.as_ref() { let styles = match self.0.as_ref() {
Repr::Native(Native { set: Some(set), .. }) => set(&mut args)?, Repr::Native(Native { set: Some(set), .. }) => set(&mut args)?,
@ -139,7 +139,7 @@ impl PartialEq for Func {
} }
} }
/// A native rust function. /// A function defined by a native rust function or node.
struct Native { struct Native {
/// The name of the function. /// The name of the function.
pub name: &'static str, pub name: &'static str,
@ -171,17 +171,17 @@ pub trait Node: 'static {
/// node's set rule. /// node's set rule.
fn construct(vm: &mut Machine, args: &mut Args) -> TypResult<Content>; fn construct(vm: &mut Machine, args: &mut Args) -> TypResult<Content>;
/// Parse the arguments into style properties for this node. /// Parse relevant arguments into style properties for this node.
/// ///
/// When `constructor` is true, [`construct`](Self::construct) will run /// When `constructor` is true, [`construct`](Self::construct) will run
/// after this invocation of `set`. /// after this invocation of `set` with the remaining arguments.
fn set(args: &mut Args, constructor: bool) -> TypResult<StyleMap>; fn set(args: &mut Args, constructor: bool) -> TypResult<StyleMap>;
} }
/// A user-defined closure. /// A user-defined closure.
#[derive(Hash)] #[derive(Hash)]
pub struct Closure { pub struct Closure {
/// The location where the closure was defined. /// The source file where the closure was defined.
pub location: Option<SourceId>, pub location: Option<SourceId>,
/// The name of the closure. /// The name of the closure.
pub name: Option<EcoString>, pub name: Option<EcoString>,
@ -199,8 +199,8 @@ pub struct Closure {
impl Closure { impl Closure {
/// Call the function in the context with the arguments. /// Call the function in the context with the arguments.
pub fn call(&self, vm: &mut Machine, args: &mut Args) -> TypResult<Value> { pub fn call(&self, vm: &mut Machine, args: &mut Args) -> TypResult<Value> {
// Don't leak the scopes from the call site. Instead, we use the // Don't leak the scopes from the call site. Instead, we use the scope
// scope of captured variables we collected earlier. // of captured variables we collected earlier.
let mut scopes = Scopes::new(None); let mut scopes = Scopes::new(None);
scopes.top = self.captured.clone(); scopes.top = self.captured.clone();


@ -11,7 +11,7 @@ use crate::Context;
pub struct Machine<'a> { pub struct Machine<'a> {
/// The core context. /// The core context.
pub ctx: &'a mut Context, pub ctx: &'a mut Context,
/// The route of source ids at which the machine is located. /// The route of source ids the machine took to reach its current location.
pub route: Vec<SourceId>, pub route: Vec<SourceId>,
/// The dependencies of the current evaluation process. /// The dependencies of the current evaluation process.
pub deps: Vec<(SourceId, usize)>, pub deps: Vec<(SourceId, usize)>,


@ -72,7 +72,7 @@ pub fn call(
Value::Dyn(dynamic) => match method { Value::Dyn(dynamic) => match method {
"matches" => { "matches" => {
if let Some(regex) = dynamic.downcast::<Regex>() { if let Some(regex) = dynamic.downcast::<Regex>() {
Value::Bool(regex.matches(&args.expect::<EcoString>("text")?)) Value::Bool(regex.is_match(&args.expect::<EcoString>("text")?))
} else { } else {
missing()? missing()?
} }
@ -125,7 +125,7 @@ pub fn call_mut(
}, },
Value::Dict(dict) => match method { Value::Dict(dict) => match method {
"remove" => dict.remove(&args.expect("key")?).at(span)?, "remove" => dict.remove(&args.expect::<EcoString>("key")?).at(span)?,
_ => missing()?, _ => missing()?,
}, },


@ -30,7 +30,7 @@ pub fn join(lhs: Value, rhs: Value) -> StrResult<Value> {
}) })
} }
/// Apply the plus operator to a value. /// Apply the unary plus operator to a value.
pub fn pos(value: Value) -> StrResult<Value> { pub fn pos(value: Value) -> StrResult<Value> {
Ok(match value { Ok(match value {
Int(v) => Int(v), Int(v) => Int(v),
@ -281,7 +281,7 @@ pub fn eq(lhs: Value, rhs: Value) -> StrResult<Value> {
Ok(Bool(equal(&lhs, &rhs))) Ok(Bool(equal(&lhs, &rhs)))
} }
/// Compute whether two values are equal. /// Compute whether two values are unequal.
pub fn neq(lhs: Value, rhs: Value) -> StrResult<Value> { pub fn neq(lhs: Value, rhs: Value) -> StrResult<Value> {
Ok(Bool(!equal(&lhs, &rhs))) Ok(Bool(!equal(&lhs, &rhs)))
} }


@ -92,7 +92,7 @@ pub struct RawStroke<T = RawLength> {
} }
impl RawStroke<Length> { impl RawStroke<Length> {
/// Unpack the stroke, filling missing fields with `default`. /// Unpack the stroke, filling missing fields from the `default`.
pub fn unwrap_or(self, default: Stroke) -> Stroke { pub fn unwrap_or(self, default: Stroke) -> Stroke {
Stroke { Stroke {
paint: self.paint.unwrap_or(default.paint), paint: self.paint.unwrap_or(default.paint),


@ -122,7 +122,7 @@ impl Debug for Scope {
} }
} }
/// A slot where a variable is stored. /// A slot where a value is stored.
#[derive(Clone, Hash)] #[derive(Clone, Hash)]
struct Slot { struct Slot {
/// The stored value. /// The stored value.
@ -141,17 +141,17 @@ enum Kind {
} }
impl Slot { impl Slot {
/// Create a new constant slot. /// Create a new slot.
fn new(value: Value, kind: Kind) -> Self { fn new(value: Value, kind: Kind) -> Self {
Self { value, kind } Self { value, kind }
} }
/// Read the variable. /// Read the value.
fn read(&self) -> &Value { fn read(&self) -> &Value {
&self.value &self.value
} }
/// Try to write to the variable. /// Try to write to the value.
fn write(&mut self) -> StrResult<&mut Value> { fn write(&mut self) -> StrResult<&mut Value> {
match self.kind { match self.kind {
Kind::Normal => Ok(&mut self.value), Kind::Normal => Ok(&mut self.value),


@ -45,15 +45,10 @@ impl StrExt for EcoString {
pub struct Regex(regex::Regex); pub struct Regex(regex::Regex);
impl Regex { impl Regex {
/// Create a new regex. /// Create a new regular expression.
pub fn new(re: &str) -> StrResult<Self> { pub fn new(re: &str) -> StrResult<Self> {
regex::Regex::new(re).map(Self).map_err(|err| err.to_string()) regex::Regex::new(re).map(Self).map_err(|err| err.to_string())
} }
/// Whether the regex matches the given `text`.
pub fn matches(&self, text: &str) -> bool {
self.0.is_match(text)
}
} }
impl Deref for Regex { impl Deref for Regex {
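Because `Regex` derefs to the underlying `regex::Regex`, its full API is reachable directly, which is why the removed `matches` wrapper isn't needed. Sketch (hypothetical helper):

    fn text_matches(re: &Regex, text: &str) -> bool {
        re.is_match(text) // resolved through Deref
    }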


@ -28,7 +28,7 @@ pub enum Value {
Int(i64), Int(i64),
/// A floating-point number: `1.2`, `10e-4`. /// A floating-point number: `1.2`, `10e-4`.
Float(f64), Float(f64),
/// A length: `12pt`, `3cm`. /// A length: `12pt`, `3cm`, `1.5em`.
Length(RawLength), Length(RawLength),
/// An angle: `1.5rad`, `90deg`. /// An angle: `1.5rad`, `90deg`.
Angle(Angle), Angle(Angle),
@ -532,10 +532,9 @@ impl<T: Cast> Cast for Option<T> {
/// A value that can be automatically determined. /// A value that can be automatically determined.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] #[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum Smart<T> { pub enum Smart<T> {
/// The value should be determined smartly based on the /// The value should be determined smartly based on the circumstances.
/// circumstances.
Auto, Auto,
/// A forced, specific value. /// A specific value.
Custom(T), Custom(T),
} }
@ -629,23 +628,23 @@ where
} }
let sides = Sides { let sides = Sides {
left: dict.get(&"left".into()).or_else(|_| dict.get(&"x".into())), left: dict.get("left").or(dict.get("x")),
top: dict.get(&"top".into()).or_else(|_| dict.get(&"y".into())), top: dict.get("top").or(dict.get("y")),
right: dict.get(&"right".into()).or_else(|_| dict.get(&"x".into())), right: dict.get("right").or(dict.get("x")),
bottom: dict.get(&"bottom".into()).or_else(|_| dict.get(&"y".into())), bottom: dict.get("bottom").or(dict.get("y")),
} };
.map(|side| {
side.or_else(|_| dict.get(&"rest".into()))
.and_then(|v| T::cast(v.clone()))
.unwrap_or_default()
});
Ok(sides) Ok(sides.map(|side| {
side.or(dict.get("rest"))
.cloned()
.and_then(T::cast)
.unwrap_or_default()
}))
} }
v => T::cast(v).map(Sides::splat).map_err(|msg| { v => T::cast(v).map(Sides::splat).map_err(|msg| {
with_alternative( with_alternative(
msg, msg,
"dictionary with any of `left`, `top`, `right`, `bottom`,\ "dictionary with any of `left`, `top`, `right`, `bottom`, \
`x`, `y`, or `rest` as keys", `x`, `y`, or `rest` as keys",
) )
}), }),
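Worked example of the fallback chain (own key, then axis key, then `rest`, then the default), assuming a length-valued property: casting the dictionary `(x: 1pt, rest: 2pt)` yields left = right = 1pt via `x`, and top = bottom = 2pt via `rest`.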


@ -14,7 +14,7 @@ use pdf_writer::writers::ColorSpace;
use pdf_writer::{Content, Filter, Finish, Name, PdfWriter, Rect, Ref, Str, TextStr}; use pdf_writer::{Content, Filter, Finish, Name, PdfWriter, Rect, Ref, Str, TextStr};
use ttf_parser::{name_id, GlyphId, Tag}; use ttf_parser::{name_id, GlyphId, Tag};
use crate::font::{find_name, FaceId, FontStore}; use crate::font::{FaceId, FontStore};
use crate::frame::{Destination, Element, Frame, Group, Role, Text}; use crate::frame::{Destination, Element, Frame, Group, Role, Text};
use crate::geom::{ use crate::geom::{
self, Color, Dir, Em, Geometry, Length, Numeric, Paint, Point, Ratio, Shape, Size, self, Color, Dir, Em, Geometry, Length, Numeric, Paint, Point, Ratio, Shape, Size,
@ -88,7 +88,6 @@ impl<'a> PdfExporter<'a> {
self.write_fonts(); self.write_fonts();
self.write_images(); self.write_images();
// The root page tree.
for page in std::mem::take(&mut self.pages).into_iter() { for page in std::mem::take(&mut self.pages).into_iter() {
self.write_page(page); self.write_page(page);
} }
@ -123,7 +122,8 @@ impl<'a> PdfExporter<'a> {
let metrics = face.metrics(); let metrics = face.metrics();
let ttf = face.ttf(); let ttf = face.ttf();
let postscript_name = find_name(ttf, name_id::POST_SCRIPT_NAME) let postscript_name = face
.find_name(name_id::POST_SCRIPT_NAME)
.unwrap_or_else(|| "unknown".to_string()); .unwrap_or_else(|| "unknown".to_string());
let base_font = format_eco!("ABCDEF+{}", postscript_name); let base_font = format_eco!("ABCDEF+{}", postscript_name);
@ -370,9 +370,8 @@ impl<'a> PdfExporter<'a> {
.uri(Str(uri.as_str().as_bytes())); .uri(Str(uri.as_str().as_bytes()));
} }
Destination::Internal(loc) => { Destination::Internal(loc) => {
if (1 ..= self.page_heights.len()).contains(&loc.page) { let index = loc.page.get() - 1;
let index = loc.page - 1; if let Some(&height) = self.page_heights.get(index) {
let height = self.page_heights[index];
link.action() link.action()
.action_type(ActionType::GoTo) .action_type(ActionType::GoTo)
.destination_direct() .destination_direct()
@ -457,8 +456,10 @@ impl<'a> PdfExporter<'a> {
Direction::L2R Direction::L2R
}; };
// Write the document information, catalog and wrap it up! // Write the document information.
self.writer.document_info(self.alloc.bump()).creator(TextStr("Typst")); self.writer.document_info(self.alloc.bump()).creator(TextStr("Typst"));
// Write the document catalog.
let mut catalog = self.writer.catalog(self.alloc.bump()); let mut catalog = self.writer.catalog(self.alloc.bump());
catalog.pages(self.page_tree_ref); catalog.pages(self.page_tree_ref);
catalog.viewer_preferences().direction(dir); catalog.viewer_preferences().direction(dir);
@ -556,46 +557,6 @@ struct State {
stroke_space: Option<Name<'static>>, stroke_space: Option<Name<'static>>,
} }
/// A heading that can later be linked in the outline panel.
#[derive(Debug, Clone)]
struct Heading {
content: EcoString,
level: usize,
position: Point,
page: Ref,
}
#[derive(Debug, Clone)]
struct HeadingNode {
heading: Heading,
children: Vec<HeadingNode>,
}
impl HeadingNode {
fn leaf(heading: Heading) -> Self {
HeadingNode { heading, children: Vec::new() }
}
fn len(&self) -> usize {
1 + self.children.iter().map(Self::len).sum::<usize>()
}
fn insert(&mut self, other: Heading, level: usize) -> bool {
if level >= other.level {
return false;
}
if let Some(child) = self.children.last_mut() {
if child.insert(other.clone(), level + 1) {
return true;
}
}
self.children.push(Self::leaf(other));
true
}
}
impl<'a, 'b> PageExporter<'a, 'b> { impl<'a, 'b> PageExporter<'a, 'b> {
fn new(exporter: &'a mut PdfExporter<'b>, page_ref: Ref) -> Self { fn new(exporter: &'a mut PdfExporter<'b>, page_ref: Ref) -> Self {
Self { Self {
@ -940,6 +901,47 @@ impl<'a, 'b> PageExporter<'a, 'b> {
} }
} }
/// A heading that can later be linked in the outline panel.
#[derive(Debug, Clone)]
struct Heading {
content: EcoString,
level: usize,
position: Point,
page: Ref,
}
/// A node in the outline tree.
#[derive(Debug, Clone)]
struct HeadingNode {
heading: Heading,
children: Vec<HeadingNode>,
}
impl HeadingNode {
fn leaf(heading: Heading) -> Self {
HeadingNode { heading, children: Vec::new() }
}
fn len(&self) -> usize {
1 + self.children.iter().map(Self::len).sum::<usize>()
}
fn insert(&mut self, other: Heading, level: usize) -> bool {
if level >= other.level {
return false;
}
if let Some(child) = self.children.last_mut() {
if child.insert(other.clone(), level + 1) {
return true;
}
}
self.children.push(Self::leaf(other));
true
}
}
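Worked example of the nesting, assuming the (elided) outline builder calls `insert(heading, 1)` on the last top-level node and pushes a new root when that returns `false`: for headings with levels 1, 2, 3, 2, 1, the first becomes a root, the level-2 and level-3 headings nest under the most recent shallower node, the second level-2 becomes a sibling of the first, and the final level-1 heading starts a new root.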
/// Encode an image with a suitable filter and return the data, filter and /// Encode an image with a suitable filter and return the data, filter and
/// whether the image has color. /// whether the image has color.
/// ///
@ -953,7 +955,7 @@ fn encode_image(img: &RasterImage) -> ImageResult<(Vec<u8>, Filter, bool)> {
(data.into_inner(), Filter::DctDecode, false) (data.into_inner(), Filter::DctDecode, false)
} }
// 8-bit Rgb JPEG (Cmyk JPEGs get converted to Rgb earlier). // 8-bit RGB JPEG (CMYK JPEGs get converted to RGB earlier).
(ImageFormat::Jpeg, DynamicImage::ImageRgb8(_)) => { (ImageFormat::Jpeg, DynamicImage::ImageRgb8(_)) => {
let mut data = Cursor::new(vec![]); let mut data = Cursor::new(vec![]);
img.buf.write_to(&mut data, img.format)?; img.buf.write_to(&mut data, img.format)?;


@ -20,8 +20,7 @@ use crate::Context;
/// returns the resulting `tiny-skia` pixel buffer. /// returns the resulting `tiny-skia` pixel buffer.
/// ///
/// In addition to the frame, you need to pass in the context used during /// In addition to the frame, you need to pass in the context used during
/// compilation so that fonts and images can be rendered and rendering artifacts /// compilation so that fonts and images can be rendered.
/// can be cached.
pub fn render(ctx: &Context, frame: &Frame, pixel_per_pt: f32) -> sk::Pixmap { pub fn render(ctx: &Context, frame: &Frame, pixel_per_pt: f32) -> sk::Pixmap {
let size = frame.size(); let size = frame.size();
let pxw = (pixel_per_pt * size.x.to_f32()).round().max(1.0) as u32; let pxw = (pixel_per_pt * size.x.to_f32()).round().max(1.0) as u32;
@ -298,7 +297,7 @@ fn render_outline_glyph(
Some(()) Some(())
} }
/// Renders a geometrical shape into the canvas. /// Render a geometrical shape into the canvas.
fn render_shape( fn render_shape(
canvas: &mut sk::Pixmap, canvas: &mut sk::Pixmap,
ts: sk::Transform, ts: sk::Transform,
@ -341,7 +340,7 @@ fn render_shape(
Some(()) Some(())
} }
/// Renders a raster or SVG image into the canvas. /// Render a raster or SVG image into the canvas.
fn render_image( fn render_image(
canvas: &mut sk::Pixmap, canvas: &mut sk::Pixmap,
ts: sk::Transform, ts: sk::Transform,


@ -14,7 +14,6 @@ use unicode_segmentation::UnicodeSegmentation;
use crate::geom::Em; use crate::geom::Em;
use crate::loading::{FileHash, Loader}; use crate::loading::{FileHash, Loader};
use crate::util::decode_mac_roman;
/// A unique identifier for a loaded font face. /// A unique identifier for a loaded font face.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
@ -139,8 +138,8 @@ impl FontStore {
/// To do that we compute a key for all variants and select the one with the /// To do that we compute a key for all variants and select the one with the
/// minimal key. This key prioritizes: /// minimal key. This key prioritizes:
/// - If `like` is some other face: /// - If `like` is some other face:
/// - Are both faces (not) monospaced. /// - Are both faces (not) monospaced?
/// - Do both faces (not) have serifs. /// - Do both faces (not) have serifs?
/// - How many words do the families share in their prefix? E.g. "Noto /// - How many words do the families share in their prefix? E.g. "Noto
/// Sans" and "Noto Sans Arabic" share two words, whereas "IBM Plex /// Sans" and "Noto Sans Arabic" share two words, whereas "IBM Plex
/// Arabic" shares none with "Noto Sans", so prefer "Noto Sans Arabic" /// Arabic" shares none with "Noto Sans", so prefer "Noto Sans Arabic"
@ -165,7 +164,6 @@ impl FontStore {
let mut best = None; let mut best = None;
let mut best_key = None; let mut best_key = None;
// Find the best matching variant of this font.
for id in ids { for id in ids {
let current = &infos[id.0 as usize]; let current = &infos[id.0 as usize];
@ -237,23 +235,22 @@ fn shared_prefix_words(left: &str, right: &str) -> usize {
} }
impl_track_empty!(FontStore); impl_track_empty!(FontStore);
impl_track_empty!(&'_ mut FontStore);
impl_track_hash!(FaceId); impl_track_hash!(FaceId);
impl_track_hash!(GlyphId); impl_track_hash!(GlyphId);
/// A font face. /// A font face.
pub struct Face { pub struct Face {
/// The raw face data, possibly shared with other faces from the same /// The raw face data, possibly shared with other faces from the same
/// collection. Must stay alive put, because `ttf` points into it using /// collection. The vector's allocation must not move, because `ttf` points
/// unsafe code. /// into it using unsafe code.
buffer: Arc<Vec<u8>>, buffer: Arc<Vec<u8>>,
/// The face's index in the collection (zero if not a collection). /// The face's index in the collection (zero if not a collection).
index: u32, index: u32,
/// The underlying ttf-parser/rustybuzz face. /// The underlying ttf-parser/rustybuzz face.
ttf: rustybuzz::Face<'static>, ttf: rustybuzz::Face<'static>,
/// The faces metrics. /// The face's metrics.
metrics: FaceMetrics, metrics: FaceMetrics,
/// The parsed ReX math font. /// The parsed ReX math header.
math: OnceCell<Option<MathHeader>>, math: OnceCell<Option<MathHeader>>,
} }
@ -298,7 +295,7 @@ impl Face {
&self.ttf &self.ttf
} }
/// The number of units per em. /// The number of font units per one em.
pub fn units_per_em(&self) -> f64 { pub fn units_per_em(&self) -> f64 {
self.metrics.units_per_em self.metrics.units_per_em
} }
@ -308,16 +305,6 @@ impl Face {
&self.metrics &self.metrics
} }
/// Access the math header, if any.
pub fn math(&self) -> Option<&MathHeader> {
self.math
.get_or_init(|| {
let data = self.ttf().table_data(Tag::from_bytes(b"MATH"))?;
MathHeader::parse(data).ok()
})
.as_ref()
}
/// Convert from font units to an em length. /// Convert from font units to an em length.
pub fn to_em(&self, units: impl Into<f64>) -> Em { pub fn to_em(&self, units: impl Into<f64>) -> Em {
Em::from_units(units, self.units_per_em()) Em::from_units(units, self.units_per_em())
@ -329,6 +316,21 @@ impl Face {
.glyph_hor_advance(GlyphId(glyph)) .glyph_hor_advance(GlyphId(glyph))
.map(|units| self.to_em(units)) .map(|units| self.to_em(units))
} }
/// Access the math header, if any.
pub fn math(&self) -> Option<&MathHeader> {
self.math
.get_or_init(|| {
let data = self.ttf().table_data(Tag::from_bytes(b"MATH"))?;
MathHeader::parse(data).ok()
})
.as_ref()
}
/// Lookup a name by id.
pub fn find_name(&self, name_id: u16) -> Option<String> {
find_name_ttf(&self.ttf, name_id)
}
} }
/// Metrics for a font face. /// Metrics for a font face.
@ -396,7 +398,7 @@ impl FaceMetrics {
} }
} }
/// Look up a vertical metric at the given font size. /// Look up a vertical metric.
pub fn vertical(&self, metric: VerticalFontMetric) -> Em { pub fn vertical(&self, metric: VerticalFontMetric) -> Em {
match metric { match metric {
VerticalFontMetric::Ascender => self.ascender, VerticalFontMetric::Ascender => self.ascender,
@ -491,15 +493,15 @@ impl FaceInfo {
// sometimes doesn't for the Display variants and that mixes things // sometimes doesn't for the Display variants and that mixes things
// up. // up.
let family = { let family = {
let mut family = find_name(ttf, name_id::FAMILY)?; let mut family = find_name_ttf(ttf, name_id::FAMILY)?;
if family.starts_with("Noto") { if family.starts_with("Noto") {
family = find_name(ttf, name_id::FULL_NAME)?; family = find_name_ttf(ttf, name_id::FULL_NAME)?;
} }
trim_styles(&family).to_string() trim_styles(&family).to_string()
}; };
let variant = { let variant = {
let mut full = find_name(ttf, name_id::FULL_NAME).unwrap_or_default(); let mut full = find_name_ttf(ttf, name_id::FULL_NAME).unwrap_or_default();
full.make_ascii_lowercase(); full.make_ascii_lowercase();
// Some fonts miss the relevant bits for italic or oblique, so // Some fonts miss the relevant bits for italic or oblique, so
@ -554,7 +556,7 @@ impl FaceInfo {
} }
/// Try to find and decode the name with the given id. /// Try to find and decode the name with the given id.
pub fn find_name(ttf: &ttf_parser::Face, name_id: u16) -> Option<String> { fn find_name_ttf(ttf: &ttf_parser::Face, name_id: u16) -> Option<String> {
ttf.names().into_iter().find_map(|entry| { ttf.names().into_iter().find_map(|entry| {
if entry.name_id == name_id { if entry.name_id == name_id {
if let Some(string) = entry.to_string() { if let Some(string) = entry.to_string() {
@ -570,6 +572,31 @@ pub fn find_name(ttf: &ttf_parser::Face, name_id: u16) -> Option<String> {
}) })
} }
/// Decode mac roman encoded bytes into a string.
fn decode_mac_roman(coded: &[u8]) -> String {
#[rustfmt::skip]
const TABLE: [char; 128] = [
'Ä', 'Å', 'Ç', 'É', 'Ñ', 'Ö', 'Ü', 'á', 'à', 'â', 'ä', 'ã', 'å', 'ç', 'é', 'è',
'ê', 'ë', 'í', 'ì', 'î', 'ï', 'ñ', 'ó', 'ò', 'ô', 'ö', 'õ', 'ú', 'ù', 'û', 'ü',
'†', '°', '¢', '£', '§', '•', '¶', 'ß', '®', '©', '™', '´', '¨', '≠', 'Æ', 'Ø',
'∞', '±', '≤', '≥', '¥', 'µ', '∂', '∑', '∏', 'π', '∫', 'ª', 'º', 'Ω', 'æ', 'ø',
'¿', '¡', '¬', '√', 'ƒ', '≈', '∆', '«', '»', '…', '\u{a0}', 'À', 'Ã', 'Õ', 'Œ', 'œ',
'–', '—', '“', '”', '‘', '’', '÷', '◊', 'ÿ', 'Ÿ', '⁄', '€', '‹', '›', 'ﬁ', 'ﬂ',
'‡', '·', '‚', '„', '‰', 'Â', 'Ê', 'Á', 'Ë', 'È', 'Í', 'Î', 'Ï', 'Ì', 'Ó', 'Ô',
'\u{f8ff}', 'Ò', 'Ú', 'Û', 'Ù', 'ı', 'ˆ', '˜', '¯', '˘', '˙', '˚', '¸', '˝', '˛', 'ˇ',
];
fn char_from_mac_roman(code: u8) -> char {
if code < 128 {
code as char
} else {
TABLE[(code - 128) as usize]
}
}
coded.iter().copied().map(char_from_mac_roman).collect()
}
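A quick check of the decoding against the table above (ASCII bytes pass through, high bytes index the table):

    assert_eq!(decode_mac_roman(&[0x54, 0x8A, 0x8B]), "Täã"); // 'T', 0x8A -> 'ä', 0x8B -> 'ã'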
/// Trim style naming from a family name. /// Trim style naming from a family name.
fn trim_styles(mut family: &str) -> &str { fn trim_styles(mut family: &str) -> &str {
// Separators between names, modifiers and styles. // Separators between names, modifiers and styles.
@ -944,7 +971,7 @@ mod tests {
test(&[0, 1], &[0, 2]); test(&[0, 1], &[0, 2]);
test(&[0, 1, 3], &[0, 2, 1, 1]); test(&[0, 1, 3], &[0, 2, 1, 1]);
test( test(
// [2, 3, 4, 9, 10, 11, 15, 18, 19] // {2, 3, 4, 9, 10, 11, 15, 18, 19}
&[18, 19, 2, 4, 9, 11, 15, 3, 3, 10], &[18, 19, 2, 4, 9, 11, 15, 3, 3, 10],
&[2, 3, 4, 3, 3, 1, 2, 2], &[2, 3, 4, 3, 3, 1, 2, 2],
) )


@ -27,7 +27,7 @@ pub struct Frame {
elements: Arc<Vec<(Point, Element)>>, elements: Arc<Vec<(Point, Element)>>,
} }
/// Accessors and setters. /// Constructor, accessors and setters.
impl Frame { impl Frame {
/// Create a new, empty frame. /// Create a new, empty frame.
/// ///
@ -120,10 +120,10 @@ impl Frame {
Arc::make_mut(&mut self.elements).push((pos, element)); Arc::make_mut(&mut self.elements).push((pos, element));
} }
/// Add a frame. /// Add a frame at a position in the foreground.
/// ///
/// Automatically decides whether to inline the frame or to include it as a /// Automatically decides whether to inline the frame or to include it as a
/// group based on the number of elements in the frame. /// group based on the number of elements in and the role of the frame.
pub fn push_frame(&mut self, pos: Point, frame: Frame) { pub fn push_frame(&mut self, pos: Point, frame: Frame) {
if self.should_inline(&frame) { if self.should_inline(&frame) {
self.inline(self.layer(), pos, frame); self.inline(self.layer(), pos, frame);
@ -146,6 +146,9 @@ impl Frame {
} }
/// Add multiple elements at a position in the background. /// Add multiple elements at a position in the background.
///
/// The first element in the iterator will be the one that is most in the
/// background.
pub fn prepend_multiple<I>(&mut self, elements: I) pub fn prepend_multiple<I>(&mut self, elements: I)
where where
I: IntoIterator<Item = (Point, Element)>, I: IntoIterator<Item = (Point, Element)>,
@ -163,20 +166,20 @@ impl Frame {
} }
/// Whether the given frame should be inlined. /// Whether the given frame should be inlined.
pub fn should_inline(&self, frame: &Frame) -> bool { fn should_inline(&self, frame: &Frame) -> bool {
(self.elements.is_empty() || frame.elements.len() <= 5) (self.elements.is_empty() || frame.elements.len() <= 5)
&& frame.role().map_or(true, |role| role.is_weak()) && frame.role().map_or(true, |role| role.is_weak())
} }
/// Inline a frame at the given layer. /// Inline a frame at the given layer.
pub fn inline(&mut self, layer: usize, pos: Point, frame: Frame) { fn inline(&mut self, layer: usize, pos: Point, frame: Frame) {
// Try to just reuse the elements. // Try to just reuse the elements.
if pos.is_zero() && self.elements.is_empty() { if pos.is_zero() && self.elements.is_empty() {
self.elements = frame.elements; self.elements = frame.elements;
return; return;
} }
// Try to copy the elements without adjusting the position. // Try to transfer the elements without adjusting the position.
// Also try to reuse the elements if the Arc isn't shared. // Also try to reuse the elements if the Arc isn't shared.
let range = layer .. layer; let range = layer .. layer;
if pos.is_zero() { if pos.is_zero() {
@ -192,7 +195,7 @@ impl Frame {
return; return;
} }
// We must adjust the element positioned. // We must adjust the element positions.
// But still try to reuse the elements if the Arc isn't shared. // But still try to reuse the elements if the Arc isn't shared.
let sink = Arc::make_mut(&mut self.elements); let sink = Arc::make_mut(&mut self.elements);
match Arc::try_unwrap(frame.elements) { match Arc::try_unwrap(frame.elements) {
@ -210,7 +213,11 @@ impl Frame {
impl Frame { impl Frame {
/// Remove all elements from the frame. /// Remove all elements from the frame.
pub fn clear(&mut self) { pub fn clear(&mut self) {
self.elements = Arc::new(vec![]); if Arc::strong_count(&self.elements) == 1 {
Arc::make_mut(&mut self.elements).clear();
} else {
self.elements = Arc::new(vec![]);
}
} }
/// Resize the frame to a new size, distributing new space according to the /// Resize the frame to a new size, distributing new space according to the
@ -407,7 +414,7 @@ pub enum Destination {
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct Location { pub struct Location {
/// The page, starting at 1. /// The page, starting at 1.
pub page: usize, pub page: NonZeroUsize,
/// The exact coordinates on the page (from the top left, as usual). /// The exact coordinates on the page (from the top left, as usual).
pub pos: Point, pub pos: Point,
} }
@ -416,7 +423,7 @@ impl Location {
/// Encode into a user-facing dictionary. /// Encode into a user-facing dictionary.
pub fn encode(&self) -> Dict { pub fn encode(&self) -> Dict {
dict! { dict! {
"page" => Value::Int(self.page as i64), "page" => Value::Int(self.page.get() as i64),
"x" => Value::Length(self.pos.x.into()), "x" => Value::Length(self.pos.x.into()),
"y" => Value::Length(self.pos.y.into()), "y" => Value::Length(self.pos.y.into()),
} }
@ -428,27 +435,28 @@ impl Location {
pub enum Role { pub enum Role {
/// A paragraph. /// A paragraph.
Paragraph, Paragraph,
/// A heading with some level and whether it should be part of the outline. /// A heading of the given level and whether it should be part of the
/// outline.
Heading { level: NonZeroUsize, outlined: bool }, Heading { level: NonZeroUsize, outlined: bool },
/// A generic block-level subdivision. /// A generic block-level subdivision.
GenericBlock, GenericBlock,
/// A generic inline subdivision. /// A generic inline subdivision.
GenericInline, GenericInline,
/// A list. The boolean indicates whether it is ordered. /// A list and whether it is ordered.
List { ordered: bool }, List { ordered: bool },
/// A list item. Must have a list parent. /// A list item. Must have a list parent.
ListItem, ListItem,
/// The label of a list item. /// The label of a list item. Must have a list item parent.
ListLabel, ListLabel,
/// The body of a list item. /// The body of a list item. Must have a list item parent.
ListItemBody, ListItemBody,
/// A mathematical formula. /// A mathematical formula.
Formula, Formula,
/// A table. /// A table.
Table, Table,
/// A table row. /// A table row. Must have a table parent.
TableRow, TableRow,
/// A table cell. /// A table cell. Must have a table row parent.
TableCell, TableCell,
/// A code fragment. /// A code fragment.
Code, Code,
@ -466,6 +474,8 @@ impl Role {
/// Whether the role describes a generic element and is not very /// Whether the role describes a generic element and is not very
/// descriptive. /// descriptive.
pub fn is_weak(self) -> bool { pub fn is_weak(self) -> bool {
// In Typst, all text is in a paragraph, so paragraph isn't very
// descriptive.
match self { match self {
Self::Paragraph | Self::GenericBlock | Self::GenericInline => true, Self::Paragraph | Self::GenericBlock | Self::GenericInline => true,
_ => false, _ => false,
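The `strong_count` check in `clear` avoids a pointless deep copy: when the element list is shared, `Arc::make_mut` would clone the whole vector only for it to be emptied immediately, so swapping in a fresh empty vector is cheaper; when the frame is the sole owner, the existing allocation is kept.

    if Arc::strong_count(&self.elements) == 1 {
        Arc::make_mut(&mut self.elements).clear(); // sole owner: reuse the allocation
    } else {
        self.elements = Arc::new(vec![]); // shared: don't clone just to clear
    }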


@ -35,7 +35,7 @@ impl Angle {
(self.0).0 (self.0).0
} }
/// Get the value of this length in unit. /// Get the value of this angle in a unit.
pub fn to_unit(self, unit: AngleUnit) -> f64 { pub fn to_unit(self, unit: AngleUnit) -> f64 {
self.to_raw() / unit.raw_scale() self.to_raw() / unit.raw_scale()
} }


@ -22,7 +22,7 @@ impl Em {
Self(Scalar(em)) Self(Scalar(em))
} }
/// Create font units at the given units per em. /// Create an em length from font units at the given units per em.
pub fn from_units(units: impl Into<f64>, units_per_em: f64) -> Self { pub fn from_units(units: impl Into<f64>, units_per_em: f64) -> Self {
Self(Scalar(units.into() / units_per_em)) Self(Scalar(units.into() / units_per_em))
} }
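E.g. `Em::from_units(1000, 2048.0)` is 1000 / 2048 ≈ 0.488em: a 1000-unit glyph advance in a face with 2048 units per em.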


@ -25,7 +25,7 @@ impl Fraction {
(self.0).0 (self.0).0
} }
/// The absolute value of the this fraction. /// The absolute value of this fraction.
pub fn abs(self) -> Self { pub fn abs(self) -> Self {
Self::new(self.get().abs()) Self::new(self.get().abs())
} }


@ -31,7 +31,7 @@ impl<T> Gen<T> {
Gen { cross: f(self.cross), main: f(self.main) } Gen { cross: f(self.cross), main: f(self.main) }
} }
/// Convert to the specific representation, given the current block axis. /// Convert to the specific representation, given the current main axis.
pub fn to_spec(self, main: SpecAxis) -> Spec<T> { pub fn to_spec(self, main: SpecAxis) -> Spec<T> {
match main { match main {
SpecAxis::Horizontal => Spec::new(self.main, self.cross), SpecAxis::Horizontal => Spec::new(self.main, self.cross),
@ -82,9 +82,9 @@ impl<T: Debug> Debug for Gen<T> {
/// Two generic axes of a container. /// Two generic axes of a container.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum GenAxis { pub enum GenAxis {
/// The minor axis. /// The minor / inline axis.
Cross, Cross,
/// The major axis. /// The major / block axis.
Main, Main,
} }


@ -10,7 +10,7 @@ impl Length {
Self(Scalar(0.0)) Self(Scalar(0.0))
} }
/// The inifinite length. /// The infinite length.
pub const fn inf() -> Self { pub const fn inf() -> Self {
Self(Scalar(f64::INFINITY)) Self(Scalar(f64::INFINITY))
} }
@ -50,7 +50,7 @@ impl Length {
(self.0).0 (self.0).0
} }
/// Get the value of this length in unit. /// Get the value of this length in a unit.
pub fn to_unit(self, unit: LengthUnit) -> f64 { pub fn to_unit(self, unit: LengthUnit) -> f64 {
self.to_raw() / unit.raw_scale() self.to_raw() / unit.raw_scale()
} }
@ -75,7 +75,7 @@ impl Length {
self.to_unit(LengthUnit::In) self.to_unit(LengthUnit::In)
} }
/// The absolute value of the this length. /// The absolute value of this length.
pub fn abs(self) -> Self { pub fn abs(self) -> Self {
Self::raw(self.to_raw().abs()) Self::raw(self.to_raw().abs())
} }
@ -100,7 +100,8 @@ impl Length {
*self = (*self).max(other); *self = (*self).max(other);
} }
/// Whether the other length fits into this one (i.e. is smaller). /// Whether the other length fits into this one (i.e. is smaller). Allows
/// for a bit of slack.
pub fn fits(self, other: Self) -> bool { pub fn fits(self, other: Self) -> bool {
self.0 + 1e-6 >= other.0 self.0 + 1e-6 >= other.0
} }
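The 1e-6 slack makes `fits` tolerant of floating-point error: content that measures 100.0000004pt still fits a 100pt region instead of counting as overflow.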


@ -79,7 +79,7 @@ impl Debug for Color {
} }
} }
/// An 8-bit Luma color. /// An 8-bit grayscale color.
#[derive(Copy, Clone, Eq, PartialEq, Hash)] #[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct LumaColor(pub u8); pub struct LumaColor(pub u8);


@ -15,7 +15,7 @@ impl Point {
Self { x: Length::zero(), y: Length::zero() } Self { x: Length::zero(), y: Length::zero() }
} }
/// Create a new point from x and y coordinate. /// Create a new point from x and y coordinates.
pub const fn new(x: Length, y: Length) -> Self { pub const fn new(x: Length, y: Length) -> Self {
Self { x, y } Self { x, y }
} }


@ -4,13 +4,15 @@ use std::mem;
/// A rectangle with rounded corners. /// A rectangle with rounded corners.
#[derive(Debug, Copy, Clone, PartialEq)] #[derive(Debug, Copy, Clone, PartialEq)]
pub struct Rect { pub struct RoundedRect {
size: Size, /// The size of the rectangle.
radius: Sides<Length>, pub size: Size,
/// The radius at each side.
pub radius: Sides<Length>,
} }
impl Rect { impl RoundedRect {
/// Create a new rectangle. /// Create a new rounded rectangle.
pub fn new(size: Size, radius: Sides<Length>) -> Self { pub fn new(size: Size, radius: Sides<Length>) -> Self {
Self { size, radius } Self { size, radius }
} }
@ -55,7 +57,6 @@ impl Rect {
} else { } else {
let mut paths = self.stroke_segments(Sides::splat(None)); let mut paths = self.stroke_segments(Sides::splat(None));
assert_eq!(paths.len(), 1); assert_eq!(paths.len(), 1);
Geometry::Path(paths.pop().unwrap().0) Geometry::Path(paths.pop().unwrap().0)
} }
} }
@ -103,7 +104,7 @@ impl Rect {
} }
/// Draws one side of the rounded rectangle. Will always draw the left arc. The /// Draws one side of the rounded rectangle. Will always draw the left arc. The
/// right arc will be drawn halfway iff there is no connection. /// right arc will be drawn halfway if and only if there is no connection.
fn draw_side( fn draw_side(
path: &mut Path, path: &mut Path,
side: Side, side: Side,
@ -114,7 +115,6 @@ fn draw_side(
) { ) {
let angle_left = Angle::deg(if connection.prev { 90.0 } else { 45.0 }); let angle_left = Angle::deg(if connection.prev { 90.0 } else { 45.0 });
let angle_right = Angle::deg(if connection.next { 90.0 } else { 45.0 }); let angle_right = Angle::deg(if connection.next { 90.0 } else { 45.0 });
let length = size.get(side.axis()); let length = size.get(side.axis());
// The arcs for a border of the rectangle along the x-axis, starting at (0,0). // The arcs for a border of the rectangle along the x-axis, starting at (0,0).
@ -166,9 +166,9 @@ fn draw_side(
} }
} }
/// A state machine that indicates which sides of the border strokes in a 2D /// Indicates which sides of the border strokes in a 2D polygon are connected to
/// polygon are connected to their neighboring sides. /// their neighboring sides.
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]
struct Connection { struct Connection {
prev: bool, prev: bool,
next: bool, next: bool,


@ -32,7 +32,7 @@ impl<T> Sides<T> {
} }
} }
/// Maps the individual fields with `f`. /// Map the individual fields with `f`.
pub fn map<F, U>(self, mut f: F) -> Sides<U> pub fn map<F, U>(self, mut f: F) -> Sides<U>
where where
F: FnMut(T) -> U, F: FnMut(T) -> U,
@ -58,12 +58,12 @@ impl<T> Sides<T> {
} }
} }
/// Returns an iterator over the sides. /// An iterator over the sides.
pub fn iter(&self) -> impl Iterator<Item = &T> { pub fn iter(&self) -> impl Iterator<Item = &T> {
[&self.left, &self.top, &self.right, &self.bottom].into_iter() [&self.left, &self.top, &self.right, &self.bottom].into_iter()
} }
/// Returns whether all sides are equal. /// Whether all sides are equal.
pub fn is_uniform(&self) -> bool pub fn is_uniform(&self) -> bool
where where
T: PartialEq, T: PartialEq,
@ -72,10 +72,7 @@ impl<T> Sides<T> {
} }
} }
impl<T> Sides<T> impl<T: Add> Sides<T> {
where
T: Add,
{
/// Sums up `left` and `right` into `x`, and `top` and `bottom` into `y`. /// Sums up `left` and `right` into `x`, and `top` and `bottom` into `y`.
pub fn sum_by_axis(self) -> Spec<T::Output> { pub fn sum_by_axis(self) -> Spec<T::Output> {
Spec::new(self.left + self.right, self.top + self.bottom) Spec::new(self.left + self.right, self.top + self.bottom)
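E.g. padding of left: 2pt, right: 3pt, top: 1pt, bottom: 1pt sums to `Spec { x: 5pt, y: 2pt }`, the total space consumed horizontally and vertically.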


@ -26,7 +26,7 @@ impl<T> Spec<T> {
Self { x: v.clone(), y: v } Self { x: v.clone(), y: v }
} }
/// Maps the individual fields with `f`. /// Map the individual fields with `f`.
pub fn map<F, U>(self, mut f: F) -> Spec<U> pub fn map<F, U>(self, mut f: F) -> Spec<U>
where where
F: FnMut(T) -> U, F: FnMut(T) -> U,


@ -48,8 +48,8 @@ impl Transform {
} }
/// Whether this is the identity transformation. /// Whether this is the identity transformation.
pub fn is_identity(&self) -> bool { pub fn is_identity(self) -> bool {
*self == Self::identity() self == Self::identity()
} }
/// Pre-concatenate another transformation. /// Pre-concatenate another transformation.


@ -133,7 +133,7 @@ pub struct RasterImage {
} }
impl RasterImage { impl RasterImage {
/// Parse an image from raw data in a supported format (PNG or JPEG). /// Parse an image from raw data in a supported format (PNG, JPEG or GIF).
/// ///
/// The image format is determined automatically. /// The image format is determined automatically.
pub fn parse(data: &[u8]) -> io::Result<Self> { pub fn parse(data: &[u8]) -> io::Result<Self> {


@ -3,17 +3,17 @@
//! # Steps //! # Steps
//! - **Parsing:** The parsing step first transforms a plain string into an //! - **Parsing:** The parsing step first transforms a plain string into an
//! [iterator of tokens][tokens]. This token stream is [parsed] into a [syntax //! [iterator of tokens][tokens]. This token stream is [parsed] into a [syntax
//! tree]. The tree itself is untyped, but a typed layer over it is provided //! tree]. The tree itself is untyped, but the [AST] module provides a typed
//! in the [AST] module. //! layer over it.
//! - **Evaluation:** The next step is to [evaluate] the markup. This produces a //! - **Evaluation:** The next step is to [evaluate] the markup. This produces a
//! [module], consisting of a scope of values that were exported by the code //! [module], consisting of a scope of values that were exported by the code
//! and [content], a hierarchical, styled representation with the contents of //! and [content], a hierarchical, styled representation of the text,
//! the module. The nodes of the content tree are well structured and //! structure, layouts, etc. of the module. The nodes of the content tree are
//! order-independent and thus much better suited for layouting than the raw //! well structured and order-independent and thus much better suited for
//! markup. //! layouting than the raw markup.
//! - **Layouting:** Next, the tree is [layouted] into a portable version of the //! - **Layouting:** Next, the content is [layouted] into a portable version of
//! typeset document. The output of this is a collection of [`Frame`]s (one //! the typeset document. The output of this is a collection of [`Frame`]s
//! per page), ready for exporting. //! (one per page), ready for exporting.
//! - **Exporting:** The finished layout can be exported into a supported //! - **Exporting:** The finished layout can be exported into a supported
//! format. Currently, the only supported output format is [PDF]. //! format. Currently, the only supported output format is [PDF].
//! //!


@ -19,12 +19,9 @@ impl Layout for HideNode {
styles: StyleChain, styles: StyleChain,
) -> TypResult<Vec<Frame>> { ) -> TypResult<Vec<Frame>> {
let mut frames = self.0.layout(ctx, regions, styles)?; let mut frames = self.0.layout(ctx, regions, styles)?;
// Clear the frames.
for frame in &mut frames { for frame in &mut frames {
frame.clear(); frame.clear();
} }
Ok(frames) Ok(frames)
} }
} }


@ -165,7 +165,7 @@ impl<const S: ShapeKind> Layout for ShapeNode<S> {
frame.prepend(pos, Element::Shape(shape)); frame.prepend(pos, Element::Shape(shape));
} else { } else {
frame.prepend_multiple( frame.prepend_multiple(
Rect::new(size, radius) RoundedRect::new(size, radius)
.shapes(fill, stroke) .shapes(fill, stroke)
.into_iter() .into_iter()
.map(|x| (pos, Element::Shape(x))), .map(|x| (pos, Element::Shape(x))),


@ -107,6 +107,8 @@ impl<const T: TransformKind> Layout for TransformNode<T> {
} }
/// Kinds of transformations. /// Kinds of transformations.
///
/// The move transformation is handled separately.
pub type TransformKind = usize; pub type TransformKind = usize;
/// A rotational transformation. /// A rotational transformation.


@ -58,10 +58,10 @@ impl Layout for ColumnsNode {
// Layout the children. // Layout the children.
let mut frames = self.child.layout(ctx, &pod, styles)?.into_iter(); let mut frames = self.child.layout(ctx, &pod, styles)?.into_iter();
let mut finished = vec![];
let dir = styles.get(TextNode::DIR); let dir = styles.get(TextNode::DIR);
let total_regions = (frames.len() as f32 / columns as f32).ceil() as usize; let total_regions = (frames.len() as f32 / columns as f32).ceil() as usize;
let mut finished = vec![];
// Stitch together the columns for each region. // Stitch together the columns for each region.
for region in regions.iter().take(total_regions) { for region in regions.iter().take(total_regions) {
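E.g. 7 child frames spread over 3 columns need ceil(7 / 3) = 3 regions, with the last region only partially filled.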


@ -67,7 +67,7 @@ pub enum TrackSizing {
castable! { castable! {
Vec<TrackSizing>, Vec<TrackSizing>,
Expected: "integer, auto, relative length, fraction, or array of the latter three)", Expected: "integer, auto, relative length, fraction, or array of the latter three",
Value::Auto => vec![TrackSizing::Auto], Value::Auto => vec![TrackSizing::Auto],
Value::Length(v) => vec![TrackSizing::Relative(v.into())], Value::Length(v) => vec![TrackSizing::Relative(v.into())],
Value::Ratio(v) => vec![TrackSizing::Relative(v.into())], Value::Ratio(v) => vec![TrackSizing::Relative(v.into())],


@ -65,7 +65,7 @@ fn shrink(size: Size, padding: Sides<Relative<Length>>) -> Size {
/// (Vertical axis is analogous.) /// (Vertical axis is analogous.)
/// ///
/// Let w be the grown target width, /// Let w be the grown target width,
/// s be given width, /// s be the given width,
/// l be the left padding, /// l be the left padding,
/// r be the right padding, /// r be the right padding,
/// p = l + r. /// p = l + r.
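Sketch of the elided derivation, assuming the padding can be relative to the grown width w: s = w - p(w) with p(w) = rel·w + abs, so w = (s + abs) / (1 - rel).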


@ -18,7 +18,7 @@ impl PageNode {
/// Whether the page is flipped into landscape orientation. /// Whether the page is flipped into landscape orientation.
pub const FLIPPED: bool = false; pub const FLIPPED: bool = false;
/// The page margin. /// The page's margins.
#[property(fold)] #[property(fold)]
pub const MARGINS: Sides<Option<Smart<Relative<RawLength>>>> = pub const MARGINS: Sides<Option<Smart<Relative<RawLength>>>> =
Sides::splat(Smart::Auto); Sides::splat(Smart::Auto);


@ -39,9 +39,8 @@ impl Layout for PlaceNode {
// If expansion is off, zero all sizes so that we don't take up any // If expansion is off, zero all sizes so that we don't take up any
// space in our parent. Otherwise, respect the expand settings. // space in our parent. Otherwise, respect the expand settings.
let frame = &mut frames[0];
let target = regions.expand.select(regions.first, Size::zero()); let target = regions.expand.select(regions.first, Size::zero());
frame.resize(target, Align::LEFT_TOP); frames[0].resize(target, Align::LEFT_TOP);
Ok(frames) Ok(frames)
} }


@ -176,7 +176,8 @@ impl<'a> StackLayouter<'a> {
self.finish_region(); self.finish_region();
} }
// Align nodes' block-axis alignment is respected by the stack node. // Block-axis alignment of the `AlignNode` is respected
// by the stack node.
let align = node let align = node
.downcast::<AlignNode>() .downcast::<AlignNode>()
.and_then(|node| node.aligns.get(self.axis)) .and_then(|node| node.aligns.get(self.axis))


@ -34,7 +34,7 @@ impl Layout for RexNode {
.ok_or("failed to find math font") .ok_or("failed to find math font")
.at(span)?; .at(span)?;
// Prepare the font. // Prepare the font context.
let face = ctx.fonts.get(face_id); let face = ctx.fonts.get(face_id);
let ctx = face let ctx = face
.math() .math()


@ -19,5 +19,5 @@ pub use crate::model::{
StyleChain, StyleMap, StyleVec, StyleChain, StyleMap, StyleVec,
}; };
pub use crate::syntax::{Span, Spanned}; pub use crate::syntax::{Span, Spanned};
pub use crate::util::{EcoString, OptionExt}; pub use crate::util::EcoString;
pub use crate::Context; pub use crate::Context;


@ -20,8 +20,8 @@ pub type OverlineNode = DecoNode<OVERLINE>;
#[node(showable)] #[node(showable)]
impl<const L: DecoLine> DecoNode<L> { impl<const L: DecoLine> DecoNode<L> {
/// How to stroke the line. The text color and thickness read from the font /// How to stroke the line. The text color and thickness are read from the
/// tables if `auto`. /// font tables if `auto`.
#[property(shorthand, resolve, fold)] #[property(shorthand, resolve, fold)]
pub const STROKE: Smart<RawStroke> = Smart::Auto; pub const STROKE: Smart<RawStroke> = Smart::Auto;
/// Position of the line relative to the baseline, read from the font tables /// Position of the line relative to the baseline, read from the font tables


@ -1,7 +1,7 @@
use super::TextNode; use super::TextNode;
use crate::library::prelude::*; use crate::library::prelude::*;
/// Link text and other elements to an URL. /// Link text and other elements to a destination.
#[derive(Debug, Hash)] #[derive(Debug, Hash)]
pub struct LinkNode { pub struct LinkNode {
/// The destination the link points to. /// The destination the link points to.
@ -15,7 +15,7 @@ impl LinkNode {
/// The fill color of text in the link. Just the surrounding text color /// The fill color of text in the link. Just the surrounding text color
/// if `auto`. /// if `auto`.
pub const FILL: Smart<Paint> = Smart::Auto; pub const FILL: Smart<Paint> = Smart::Auto;
/// Whether to underline link. /// Whether to underline the link.
pub const UNDERLINE: Smart<bool> = Smart::Auto; pub const UNDERLINE: Smart<bool> = Smart::Auto;
fn construct(_: &mut Machine, args: &mut Args) -> TypResult<Content> { fn construct(_: &mut Machine, args: &mut Args) -> TypResult<Content> {
@ -35,10 +35,10 @@ castable! {
Expected: "string or dictionary with `page`, `x`, and `y` keys", Expected: "string or dictionary with `page`, `x`, and `y` keys",
Value::Str(string) => Self::Url(string), Value::Str(string) => Self::Url(string),
Value::Dict(dict) => { Value::Dict(dict) => {
let page: i64 = dict.get(&"page".into())?.clone().cast()?; let page = dict.get("page")?.clone().cast()?;
let x: RawLength = dict.get(&"x".into())?.clone().cast()?; let x: RawLength = dict.get("x")?.clone().cast()?;
let y: RawLength = dict.get(&"y".into())?.clone().cast()?; let y: RawLength = dict.get("y")?.clone().cast()?;
Self::Internal(Location { page: page as usize, pos: Point::new(x.length, y.length) }) Self::Internal(Location { page, pos: Point::new(x.length, y.length) })
}, },
} }
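With `Location::page` now a `NonZeroUsize`, a destination such as `(page: 2, x: 1cm, y: 5cm)` casts as before, while `page: 0` is presumably rejected at the cast step instead of producing an invalid zero page.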


@ -59,13 +59,13 @@ impl TextNode {
/// The amount of space that should be added between characters. /// The amount of space that should be added between characters.
#[property(resolve)] #[property(resolve)]
pub const TRACKING: RawLength = RawLength::zero(); pub const TRACKING: RawLength = RawLength::zero();
/// The width of spaces relative to the default space width. /// The width of spaces relative to the font's space width.
#[property(resolve)] #[property(resolve)]
pub const SPACING: Relative<RawLength> = Relative::one(); pub const SPACING: Relative<RawLength> = Relative::one();
/// The offset of the baseline. /// The offset of the baseline.
#[property(resolve)] #[property(resolve)]
pub const BASELINE: RawLength = RawLength::zero(); pub const BASELINE: RawLength = RawLength::zero();
/// Whether glyphs can hang over into the margin. /// Whether certain glyphs can hang over into the margin.
pub const OVERHANG: bool = true; pub const OVERHANG: bool = true;
/// The top end of the text bounding box. /// The top end of the text bounding box.
pub const TOP_EDGE: TextEdge = TextEdge::Metric(VerticalFontMetric::CapHeight); pub const TOP_EDGE: TextEdge = TextEdge::Metric(VerticalFontMetric::CapHeight);
@ -114,7 +114,7 @@ impl TextNode {
/// Whether the font weight should be increased by 300. /// Whether the font weight should be increased by 300.
#[property(skip, fold)] #[property(skip, fold)]
pub const BOLD: Toggle = false; pub const BOLD: Toggle = false;
/// Whether the the font style should be inverted. /// Whether the font style should be inverted.
#[property(skip, fold)] #[property(skip, fold)]
pub const ITALIC: Toggle = false; pub const ITALIC: Toggle = false;
/// A case transformation that should be applied to the text. /// A case transformation that should be applied to the text.
@ -123,7 +123,7 @@ impl TextNode {
/// Whether small capital glyphs should be used. ("smcp") /// Whether small capital glyphs should be used. ("smcp")
#[property(skip)] #[property(skip)]
pub const SMALLCAPS: bool = false; pub const SMALLCAPS: bool = false;
/// An URL the text should link to. /// A destination the text should be linked to.
#[property(skip, referenced)] #[property(skip, referenced)]
pub const LINK: Option<Destination> = None; pub const LINK: Option<Destination> = None;
/// Decorative lines. /// Decorative lines.
@ -168,7 +168,7 @@ impl TextNode {
} }
} }
/// A font family like "Arial". /// A lowercased font family like "arial".
#[derive(Clone, Eq, PartialEq, Hash)] #[derive(Clone, Eq, PartialEq, Hash)]
pub struct FontFamily(EcoString); pub struct FontFamily(EcoString);
@ -338,7 +338,7 @@ impl Resolve for Smart<Hyphenate> {
pub struct StylisticSet(u8); pub struct StylisticSet(u8);
impl StylisticSet { impl StylisticSet {
/// Creates a new set, clamping to 1-20. /// Create a new set, clamping to 1-20.
pub fn new(index: u8) -> Self { pub fn new(index: u8) -> Self {
Self(index.clamp(1, 20)) Self(index.clamp(1, 20))
} }
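The clamping in `new` above means out-of-range indices are silently pulled into 1-20 rather than rejected. A self-contained restatement for illustration (the derive and `main` are additions, not part of the crate):

#[derive(Debug, PartialEq)]
struct StylisticSet(u8);

impl StylisticSet {
    /// Create a new set, clamping to 1-20.
    fn new(index: u8) -> Self {
        Self(index.clamp(1, 20))
    }
}

fn main() {
    assert_eq!(StylisticSet::new(0), StylisticSet(1));   // clamped up
    assert_eq!(StylisticSet::new(25), StylisticSet(20)); // clamped down
}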
@ -363,7 +363,7 @@ castable! {
pub enum NumberType { pub enum NumberType {
/// Numbers that fit well with capital text. ("lnum") /// Numbers that fit well with capital text. ("lnum")
Lining, Lining,
/// Numbers that fit well into flow of upper- and lowercase text. ("onum") /// Numbers that fit well into a flow of upper- and lowercase text. ("onum")
OldStyle, OldStyle,
} }
@ -396,28 +396,6 @@ castable! {
}, },
} }
/// How to position numbers.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum NumberPosition {
/// Numbers are positioned on the same baseline as text.
Normal,
/// Numbers are smaller and placed at the bottom. ("subs")
Subscript,
/// Numbers are smaller and placed at the top. ("sups")
Superscript,
}
castable! {
NumberPosition,
Expected: "string",
Value::Str(string) => match string.as_str() {
"normal" => Self::Normal,
"subscript" => Self::Subscript,
"superscript" => Self::Superscript,
_ => Err(r#"expected "normal", "subscript" or "superscript""#)?,
},
}
castable! { castable! {
Vec<(Tag, u32)>, Vec<(Tag, u32)>,
Expected: "array of strings or dictionary mapping tags to integers", Expected: "array of strings or dictionary mapping tags to integers",
@ -445,12 +423,12 @@ impl Fold for Vec<(Tag, u32)> {
} }
} }
/// Convert text to lowercase. /// Convert a string or content to lowercase.
pub fn lower(_: &mut Machine, args: &mut Args) -> TypResult<Value> { pub fn lower(_: &mut Machine, args: &mut Args) -> TypResult<Value> {
case(Case::Lower, args) case(Case::Lower, args)
} }
/// Convert text to uppercase. /// Convert a string or content to uppercase.
pub fn upper(_: &mut Machine, args: &mut Args) -> TypResult<Value> { pub fn upper(_: &mut Machine, args: &mut Args) -> TypResult<Value> {
case(Case::Upper, args) case(Case::Upper, args)
} }
@ -475,7 +453,7 @@ pub enum Case {
} }
impl Case { impl Case {
/// Apply the case to a string of text. /// Apply the case to a string.
pub fn apply(self, text: &str) -> String { pub fn apply(self, text: &str) -> String {
match self { match self {
Self::Upper => text.to_uppercase(), Self::Upper => text.to_uppercase(),
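A minimal standalone sketch of the case transform documented above, assuming only the `Upper` and `Lower` variants visible in this hunk:

enum Case { Upper, Lower }

impl Case {
    /// Apply the case to a string.
    fn apply(self, text: &str) -> String {
        match self {
            Case::Upper => text.to_uppercase(),
            Case::Lower => text.to_lowercase(),
        }
    }
}

fn main() {
    // `to_uppercase` is Unicode-aware, so non-ASCII text works too.
    assert_eq!(Case::Upper.apply("héllo"), "HÉLLO");
}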

View File

@ -297,7 +297,7 @@ impl Segment<'_> {
/// A prepared item in a paragraph layout. /// A prepared item in a paragraph layout.
#[derive(Debug)] #[derive(Debug)]
enum Item<'a> { enum Item<'a> {
/// A shaped text run with consistent direction. /// A shaped text run with consistent style and direction.
Text(ShapedText<'a>), Text(ShapedText<'a>),
/// Absolute spacing between other items. /// Absolute spacing between other items.
Absolute(Length), Absolute(Length),
@ -305,7 +305,7 @@ enum Item<'a> {
Fractional(Fraction), Fractional(Fraction),
/// A layouted child node. /// A layouted child node.
Frame(Frame), Frame(Frame),
/// A repeating node. /// A repeating node that fills the remaining space.
Repeat(&'a RepeatNode, StyleChain<'a>), Repeat(&'a RepeatNode, StyleChain<'a>),
/// A pin identified by index. /// A pin identified by index.
Pin(usize), Pin(usize),
@ -330,7 +330,7 @@ impl<'a> Item<'a> {
} }
} }
/// The natural width of the item. /// The natural layouted width of the item.
fn width(&self) -> Length { fn width(&self) -> Length {
match self { match self {
Self::Text(shaped) => shaped.width, Self::Text(shaped) => shaped.width,
@ -366,7 +366,7 @@ struct Line<'a> {
last: Option<Item<'a>>, last: Option<Item<'a>>,
/// The width of the line. /// The width of the line.
width: Length, width: Length,
/// Whether the line is allowed to be justified. /// Whether the line should be justified.
justify: bool, justify: bool,
/// Whether the line ends with a hyphen or dash, either naturally or through /// Whether the line ends with a hyphen or dash, either naturally or through
/// hyphenation. /// hyphenation.
@ -403,7 +403,7 @@ impl<'a> Line<'a> {
self.items().skip(start).take(end - start) self.items().skip(start).take(end - start)
} }
// How many justifiable glyphs the line contains. /// How many justifiable glyphs the line contains.
fn justifiables(&self) -> usize { fn justifiables(&self) -> usize {
let mut count = 0; let mut count = 0;
for shaped in self.items().filter_map(Item::text) { for shaped in self.items().filter_map(Item::text) {
@ -528,7 +528,7 @@ fn prepare<'a>(
let mut cursor = 0; let mut cursor = 0;
let mut items = vec![]; let mut items = vec![];
// Layout the children and collect them into items. // Shape / layout the children and collect them into items.
for (segment, styles) in segments { for (segment, styles) in segments {
let end = cursor + segment.len(); let end = cursor + segment.len();
match segment { match segment {
@ -654,7 +654,7 @@ fn linebreak<'a>(
} }
/// Perform line breaking in simple first-fit style. This means that we build /// Perform line breaking in simple first-fit style. This means that we build
/// lines a greedily, always taking the longest possible line. This may lead to /// lines greedily, always taking the longest possible line. This may lead to
/// very unbalanced lines, but is fast and simple. /// very unbalanced lines, but is fast and simple.
fn linebreak_simple<'a>( fn linebreak_simple<'a>(
p: &'a Preparation<'a>, p: &'a Preparation<'a>,
@ -670,8 +670,8 @@ fn linebreak_simple<'a>(
let mut attempt = line(p, fonts, start .. end, mandatory, hyphen); let mut attempt = line(p, fonts, start .. end, mandatory, hyphen);
// If the line doesn't fit anymore, we push the last fitting attempt // If the line doesn't fit anymore, we push the last fitting attempt
// into the stack and rebuild the line from its end. The resulting // into the stack and rebuild the line from the attempt's end. The
// line cannot be broken up further. // resulting line cannot be broken up further.
if !width.fits(attempt.width) { if !width.fits(attempt.width) {
if let Some((last_attempt, last_end)) = last.take() { if let Some((last_attempt, last_end)) = last.take() {
lines.push(last_attempt); lines.push(last_attempt);
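The greedy first-fit strategy described in the doc comment above, sketched over plain word widths rather than shaped text (the names and `f64` widths are illustrative, not the crate's types):

fn first_fit(words: &[f64], space: f64, max_width: f64) -> Vec<Vec<usize>> {
    let mut lines: Vec<Vec<usize>> = vec![Vec::new()];
    let mut width = 0.0;
    for (i, &w) in words.iter().enumerate() {
        let empty = lines.last().unwrap().is_empty();
        let needed = if empty { w } else { width + space + w };
        if !empty && needed > max_width {
            // The longest fitting line is finalized; it cannot be broken up
            // further, and the next line starts with this word.
            lines.push(vec![i]);
            width = w;
        } else {
            lines.last_mut().unwrap().push(i);
            width = needed;
        }
    }
    lines
}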
@ -771,17 +771,18 @@ fn linebreak_optimized<'a>(
ratio = ratio.min(10.0); ratio = ratio.min(10.0);
// Determine the cost of the line. // Determine the cost of the line.
let mut cost = if ratio < if attempt.justify { MIN_RATIO } else { 0.0 } { let min_ratio = if attempt.justify { MIN_RATIO } else { 0.0 };
let mut cost = if ratio < min_ratio {
// The line is overfull. This is the case if // The line is overfull. This is the case if
// - justification is on, but we'd need to shrink to much // - justification is on, but we'd need to shrink too much
// - justification is off and the line just doesn't fit // - justification is off and the line just doesn't fit
// Since any longer line will also be overfull, we can deactivate // Since any longer line will also be overfull, we can deactivate
// this breakpoint. // this breakpoint.
active = i + 1; active = i + 1;
MAX_COST MAX_COST
} else if eof { } else if eof {
// This is the final line and it's not overfull since then // This is the final line and it's not overfull since then we
// we would have taken the above branch. // would have taken the above branch.
0.0 0.0
} else if mandatory { } else if mandatory {
// This is a mandatory break and the line is not overfull, so it // This is a mandatory break and the line is not overfull, so it
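A hedged restatement of the cost classification above; `MIN_RATIO`, `MAX_COST`, and the looseness penalty are assumptions standing in for the values the crate actually uses:

const MIN_RATIO: f64 = -1.0;       // assumed shrink limit
const MAX_COST: f64 = 1_000_000.0; // assumed "never pick this" cost

fn line_cost(mut ratio: f64, justify: bool, eof: bool, mandatory: bool) -> f64 {
    ratio = ratio.min(10.0);
    let min_ratio = if justify { MIN_RATIO } else { 0.0 };
    if ratio < min_ratio {
        // Overfull: either we would have to shrink too much (justified)
        // or the line simply does not fit (unjustified).
        MAX_COST
    } else if eof || mandatory {
        // The final line and forced breaks are not penalized for looseness.
        0.0
    } else {
        // Looser lines cost more; a cubic penalty is a common choice.
        ratio.abs().powi(3)
    }
}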

View File

@ -1,6 +1,6 @@
use crate::library::prelude::*; use crate::library::prelude::*;
/// Fill space by repeating something horizontally. /// A node that should be repeated to fill up a line.
#[derive(Debug, Hash)] #[derive(Debug, Hash)]
pub struct RepeatNode(pub LayoutNode); pub struct RepeatNode(pub LayoutNode);

View File

@ -43,7 +43,9 @@ pub struct ShapedGlyph {
pub x_offset: Em, pub x_offset: Em,
/// The vertical offset of the glyph. /// The vertical offset of the glyph.
pub y_offset: Em, pub y_offset: Em,
/// A value that is the same for all glyphs belong to one cluster. /// The byte index in the source text where this glyph's cluster starts. A
/// cluster is a sequence of one or multiple glyphs that cannot be
/// separated and must always be treated as a union.
pub cluster: usize, pub cluster: usize,
/// Whether splitting the shaping result before this glyph would yield the /// Whether splitting the shaping result before this glyph would yield the
/// same results as shaping the parts to both sides of `text_index` /// same results as shaping the parts to both sides of `text_index`
@ -67,9 +69,9 @@ impl ShapedGlyph {
/// A side you can go toward. /// A side you can go toward.
enum Side { enum Side {
/// Go toward the west. /// To the left-hand side.
Left, Left,
/// Go toward the east. /// To the right-hand side.
Right, Right,
} }
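To make the new `cluster` documentation concrete, here is an illustrative (non-crate) helper that groups consecutive glyphs sharing a cluster index, which is exactly the unit that must never be split:

struct Glyph {
    cluster: usize,
}

fn clusters(glyphs: &[Glyph]) -> Vec<&[Glyph]> {
    let mut runs = Vec::new();
    let mut start = 0;
    for i in 1 ..= glyphs.len() {
        if i == glyphs.len() || glyphs[i].cluster != glyphs[start].cluster {
            // Everything in glyphs[start .. i] belongs to one cluster.
            runs.push(&glyphs[start .. i]);
            start = i;
        }
    }
    runs
}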
@ -141,7 +143,7 @@ impl<'a> ShapedText<'a> {
frame frame
} }
/// Measure the top and bottom extent of a this text. /// Measure the top and bottom extent of this text.
fn measure(&self, fonts: &mut FontStore) -> (Length, Length) { fn measure(&self, fonts: &mut FontStore) -> (Length, Length) {
let mut top = Length::zero(); let mut top = Length::zero();
let mut bottom = Length::zero(); let mut bottom = Length::zero();
@ -498,7 +500,7 @@ fn shape_tofus(ctx: &mut ShapingContext, base: usize, text: &str, face_id: FaceI
} }
} }
/// Apply tracking and spacing to a slice of shaped glyphs. /// Apply tracking and spacing to the shaped glyphs.
fn track_and_space(ctx: &mut ShapingContext) { fn track_and_space(ctx: &mut ShapingContext) {
let tracking = Em::from_length(ctx.styles.get(TextNode::TRACKING), ctx.size); let tracking = Em::from_length(ctx.styles.get(TextNode::TRACKING), ctx.size);
let spacing = ctx let spacing = ctx
@ -522,7 +524,7 @@ fn track_and_space(ctx: &mut ShapingContext) {
} }
} }
/// Resolve the font variant with `STRONG` and `EMPH` factored in. /// Resolve the font variant with `BOLD` and `ITALIC` factored in.
pub fn variant(styles: StyleChain) -> FontVariant { pub fn variant(styles: StyleChain) -> FontVariant {
let mut variant = FontVariant::new( let mut variant = FontVariant::new(
styles.get(TextNode::STYLE), styles.get(TextNode::STYLE),

View File

@ -3,11 +3,12 @@ use crate::font::FontStore;
use crate::library::prelude::*; use crate::library::prelude::*;
use crate::util::EcoString; use crate::util::EcoString;
/// Sub or superscript text. The text is rendered smaller and its baseline is raised. /// Sub or superscript text.
/// ///
/// To provide the best typography possible, we first try to transform the /// The text is rendered smaller and its baseline is raised. To provide the best
/// text to superscript codepoints. If that fails, we fall back to rendering /// typography possible, we first try to transform the text to superscript
/// shrunk normal letters in a raised way. /// codepoints. If that fails, we fall back to rendering shrunk normal letters
/// in a raised way.
#[derive(Debug, Hash)] #[derive(Debug, Hash)]
pub struct ShiftNode<const S: ScriptKind>(pub Content); pub struct ShiftNode<const S: ScriptKind>(pub Content);
@ -19,7 +20,8 @@ pub type SubNode = ShiftNode<SUBSCRIPT>;
#[node] #[node]
impl<const S: ScriptKind> ShiftNode<S> { impl<const S: ScriptKind> ShiftNode<S> {
/// Whether to prefer the dedicated sub- and superscript characters of the font. /// Whether to prefer the dedicated sub- and superscript characters of the
/// font.
pub const TYPOGRAPHIC: bool = true; pub const TYPOGRAPHIC: bool = true;
/// The baseline shift for synthetic sub- and superscripts. /// The baseline shift for synthetic sub- and superscripts.
pub const BASELINE: RawLength = pub const BASELINE: RawLength =
@ -60,9 +62,8 @@ impl<const S: ScriptKind> Show for ShiftNode<S> {
} }
} }
/// Find and transform the text contained in `content` iff it only consists of /// Find and transform the text contained in `content` to the given script kind
/// `Text`, `Space`, and `Empty` leaf nodes. The text is transformed to the /// if and only if it only consists of `Text`, `Space`, and `Empty` leaf nodes.
/// given script kind.
fn search_text(content: &Content, mode: ScriptKind) -> Option<EcoString> { fn search_text(content: &Content, mode: ScriptKind) -> Option<EcoString> {
match content { match content {
Content::Text(_) => { Content::Text(_) => {
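The "transform to superscript codepoints" idea described in the reworded doc comment above, as a standalone sketch: the character table is partial, and the function simply gives up (returns `None`) when a character has no dedicated form, which is where the shrink-and-raise fallback would kick in.

fn to_superscript(text: &str) -> Option<String> {
    text.chars()
        .map(|c| match c {
            '0' => Some('⁰'), '1' => Some('¹'), '2' => Some('²'),
            '3' => Some('³'), '4' => Some('⁴'), '5' => Some('⁵'),
            '6' => Some('⁶'), '7' => Some('⁷'), '8' => Some('⁸'),
            '9' => Some('⁹'), '+' => Some('⁺'), '-' => Some('⁻'),
            ' ' => Some(' '),
            _ => None,
        })
        .collect()
}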

View File

@ -2,7 +2,7 @@ use std::cmp::Ordering;
use crate::library::prelude::*; use crate::library::prelude::*;
/// Convert a value to a integer. /// Convert a value to an integer.
pub fn int(_: &mut Machine, args: &mut Args) -> TypResult<Value> { pub fn int(_: &mut Machine, args: &mut Args) -> TypResult<Value> {
let Spanned { v, span } = args.expect("value")?; let Spanned { v, span } = args.expect("value")?;
Ok(Value::Int(match v { Ok(Value::Int(match v {

View File

@ -6,7 +6,7 @@ pub fn repr(_: &mut Machine, args: &mut Args) -> TypResult<Value> {
Ok(args.expect::<Value>("value")?.repr().into()) Ok(args.expect::<Value>("value")?.repr().into())
} }
/// Cconvert a value to a string. /// Convert a value to a string.
pub fn str(_: &mut Machine, args: &mut Args) -> TypResult<Value> { pub fn str(_: &mut Machine, args: &mut Args) -> TypResult<Value> {
let Spanned { v, span } = args.expect("value")?; let Spanned { v, span } = args.expect("value")?;
Ok(Value::Str(match v { Ok(Value::Str(match v {
@ -31,20 +31,20 @@ pub fn regex(_: &mut Machine, args: &mut Args) -> TypResult<Value> {
/// Converts an integer into one or multiple letters. /// Converts an integer into one or multiple letters.
pub fn letter(_: &mut Machine, args: &mut Args) -> TypResult<Value> { pub fn letter(_: &mut Machine, args: &mut Args) -> TypResult<Value> {
convert(Numbering::Letter, args) numbered(Numbering::Letter, args)
} }
/// Converts an integer into a roman numeral. /// Converts an integer into a roman numeral.
pub fn roman(_: &mut Machine, args: &mut Args) -> TypResult<Value> { pub fn roman(_: &mut Machine, args: &mut Args) -> TypResult<Value> {
convert(Numbering::Roman, args) numbered(Numbering::Roman, args)
} }
/// Convert a number into a symbol. /// Convert a number into a symbol.
pub fn symbol(_: &mut Machine, args: &mut Args) -> TypResult<Value> { pub fn symbol(_: &mut Machine, args: &mut Args) -> TypResult<Value> {
convert(Numbering::Symbol, args) numbered(Numbering::Symbol, args)
} }
fn convert(numbering: Numbering, args: &mut Args) -> TypResult<Value> { fn numbered(numbering: Numbering, args: &mut Args) -> TypResult<Value> {
let n = args.expect::<usize>("non-negative integer")?; let n = args.expect::<usize>("non-negative integer")?;
Ok(Value::Str(numbering.apply(n))) Ok(Value::Str(numbering.apply(n)))
} }
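`Numbering::Roman` itself is not shown in this diff; as a rough idea of what `roman` produces, here is a textbook conversion, not the crate's actual implementation:

fn to_roman(mut n: usize) -> String {
    const TABLE: &[(usize, &str)] = &[
        (1000, "M"), (900, "CM"), (500, "D"), (400, "CD"),
        (100, "C"), (90, "XC"), (50, "L"), (40, "XL"),
        (10, "X"), (9, "IX"), (5, "V"), (4, "IV"), (1, "I"),
    ];
    let mut out = String::new();
    for &(value, symbol) in TABLE {
        while n >= value {
            out.push_str(symbol);
            n -= value;
        }
    }
    out // to_roman(2022) == "MMXXII"
}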

View File

@ -80,7 +80,7 @@ mod tests {
let path = Path::new("PTSans.ttf"); let path = Path::new("PTSans.ttf");
let loader = MemLoader::new().with(path, &data[..]); let loader = MemLoader::new().with(path, &data[..]);
// Test that the found was found. // Test that the face was found.
let info = &loader.faces[0]; let info = &loader.faces[0];
assert_eq!(info.path, path); assert_eq!(info.path, path);
assert_eq!(info.index, 0); assert_eq!(info.index, 0);

View File

@ -52,7 +52,7 @@ SUBCOMMANDS:
--fonts List all discovered system fonts --fonts List all discovered system fonts
"; ";
/// Highlight a .typ file into a HTML file. /// Highlight a .typ file into an HTML file.
struct HighlightCommand { struct HighlightCommand {
input: PathBuf, input: PathBuf,
output: PathBuf, output: PathBuf,
@ -72,7 +72,7 @@ OPTIONS:
-h, --help Print this help -h, --help Print this help
"; ";
/// List discovered fonts. /// List discovered system fonts.
struct FontsCommand { struct FontsCommand {
variants: bool, variants: bool,
} }
@ -142,7 +142,7 @@ fn parse_args() -> StrResult<Command> {
} }
/// Parse two freestanding path arguments, with the output path being optional. /// Parse two freestanding path arguments, with the output path being optional.
/// If it is omitted, it is determined from the input path's filename with the /// If it is omitted, it is determined from the input path's file stem plus the
/// given extension. /// given extension.
fn parse_input_output(args: &mut Arguments, ext: &str) -> StrResult<(PathBuf, PathBuf)> { fn parse_input_output(args: &mut Arguments, ext: &str) -> StrResult<(PathBuf, PathBuf)> {
let input: PathBuf = args.free_from_str().map_err(|_| "missing input file")?; let input: PathBuf = args.free_from_str().map_err(|_| "missing input file")?;
@ -229,7 +229,7 @@ fn typeset(command: TypesetCommand) -> StrResult<()> {
Ok(()) Ok(())
} }
/// Print diagnostics messages to the terminal. /// Print diagnostic messages to the terminal.
fn print_diagnostics( fn print_diagnostics(
sources: &SourceStore, sources: &SourceStore,
errors: Vec<Error>, errors: Vec<Error>,

View File

@ -14,7 +14,7 @@ thread_local! {
/// A map from hashes to cache entries. /// A map from hashes to cache entries.
type Cache = HashMap<u64, CacheEntry>; type Cache = HashMap<u64, CacheEntry>;
/// Access the cache. /// Access the cache mutably.
fn with<F, R>(f: F) -> R fn with<F, R>(f: F) -> R
where where
F: FnOnce(&mut Cache) -> R, F: FnOnce(&mut Cache) -> R,
@ -24,7 +24,8 @@ where
/// An entry in the cache. /// An entry in the cache.
struct CacheEntry { struct CacheEntry {
/// The memoized function's result plus constraints on the input. /// The memoized function's result plus constraints on the input in the form
/// `(O, I::Constraint)`.
data: Box<dyn Any>, data: Box<dyn Any>,
/// How many evictions have passed since the entry has been last used. /// How many evictions have passed since the entry has been last used.
age: usize, age: usize,
@ -32,9 +33,9 @@ struct CacheEntry {
/// Execute a memoized function call. /// Execute a memoized function call.
/// ///
/// This hashes all inputs to the function and then either returns a cached /// This [tracks](Track) all inputs to the function and then either returns a
/// version from the thread-local cache or executes the function and saves a /// cached version from the thread-local cache or executes the function and
/// copy of the results in the cache. /// saves a copy of the results in the cache.
/// ///
/// Note that `f` must be a pure function. /// Note that `f` must be a pure function.
pub fn memoized<I, O>(input: I, f: fn(input: I) -> (O, I::Constraint)) -> O pub fn memoized<I, O>(input: I, f: fn(input: I) -> (O, I::Constraint)) -> O
@ -48,7 +49,7 @@ where
/// Execute a function and then call another function with a reference to the /// Execute a function and then call another function with a reference to the
/// result. /// result.
/// ///
/// This hashes all inputs to the function and then either /// This [tracks](Track) all inputs to the function and then either
/// - calls `g` with a cached version from the thread-local cache, /// - calls `g` with a cached version from the thread-local cache,
/// - or executes `f`, calls `g` with the fresh version and saves the result in /// - or executes `f`, calls `g` with the fresh version and saves the result in
/// the cache. /// the cache.
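A much-simplified model of the thread-local cache described above, specialized to `u64` keys and `String` results for illustration; the real code stores boxed `(O, I::Constraint)` pairs and checks constraints instead of doing exact-key lookups.

use std::cell::RefCell;
use std::collections::HashMap;

thread_local! {
    static CACHE: RefCell<HashMap<u64, String>> = RefCell::new(HashMap::new());
}

fn memoized(input: u64, f: fn(u64) -> String) -> String {
    CACHE.with(|cache| {
        cache
            .borrow_mut()
            .entry(input)
            .or_insert_with(|| f(input))
            .clone()
    })
}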

View File

@ -154,7 +154,7 @@ impl Content {
Self::Show(node.pack(), None) Self::Show(node.pack(), None)
} }
/// Create a new sequence nodes from multiples nodes. /// Create a new sequence node from multiple nodes.
pub fn sequence(seq: Vec<Self>) -> Self { pub fn sequence(seq: Vec<Self>) -> Self {
match seq.as_slice() { match seq.as_slice() {
[] => Self::Empty, [] => Self::Empty,
@ -204,7 +204,7 @@ impl Content {
Self::Styled(Arc::new((self, styles))) Self::Styled(Arc::new((self, styles)))
} }
/// Assign a role to this content by adding a style map. /// Assign a semantic role to this content.
pub fn role(self, role: Role) -> Self { pub fn role(self, role: Role) -> Self {
self.styled_with_entry(StyleEntry::Role(role)) self.styled_with_entry(StyleEntry::Role(role))
} }

View File

@ -19,7 +19,7 @@ use crate::Context;
/// A node that can be layouted into a sequence of regions. /// A node that can be layouted into a sequence of regions.
/// ///
/// Layouting return one frame per used region. /// Layouting returns one frame per used region.
pub trait Layout: 'static { pub trait Layout: 'static {
/// Layout this node into the given regions, producing frames. /// Layout this node into the given regions, producing frames.
fn layout( fn layout(
@ -377,7 +377,7 @@ impl Layout for SizedNode {
struct FillNode { struct FillNode {
/// How to fill the frames resulting from the `child`. /// How to fill the frames resulting from the `child`.
fill: Paint, fill: Paint,
/// The node to fill. /// The node whose frames should be filled.
child: LayoutNode, child: LayoutNode,
} }
@ -402,7 +402,7 @@ impl Layout for FillNode {
struct StrokeNode { struct StrokeNode {
/// How to stroke the frames resulting from the `child`. /// How to stroke the frames resulting from the `child`.
stroke: Stroke, stroke: Stroke,
/// The node to stroke. /// The node whose frames should be stroked.
child: LayoutNode, child: LayoutNode,
} }

View File

@ -1,6 +1,7 @@
use std::cell::Cell; use std::cell::Cell;
use std::fmt::{self, Debug, Formatter}; use std::fmt::{self, Debug, Formatter};
use std::hash::{Hash, Hasher}; use std::hash::{Hash, Hasher};
use std::num::NonZeroUsize;
use std::sync::Arc; use std::sync::Arc;
use super::Content; use super::Content;
@ -55,7 +56,7 @@ impl LocateNode {
Self(Arc::new(Repr::Entry(EntryNode { group, recipe, value }))) Self(Arc::new(Repr::Entry(EntryNode { group, recipe, value })))
} }
/// Create a new node with access to a group's members. /// Create a new node with access to all of a group's members.
pub fn all(group: Group, recipe: Spanned<Func>) -> Self { pub fn all(group: Group, recipe: Spanned<Func>) -> Self {
Self(Arc::new(Repr::All(AllNode { group, recipe }))) Self(Arc::new(Repr::All(AllNode { group, recipe })))
} }
@ -278,7 +279,7 @@ impl PinBoard {
locate_in_frame( locate_in_frame(
&mut self.list, &mut self.list,
&mut flow, &mut flow,
1 + i, NonZeroUsize::new(1 + i).unwrap(),
frame, frame,
Transform::identity(), Transform::identity(),
); );
@ -295,7 +296,7 @@ impl PinBoard {
fn locate_in_frame( fn locate_in_frame(
pins: &mut [Pin], pins: &mut [Pin],
flow: &mut usize, flow: &mut usize,
page: usize, page: NonZeroUsize,
frame: &Frame, frame: &Frame,
ts: Transform, ts: Transform,
) { ) {
@ -384,7 +385,10 @@ impl Pin {
impl Default for Pin { impl Default for Pin {
fn default() -> Self { fn default() -> Self {
Self { Self {
loc: Location { page: 0, pos: Point::zero() }, loc: Location {
page: NonZeroUsize::new(1).unwrap(),
pos: Point::zero(),
},
flow: 0, flow: 0,
group: None, group: None,
value: None, value: None,
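The switch from `usize` to `NonZeroUsize` for page numbers carries the one-based invariant in the type itself. A small illustration of the conversion used above (the helper name is made up):

use std::num::NonZeroUsize;

fn page_number(index: usize) -> NonZeroUsize {
    // `index` is zero-based, pages are one-based, so `index + 1` is never zero.
    NonZeroUsize::new(index + 1).expect("page index overflowed")
}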

View File

@ -18,7 +18,7 @@ pub struct Property {
pub key: KeyId, pub key: KeyId,
/// The id of the node the property belongs to. /// The id of the node the property belongs to.
pub node: NodeId, pub node: NodeId,
/// Whether the property should only affects the first node down the /// Whether the property should only affect the first node down the
/// hierarchy. Used by constructors. /// hierarchy. Used by constructors.
pub scoped: bool, pub scoped: bool,
/// The property's value. /// The property's value.
@ -143,10 +143,10 @@ pub trait Key<'a>: Copy + 'static {
/// The name of the property, used for debug printing. /// The name of the property, used for debug printing.
const NAME: &'static str; const NAME: &'static str;
/// The ids of the key and of the node the key belongs to. /// The id of the node the key belongs to.
fn node() -> NodeId; fn node() -> NodeId;
/// Compute an output value from a sequence of values belong to this key, /// Compute an output value from a sequence of values belonging to this key,
/// folding if necessary. /// folding if necessary.
fn get( fn get(
chain: StyleChain<'a>, chain: StyleChain<'a>,

View File

@ -216,7 +216,7 @@ impl StyleEntry {
} }
} }
/// The highest-level kind of of structure the entry interrupts. /// The highest-level kind of structure the entry interrupts.
pub fn interruption(&self) -> Option<Interruption> { pub fn interruption(&self) -> Option<Interruption> {
match self { match self {
Self::Property(property) => property.interruption(), Self::Property(property) => property.interruption(),
@ -328,7 +328,7 @@ impl<'a> StyleChain<'a> {
Ok(realized) Ok(realized)
} }
/// Retrieve the current role /// Retrieve the current role.
pub fn role(self) -> Option<Role> { pub fn role(self) -> Option<Role> {
let mut depth = 0; let mut depth = 0;
@ -522,6 +522,15 @@ impl<T> StyleVec<T> {
} }
} }
/// Iterate over references to the contained items and associated style maps.
pub fn iter(&self) -> impl Iterator<Item = (&T, &StyleMap)> + '_ {
self.items().zip(
self.maps
.iter()
.flat_map(|(map, count)| iter::repeat(map).take(*count)),
)
}
/// Iterate over the contained items. /// Iterate over the contained items.
pub fn items(&self) -> std::slice::Iter<'_, T> { pub fn items(&self) -> std::slice::Iter<'_, T> {
self.items.iter() self.items.iter()
@ -535,15 +544,6 @@ impl<T> StyleVec<T> {
pub fn styles(&self) -> impl Iterator<Item = &StyleMap> { pub fn styles(&self) -> impl Iterator<Item = &StyleMap> {
self.maps.iter().map(|(map, _)| map) self.maps.iter().map(|(map, _)| map)
} }
/// Iterate over references to the contained items and associated style maps.
pub fn iter(&self) -> impl Iterator<Item = (&T, &StyleMap)> + '_ {
self.items().zip(
self.maps
.iter()
.flat_map(|(map, count)| iter::repeat(map).take(*count)),
)
}
} }
impl<T> Default for StyleVec<T> { impl<T> Default for StyleVec<T> {

View File

@ -54,7 +54,7 @@ impl Reparser<'_> {
outermost: bool, outermost: bool,
safe_to_replace: bool, safe_to_replace: bool,
) -> Option<Range<usize>> { ) -> Option<Range<usize>> {
let is_markup = matches!(node.kind(), NodeKind::Markup(_)); let is_markup = matches!(node.kind(), NodeKind::Markup { .. });
let original_count = node.children().len(); let original_count = node.children().len();
let original_offset = offset; let original_offset = offset;
@ -96,9 +96,8 @@ impl Reparser<'_> {
} else { } else {
// Update compulsory state of `ahead_nontrivia`. // Update compulsory state of `ahead_nontrivia`.
if let Some(ahead_nontrivia) = ahead.as_mut() { if let Some(ahead_nontrivia) = ahead.as_mut() {
match child.kind() { if let NodeKind::Space { newlines: (1 ..) } = child.kind() {
NodeKind::Space(n) if n > &0 => ahead_nontrivia.newline(), ahead_nontrivia.newline();
_ => {}
} }
} }
@ -156,7 +155,6 @@ impl Reparser<'_> {
// Do not allow replacement of elements inside of constructs whose // Do not allow replacement of elements inside of constructs whose
// opening and closing brackets look the same. // opening and closing brackets look the same.
let safe_inside = node.kind().is_bounded(); let safe_inside = node.kind().is_bounded();
let child = &mut node.children_mut()[pos.idx]; let child = &mut node.children_mut()[pos.idx];
let prev_len = child.len(); let prev_len = child.len();
let prev_descendants = child.descendants(); let prev_descendants = child.descendants();
@ -200,8 +198,8 @@ impl Reparser<'_> {
// Make sure this is a markup node and that we may replace. If so, save // Make sure this is a markup node and that we may replace. If so, save
// the current indent. // the current indent.
let indent = match node.kind() { let min_indent = match node.kind() {
NodeKind::Markup(n) if safe_to_replace => *n, NodeKind::Markup { min_indent } if safe_to_replace => *min_indent,
_ => return None, _ => return None,
}; };
@ -220,7 +218,7 @@ impl Reparser<'_> {
self.replace( self.replace(
node, node,
ReparseMode::MarkupElements(at_start, indent), ReparseMode::MarkupElements { at_start, min_indent },
start.idx .. end.idx + 1, start.idx .. end.idx + 1,
superseded_span, superseded_span,
outermost, outermost,
@ -261,15 +259,17 @@ impl Reparser<'_> {
&self.src[newborn_span.start ..], &self.src[newborn_span.start ..],
newborn_span.len(), newborn_span.len(),
), ),
ReparseMode::MarkupElements(at_start, indent) => reparse_markup_elements( ReparseMode::MarkupElements { at_start, min_indent } => {
&prefix, reparse_markup_elements(
&self.src[newborn_span.start ..], &prefix,
newborn_span.len(), &self.src[newborn_span.start ..],
differential, newborn_span.len(),
&node.children().as_slice()[superseded_start ..], differential,
at_start, &node.children().as_slice()[superseded_start ..],
indent, at_start,
), min_indent,
)
}
}?; }?;
// Do not accept unclosed nodes if the old node wasn't at the right edge // Do not accept unclosed nodes if the old node wasn't at the right edge
@ -294,12 +294,12 @@ struct NodePos {
offset: usize, offset: usize,
} }
/// Encodes the state machine of the search for the node which is pending for /// Encodes the state machine of the search for the nodes that are pending for
/// replacement. /// replacement.
#[derive(Clone, Copy, Debug, PartialEq)] #[derive(Clone, Copy, Debug, PartialEq)]
enum SearchState { enum SearchState {
/// Neither an end nor a start have been found as of now. /// Neither an end nor a start have been found as of now.
/// The last non-whitespace child is continually saved. /// The latest non-trivia child is continually saved.
NoneFound, NoneFound,
/// The search has concluded by finding a node that fully contains the /// The search has concluded by finding a node that fully contains the
/// modifications. /// modifications.
@ -332,15 +332,18 @@ impl SearchState {
} }
} }
/// An ahead element with an index and whether it is `at_start`. /// An ahead node with an index and whether it is `at_start`.
#[derive(Clone, Copy, Debug, PartialEq)] #[derive(Clone, Copy, Debug, PartialEq)]
struct Ahead { struct Ahead {
/// The position of the node.
pos: NodePos, pos: NodePos,
/// The `at_start` before this node.
at_start: bool, at_start: bool,
/// The kind of ahead node.
kind: AheadKind, kind: AheadKind,
} }
/// The kind of ahead element. /// The kind of ahead node.
#[derive(Clone, Copy, Debug, PartialEq)] #[derive(Clone, Copy, Debug, PartialEq)]
enum AheadKind { enum AheadKind {
/// A normal non-trivia child has been found. /// A normal non-trivia child has been found.
@ -382,9 +385,9 @@ enum ReparseMode {
Code, Code,
/// Reparse a content block, including its square brackets. /// Reparse a content block, including its square brackets.
Content, Content,
/// Reparse elements of the markup. The variant carries whether the node is /// Reparse elements of the markup. Also specifies the initial `at_start`
/// `at_start` and the minimum indent of the containing markup node. /// state for the reparse and the minimum indent of the reparsed nodes.
MarkupElements(bool, usize), MarkupElements { at_start: bool, min_indent: usize },
} }
#[cfg(test)] #[cfg(test)]
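The `NodeKind::Space { newlines: (1 ..) }` arms introduced in this file rely on range patterns inside struct-variant fields; a tiny self-contained example of the same pattern, with a made-up enum:

enum Kind {
    Space { newlines: usize },
    Other,
}

fn breaks_line(kind: &Kind) -> bool {
    // Matches any `Space` whose `newlines` field is at least one.
    matches!(kind, Kind::Space { newlines: 1 .. })
}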

View File

@ -77,7 +77,7 @@ fn reparse_content_block(
Some((vec![first], terminated, 1)) Some((vec![first], terminated, 1))
} }
/// Reparse some markup elements without the topmost node. /// Reparse a sequence of markup elements without the topmost node.
/// ///
/// Returns `Some` if all of the input was consumed. /// Returns `Some` if all of the input was consumed.
fn reparse_markup_elements( fn reparse_markup_elements(
@ -87,7 +87,7 @@ fn reparse_markup_elements(
differential: isize, differential: isize,
reference: &[SyntaxNode], reference: &[SyntaxNode],
mut at_start: bool, mut at_start: bool,
column: usize, min_indent: usize,
) -> Option<(Vec<SyntaxNode>, bool, usize)> { ) -> Option<(Vec<SyntaxNode>, bool, usize)> {
let mut p = Parser::with_prefix(prefix, src, TokenMode::Markup); let mut p = Parser::with_prefix(prefix, src, TokenMode::Markup);
@ -98,8 +98,8 @@ fn reparse_markup_elements(
let mut stopped = false; let mut stopped = false;
'outer: while !p.eof() { 'outer: while !p.eof() {
if let Some(NodeKind::Space(1 ..)) = p.peek() { if let Some(NodeKind::Space { newlines: (1 ..) }) = p.peek() {
if p.column(p.current_end()) < column { if p.column(p.current_end()) < min_indent {
return None; return None;
} }
} }
@ -155,7 +155,7 @@ fn reparse_markup_elements(
/// If `at_start` is true, things like headings that may only appear at the /// If `at_start` is true, things like headings that may only appear at the
/// beginning of a line or content block are initially allowed. /// beginning of a line or content block are initially allowed.
fn markup(p: &mut Parser, mut at_start: bool) { fn markup(p: &mut Parser, mut at_start: bool) {
p.perform(NodeKind::Markup(0), |p| { p.perform(NodeKind::Markup { min_indent: 0 }, |p| {
while !p.eof() { while !p.eof() {
markup_node(p, &mut at_start); markup_node(p, &mut at_start);
} }
@ -168,18 +168,18 @@ fn markup_line(p: &mut Parser) {
} }
/// Parse markup that stays right of the given `column`. /// Parse markup that stays right of the given `min_indent` column.
fn markup_indented(p: &mut Parser, column: usize) { fn markup_indented(p: &mut Parser, min_indent: usize) {
p.eat_while(|t| match t { p.eat_while(|t| match t {
NodeKind::Space(n) => *n == 0, NodeKind::Space { newlines } => *newlines == 0,
NodeKind::LineComment | NodeKind::BlockComment => true, NodeKind::LineComment | NodeKind::BlockComment => true,
_ => false, _ => false,
}); });
let mut at_start = false; let mut at_start = false;
p.perform(NodeKind::Markup(column), |p| { p.perform(NodeKind::Markup { min_indent }, |p| {
while !p.eof() { while !p.eof() {
if let Some(NodeKind::Space(1 ..)) = p.peek() { if let Some(NodeKind::Space { newlines: (1 ..) }) = p.peek() {
if p.column(p.current_end()) < column { if p.column(p.current_end()) < min_indent {
break; break;
} }
} }
@ -198,7 +198,7 @@ fn markup_node(p: &mut Parser, at_start: &mut bool) {
match token { match token {
// Whitespace. // Whitespace.
NodeKind::Space(newlines) => { NodeKind::Space { newlines } => {
*at_start |= *newlines > 0; *at_start |= *newlines > 0;
p.eat(); p.eat();
return; return;
@ -284,7 +284,7 @@ fn heading(p: &mut Parser, at_start: bool) {
while p.eat_if(NodeKind::Eq) {} while p.eat_if(NodeKind::Eq) {}
if at_start && p.peek().map_or(true, |kind| kind.is_space()) { if at_start && p.peek().map_or(true, |kind| kind.is_space()) {
p.eat_while(|kind| kind == &NodeKind::Space(0)); p.eat_while(|kind| *kind == NodeKind::Space { newlines: 0 });
markup_line(p); markup_line(p);
marker.end(p, NodeKind::Heading); marker.end(p, NodeKind::Heading);
} else { } else {
@ -299,9 +299,9 @@ fn list_node(p: &mut Parser, at_start: bool) {
let text: EcoString = p.peek_src().into(); let text: EcoString = p.peek_src().into();
p.assert(NodeKind::Minus); p.assert(NodeKind::Minus);
let column = p.column(p.prev_end()); let min_indent = p.column(p.prev_end());
if at_start && p.eat_if(NodeKind::Space(0)) && !p.eof() { if at_start && p.eat_if(NodeKind::Space { newlines: 0 }) && !p.eof() {
markup_indented(p, column); markup_indented(p, min_indent);
marker.end(p, NodeKind::List); marker.end(p, NodeKind::List);
} else { } else {
marker.convert(p, NodeKind::Text(text)); marker.convert(p, NodeKind::Text(text));
@ -314,16 +314,16 @@ fn enum_node(p: &mut Parser, at_start: bool) {
let text: EcoString = p.peek_src().into(); let text: EcoString = p.peek_src().into();
p.eat(); p.eat();
let column = p.column(p.prev_end()); let min_indent = p.column(p.prev_end());
if at_start && p.eat_if(NodeKind::Space(0)) && !p.eof() { if at_start && p.eat_if(NodeKind::Space { newlines: 0 }) && !p.eof() {
markup_indented(p, column); markup_indented(p, min_indent);
marker.end(p, NodeKind::Enum); marker.end(p, NodeKind::Enum);
} else { } else {
marker.convert(p, NodeKind::Text(text)); marker.convert(p, NodeKind::Text(text));
} }
} }
/// Parse an expression within markup mode. /// Parse an expression within a markup mode.
fn markup_expr(p: &mut Parser) { fn markup_expr(p: &mut Parser) {
// Does the expression need termination or can content follow directly? // Does the expression need termination or can content follow directly?
let stmt = matches!( let stmt = matches!(
@ -556,10 +556,10 @@ fn parenthesized(p: &mut Parser, atomic: bool) -> ParseResult {
enum CollectionKind { enum CollectionKind {
/// The collection is only one item and has no comma. /// The collection is only one item and has no comma.
Group, Group,
/// The collection starts with a positional and has more items or a trailing /// The collection starts with a positional item and has multiple items or a
/// comma. /// trailing comma.
Positional, Positional,
/// The collection starts with a named item. /// The collection starts with a colon or named item.
Named, Named,
} }
@ -672,7 +672,7 @@ fn array(p: &mut Parser, marker: Marker) {
} }
/// Convert a collection into a dictionary, producing errors for anything other /// Convert a collection into a dictionary, producing errors for anything other
/// than named pairs. /// than named and keyed pairs.
fn dict(p: &mut Parser, marker: Marker) { fn dict(p: &mut Parser, marker: Marker) {
let mut used = HashSet::new(); let mut used = HashSet::new();
marker.filter_children(p, |x| match x.kind() { marker.filter_children(p, |x| match x.kind() {
@ -731,11 +731,11 @@ fn code(p: &mut Parser) {
p.end_group(); p.end_group();
// Forcefully skip over newlines since the group's contents can't. // Forcefully skip over newlines since the group's contents can't.
p.eat_while(|t| matches!(t, NodeKind::Space(_))); p.eat_while(NodeKind::is_space);
} }
} }
// Parse a content block: `[...]`. /// Parse a content block: `[...]`.
fn content_block(p: &mut Parser) { fn content_block(p: &mut Parser) {
p.perform(NodeKind::ContentBlock, |p| { p.perform(NodeKind::ContentBlock, |p| {
p.start_group(Group::Bracket); p.start_group(Group::Bracket);
@ -857,7 +857,7 @@ fn wrap_expr(p: &mut Parser) -> ParseResult {
}) })
} }
/// Parse an if expression. /// Parse an if-else expression.
fn if_expr(p: &mut Parser) -> ParseResult { fn if_expr(p: &mut Parser) -> ParseResult {
p.perform(NodeKind::IfExpr, |p| { p.perform(NodeKind::IfExpr, |p| {
p.assert(NodeKind::If); p.assert(NodeKind::If);
@ -886,7 +886,7 @@ fn while_expr(p: &mut Parser) -> ParseResult {
}) })
} }
/// Parse a for expression. /// Parse a for-in expression.
fn for_expr(p: &mut Parser) -> ParseResult { fn for_expr(p: &mut Parser) -> ParseResult {
p.perform(NodeKind::ForExpr, |p| { p.perform(NodeKind::ForExpr, |p| {
p.assert(NodeKind::For); p.assert(NodeKind::For);

View File

@ -24,7 +24,7 @@ pub struct Parser<'s> {
children: Vec<SyntaxNode>, children: Vec<SyntaxNode>,
/// Whether the last group was not correctly terminated. /// Whether the last group was not correctly terminated.
unterminated_group: bool, unterminated_group: bool,
/// Whether a group terminator was found, that did not close a group. /// Whether a group terminator was found that did not close a group.
stray_terminator: bool, stray_terminator: bool,
} }
@ -58,9 +58,10 @@ impl<'s> Parser<'s> {
self.children self.children
} }
/// End the parsing process and return the parsed children and whether the /// End the parsing process and return
/// last token was terminated if all groups were terminated correctly or /// - the parsed children and whether the last token was terminated, if all
/// `None` otherwise. /// groups were terminated correctly, or
/// - `None` otherwise.
pub fn consume(self) -> Option<(Vec<SyntaxNode>, bool)> { pub fn consume(self) -> Option<(Vec<SyntaxNode>, bool)> {
self.terminated().then(|| (self.children, self.tokens.terminated())) self.terminated().then(|| (self.children, self.tokens.terminated()))
} }
@ -131,7 +132,7 @@ impl<'s> Parser<'s> {
self.repeek(); self.repeek();
} }
/// Eat if the current token it is the given one. /// Consume the current token if it is the given one.
pub fn eat_if(&mut self, kind: NodeKind) -> bool { pub fn eat_if(&mut self, kind: NodeKind) -> bool {
let at = self.at(kind); let at = self.at(kind);
if at { if at {
@ -150,7 +151,8 @@ impl<'s> Parser<'s> {
} }
} }
/// Eat if the current token is the given one and produce an error if not. /// Consume the current token if it is the given one and produce an error if
/// not.
pub fn expect(&mut self, kind: NodeKind) -> ParseResult { pub fn expect(&mut self, kind: NodeKind) -> ParseResult {
let at = self.peek() == Some(&kind); let at = self.peek() == Some(&kind);
if at { if at {
@ -162,7 +164,7 @@ impl<'s> Parser<'s> {
} }
} }
/// Eat, debug-asserting that the token is the given one. /// Consume the current token, debug-asserting that it is the given one.
#[track_caller] #[track_caller]
pub fn assert(&mut self, kind: NodeKind) { pub fn assert(&mut self, kind: NodeKind) {
debug_assert_eq!(self.peek(), Some(&kind)); debug_assert_eq!(self.peek(), Some(&kind));
@ -179,8 +181,8 @@ impl<'s> Parser<'s> {
if self.eof { None } else { self.current.as_ref() } if self.eof { None } else { self.current.as_ref() }
} }
/// Peek at the current token, if it follows immediately after the last one /// Peek at the current token, but only if it follows immediately after the
/// without any trivia in between. /// last one without any trivia in between.
pub fn peek_direct(&self) -> Option<&NodeKind> { pub fn peek_direct(&self) -> Option<&NodeKind> {
if self.prev_end() == self.current_start() { if self.prev_end() == self.current_start() {
self.peek() self.peek()
@ -267,9 +269,9 @@ impl<'s> Parser<'s> {
Group::Imports => None, Group::Imports => None,
} { } {
if self.current.as_ref() == Some(&end) { if self.current.as_ref() == Some(&end) {
// If another group closes after a group with the missing terminator, // If another group closes after a group with the missing
// its scope of influence ends here and no longer taints the rest of the // terminator, its scope of influence ends here and no longer
// reparse. // taints the rest of the reparse.
self.unterminated_group = false; self.unterminated_group = false;
// Bump the delimiter and return. No need to rescan in this // Bump the delimiter and return. No need to rescan in this
@ -330,7 +332,7 @@ impl<'s> Parser<'s> {
Some(NodeKind::Underscore) => self.inside(Group::Emph), Some(NodeKind::Underscore) => self.inside(Group::Emph),
Some(NodeKind::Semicolon) => self.inside(Group::Expr), Some(NodeKind::Semicolon) => self.inside(Group::Expr),
Some(NodeKind::From) => self.inside(Group::Imports), Some(NodeKind::From) => self.inside(Group::Imports),
Some(NodeKind::Space(n)) => self.space_ends_group(*n), Some(NodeKind::Space { newlines }) => self.space_ends_group(*newlines),
Some(_) => false, Some(_) => false,
None => true, None => true,
}; };
@ -339,7 +341,7 @@ impl<'s> Parser<'s> {
/// Returns whether the given type can be skipped over. /// Returns whether the given type can be skipped over.
fn is_trivia(&self, token: &NodeKind) -> bool { fn is_trivia(&self, token: &NodeKind) -> bool {
match token { match token {
NodeKind::Space(n) => !self.space_ends_group(*n), NodeKind::Space { newlines } => !self.space_ends_group(*newlines),
NodeKind::LineComment => true, NodeKind::LineComment => true,
NodeKind::BlockComment => true, NodeKind::BlockComment => true,
_ => false, _ => false,
@ -491,8 +493,8 @@ impl Marker {
/// A logical group of tokens, e.g. `[...]`. /// A logical group of tokens, e.g. `[...]`.
#[derive(Debug)] #[derive(Debug)]
struct GroupEntry { struct GroupEntry {
/// The kind of group this is. This decides which tokens will end the group. /// The kind of group this is. This decides which token(s) will end the
/// For example, a [`Group::Paren`] will be ended by /// group. For example, a [`Group::Paren`] will be ended by
/// [`Token::RightParen`]. /// [`Token::RightParen`].
pub kind: Group, pub kind: Group,
/// The mode the parser was in _before_ the group started (to which we go /// The mode the parser was in _before_ the group started (to which we go

View File

@ -47,7 +47,7 @@ pub fn resolve_hex(sequence: &str) -> Option<char> {
u32::from_str_radix(sequence, 16).ok().and_then(std::char::from_u32) u32::from_str_radix(sequence, 16).ok().and_then(std::char::from_u32)
} }
/// Resolve the language tag and trims the raw text. /// Resolve the language tag and trim the raw text.
pub fn resolve_raw(column: usize, backticks: usize, text: &str) -> RawNode { pub fn resolve_raw(column: usize, backticks: usize, text: &str) -> RawNode {
if backticks > 1 { if backticks > 1 {
let (tag, inner) = split_at_lang_tag(text); let (tag, inner) = split_at_lang_tag(text);
@ -77,7 +77,7 @@ fn split_at_lang_tag(raw: &str) -> (&str, &str) {
/// Trim raw text and split it into lines. /// Trim raw text and split it into lines.
/// ///
/// Returns whether at least one newline was contained in `raw`. /// Also returns whether at least one newline was contained in `raw`.
fn trim_and_split_raw(column: usize, mut raw: &str) -> (String, bool) { fn trim_and_split_raw(column: usize, mut raw: &str) -> (String, bool) {
// Trims one space at the start. // Trims one space at the start.
raw = raw.strip_prefix(' ').unwrap_or(raw); raw = raw.strip_prefix(' ').unwrap_or(raw);

View File

@ -110,7 +110,9 @@ impl<'s> Iterator for Tokens<'s> {
']' => NodeKind::RightBracket, ']' => NodeKind::RightBracket,
// Whitespace. // Whitespace.
' ' if self.s.done() || !self.s.at(char::is_whitespace) => NodeKind::Space(0), ' ' if self.s.done() || !self.s.at(char::is_whitespace) => {
NodeKind::Space { newlines: 0 }
}
c if c.is_whitespace() => self.whitespace(), c if c.is_whitespace() => self.whitespace(),
// Comments with special case for URLs. // Comments with special case for URLs.
@ -260,7 +262,7 @@ impl<'s> Tokens<'s> {
} }
} }
NodeKind::Space(newlines) NodeKind::Space { newlines }
} }
fn backslash(&mut self) -> NodeKind { fn backslash(&mut self) -> NodeKind {
@ -681,8 +683,8 @@ mod tests {
use SpanPos::*; use SpanPos::*;
use TokenMode::{Code, Markup}; use TokenMode::{Code, Markup};
fn Error(pos: SpanPos, message: &str) -> NodeKind { fn Space(newlines: usize) -> NodeKind {
NodeKind::Error(pos, message.into()) NodeKind::Space { newlines }
} }
fn Raw(text: &str, lang: Option<&str>, block: bool) -> NodeKind { fn Raw(text: &str, lang: Option<&str>, block: bool) -> NodeKind {
@ -709,6 +711,10 @@ mod tests {
NodeKind::Ident(ident.into()) NodeKind::Ident(ident.into())
} }
fn Error(pos: SpanPos, message: &str) -> NodeKind {
NodeKind::Error(pos, message.into())
}
fn Invalid(invalid: &str) -> NodeKind { fn Invalid(invalid: &str) -> NodeKind {
NodeKind::Unknown(invalid.into()) NodeKind::Unknown(invalid.into())
} }

View File

@ -107,7 +107,7 @@ impl SourceStore {
return id; return id;
} }
// No existing file yet. // No existing file yet, so we allocate a new id.
let id = SourceId(self.sources.len() as u16); let id = SourceId(self.sources.len() as u16);
self.sources.push(SourceFile::new(id, path, src)); self.sources.push(SourceFile::new(id, path, src));
@ -166,8 +166,9 @@ pub struct SourceFile {
impl SourceFile { impl SourceFile {
/// Create a new source file. /// Create a new source file.
pub fn new(id: SourceId, path: &Path, src: String) -> Self { pub fn new(id: SourceId, path: &Path, src: String) -> Self {
let mut lines = vec![Line { byte_idx: 0, utf16_idx: 0 }]; let lines = std::iter::once(Line { byte_idx: 0, utf16_idx: 0 })
lines.extend(Line::iter(0, 0, &src)); .chain(lines(0, 0, &src))
.collect();
let mut root = parse(&src); let mut root = parse(&src);
root.numberize(id, Span::FULL).unwrap(); root.numberize(id, Span::FULL).unwrap();
@ -242,7 +243,7 @@ impl SourceFile {
pub fn replace(&mut self, src: String) { pub fn replace(&mut self, src: String) {
self.src = src; self.src = src;
self.lines = vec![Line { byte_idx: 0, utf16_idx: 0 }]; self.lines = vec![Line { byte_idx: 0, utf16_idx: 0 }];
self.lines.extend(Line::iter(0, 0, &self.src)); self.lines.extend(lines(0, 0, &self.src));
self.root = parse(&self.src); self.root = parse(&self.src);
self.root.numberize(self.id(), Span::FULL).unwrap(); self.root.numberize(self.id(), Span::FULL).unwrap();
self.rev = self.rev.wrapping_add(1); self.rev = self.rev.wrapping_add(1);
@ -271,22 +272,19 @@ impl SourceFile {
} }
// Recalculate the line starts after the edit. // Recalculate the line starts after the edit.
self.lines.extend(Line::iter( self.lines
start_byte, .extend(lines(start_byte, start_utf16, &self.src[start_byte ..]));
start_utf16,
&self.src[start_byte ..],
));
// Incrementally reparse the replaced range. // Incrementally reparse the replaced range.
reparse(&mut self.root, &self.src, replace, with.len()) reparse(&mut self.root, &self.src, replace, with.len())
} }
/// Get the length of the file in bytes. /// Get the length of the file in UTF-8 encoded bytes.
pub fn len_bytes(&self) -> usize { pub fn len_bytes(&self) -> usize {
self.src.len() self.src.len()
} }
/// Get the length of the file in UTF16 code units. /// Get the length of the file in UTF-16 code units.
pub fn len_utf16(&self) -> usize { pub fn len_utf16(&self) -> usize {
let last = self.lines.last().unwrap(); let last = self.lines.last().unwrap();
last.utf16_idx + self.src[last.byte_idx ..].len_utf16() last.utf16_idx + self.src[last.byte_idx ..].len_utf16()
@ -396,56 +394,48 @@ struct Line {
utf16_idx: usize, utf16_idx: usize,
} }
impl Line { /// Iterate over the lines in the string.
/// Iterate over the lines in the string. fn lines(
fn iter( byte_offset: usize,
byte_offset: usize, utf16_offset: usize,
utf16_offset: usize, string: &str,
string: &str, ) -> impl Iterator<Item = Line> + '_ {
) -> impl Iterator<Item = Line> + '_ { let mut s = Scanner::new(string);
let mut s = Scanner::new(string); let mut utf16_idx = utf16_offset;
let mut utf16_idx = utf16_offset;
std::iter::from_fn(move || { std::iter::from_fn(move || {
s.eat_until(|c: char| { s.eat_until(|c: char| {
utf16_idx += c.len_utf16(); utf16_idx += c.len_utf16();
is_newline(c) is_newline(c)
}); });
if s.done() { if s.done() {
return None; return None;
} }
if s.eat() == Some('\r') && s.eat_if('\n') { if s.eat() == Some('\r') && s.eat_if('\n') {
utf16_idx += 1; utf16_idx += 1;
} }
Some(Line { Some(Line {
byte_idx: byte_offset + s.cursor(), byte_idx: byte_offset + s.cursor(),
utf16_idx, utf16_idx,
})
}) })
} })
}
impl AsRef<str> for SourceFile {
fn as_ref(&self) -> &str {
&self.src
}
} }
#[cfg(feature = "codespan-reporting")] #[cfg(feature = "codespan-reporting")]
impl<'a> Files<'a> for SourceStore { impl<'a> Files<'a> for SourceStore {
type FileId = SourceId; type FileId = SourceId;
type Name = std::path::Display<'a>; type Name = std::path::Display<'a>;
type Source = &'a SourceFile; type Source = &'a str;
fn name(&'a self, id: SourceId) -> Result<Self::Name, files::Error> { fn name(&'a self, id: SourceId) -> Result<Self::Name, files::Error> {
Ok(self.get(id).path().display()) Ok(self.get(id).path().display())
} }
fn source(&'a self, id: SourceId) -> Result<Self::Source, files::Error> { fn source(&'a self, id: SourceId) -> Result<Self::Source, files::Error> {
Ok(self.get(id)) Ok(self.get(id).src())
} }
fn line_index(&'a self, id: SourceId, given: usize) -> Result<usize, files::Error> { fn line_index(&'a self, id: SourceId, given: usize) -> Result<usize, files::Error> {
@ -571,6 +561,7 @@ mod tests {
let result = SourceFile::detached(after); let result = SourceFile::detached(after);
source.edit(range, with); source.edit(range, with);
assert_eq!(source.src, result.src); assert_eq!(source.src, result.src);
assert_eq!(source.root, result.root);
assert_eq!(source.lines, result.lines); assert_eq!(source.lines, result.lines);
} }
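The free `lines` function and the UTF-16 length accessors above maintain parallel byte and UTF-16 offsets. A standalone sketch of the same bookkeeping, ignoring the less common newline characters that the real `is_newline` also accepts:

fn line_starts(src: &str) -> Vec<(usize, usize)> {
    // (byte index, UTF-16 index) of the start of every line, including the first.
    let mut starts = vec![(0, 0)];
    let mut utf16 = 0;
    let mut chars = src.char_indices().peekable();
    while let Some((i, c)) = chars.next() {
        utf16 += c.len_utf16();
        let crlf = c == '\r' && chars.peek().map(|&(_, n)| n) == Some('\n');
        if c == '\n' || (c == '\r' && !crlf) {
            starts.push((i + c.len_utf8(), utf16));
        }
    }
    starts
}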

View File

@ -54,15 +54,15 @@ macro_rules! node {
node! { node! {
/// The syntactical root capable of representing a full parsed document. /// The syntactical root capable of representing a full parsed document.
Markup: NodeKind::Markup(_) Markup: NodeKind::Markup { .. }
} }
impl Markup { impl Markup {
/// The markup nodes. /// The markup nodes.
pub fn nodes(&self) -> impl Iterator<Item = MarkupNode> + '_ { pub fn nodes(&self) -> impl Iterator<Item = MarkupNode> + '_ {
self.0.children().filter_map(|node| match node.kind() { self.0.children().filter_map(|node| match node.kind() {
NodeKind::Space(2 ..) => Some(MarkupNode::Parbreak), NodeKind::Space { newlines: (2 ..) } => Some(MarkupNode::Parbreak),
NodeKind::Space(_) => Some(MarkupNode::Space), NodeKind::Space { .. } => Some(MarkupNode::Space),
&NodeKind::Linebreak { justified } => { &NodeKind::Linebreak { justified } => {
Some(MarkupNode::Linebreak { justified }) Some(MarkupNode::Linebreak { justified })
} }
@ -159,7 +159,7 @@ pub struct RawNode {
pub block: bool, pub block: bool,
} }
/// A math formula: `$a^2 + b^2 = c^2$`. /// A math formula: `$x$`, `$[x^2]$`.
#[derive(Debug, Clone, PartialEq, Hash)] #[derive(Debug, Clone, PartialEq, Hash)]
pub struct MathNode { pub struct MathNode {
/// The formula between the dollars / brackets. /// The formula between the dollars / brackets.
@ -514,7 +514,7 @@ impl DictExpr {
pub enum DictItem { pub enum DictItem {
/// A named pair: `thickness: 3pt`. /// A named pair: `thickness: 3pt`.
Named(Named), Named(Named),
/// A keyed pair: `"spaced key": true`. /// A keyed pair: `"spacy key": true`.
Keyed(Keyed), Keyed(Keyed),
/// A spread value: `..things`. /// A spread value: `..things`.
Spread(Expr), Spread(Expr),
@ -557,12 +557,12 @@ impl Named {
} }
node! { node! {
/// A pair of a string key and an expression: `"spaced key": true`. /// A pair of a string key and an expression: `"spacy key": true`.
Keyed Keyed
} }
impl Keyed { impl Keyed {
/// The key: `"spaced key"`. /// The key: `"spacy key"`.
pub fn key(&self) -> EcoString { pub fn key(&self) -> EcoString {
self.0 self.0
.children() .children()
@ -593,7 +593,7 @@ impl UnaryExpr {
.expect("unary expression is missing operator") .expect("unary expression is missing operator")
} }
/// The expression to operator on: `x`. /// The expression to operate on: `x`.
pub fn expr(&self) -> Expr { pub fn expr(&self) -> Expr {
self.0.cast_last_child().expect("unary expression is missing child") self.0.cast_last_child().expect("unary expression is missing child")
} }
@ -1010,9 +1010,10 @@ impl LetExpr {
/// The expression the binding is initialized with. /// The expression the binding is initialized with.
pub fn init(&self) -> Option<Expr> { pub fn init(&self) -> Option<Expr> {
if self.0.cast_first_child::<Ident>().is_some() { if self.0.cast_first_child::<Ident>().is_some() {
// This is a normal binding like `let x = 1`.
self.0.children().filter_map(SyntaxNode::cast).nth(1) self.0.children().filter_map(SyntaxNode::cast).nth(1)
} else { } else {
// This is a let .. with expression. // This is a closure binding like `let f(x) = 1`.
self.0.cast_first_child() self.0.cast_first_child()
} }
} }
@ -1187,7 +1188,7 @@ impl ImportExpr {
.expect("import is missing items") .expect("import is missing items")
} }
/// The location of the importable file. /// The path to the file that should be imported.
pub fn path(&self) -> Expr { pub fn path(&self) -> Expr {
self.0.cast_last_child().expect("import is missing path") self.0.cast_last_child().expect("import is missing path")
} }
@ -1208,7 +1209,7 @@ node! {
} }
impl IncludeExpr { impl IncludeExpr {
/// The location of the file to be included. /// The path to the file that should be included.
pub fn path(&self) -> Expr { pub fn path(&self) -> Expr {
self.0.cast_last_child().expect("include is missing path") self.0.cast_last_child().expect("include is missing path")
} }
@ -1225,7 +1226,7 @@ node! {
} }
node! { node! {
/// A return expression: `return x + 1`. /// A return expression: `return`, `return x + 1`.
ReturnExpr ReturnExpr
} }

View File

@ -60,7 +60,7 @@ where
highlight_themed_impl(text, 0, &root, vec![], &highlighter, &mut f); highlight_themed_impl(text, 0, &root, vec![], &highlighter, &mut f);
} }
/// Recursive implementation for returning syntect styles. /// Recursive implementation for highlighting with a syntect theme.
fn highlight_themed_impl<F>( fn highlight_themed_impl<F>(
text: &str, text: &str,
mut offset: usize, mut offset: usize,
@ -273,7 +273,7 @@ impl Category {
NodeKind::None => Some(Category::None), NodeKind::None => Some(Category::None),
NodeKind::Auto => Some(Category::Auto), NodeKind::Auto => Some(Category::Auto),
NodeKind::Ident(_) => match parent.kind() { NodeKind::Ident(_) => match parent.kind() {
NodeKind::Markup(_) => Some(Category::Interpolated), NodeKind::Markup { .. } => Some(Category::Interpolated),
NodeKind::FuncCall => Some(Category::Function), NodeKind::FuncCall => Some(Category::Function),
NodeKind::MethodCall if i > 0 => Some(Category::Function), NodeKind::MethodCall if i > 0 => Some(Category::Function),
NodeKind::ClosureExpr if i == 0 => Some(Category::Function), NodeKind::ClosureExpr if i == 0 => Some(Category::Function),
@ -298,8 +298,8 @@ impl Category {
NodeKind::Error(_, _) => Some(Category::Invalid), NodeKind::Error(_, _) => Some(Category::Invalid),
NodeKind::Unknown(_) => Some(Category::Invalid), NodeKind::Unknown(_) => Some(Category::Invalid),
NodeKind::Underscore => None, NodeKind::Underscore => None,
NodeKind::Markup(_) => None, NodeKind::Markup { .. } => None,
NodeKind::Space(_) => None, NodeKind::Space { .. } => None,
NodeKind::Text(_) => None, NodeKind::Text(_) => None,
NodeKind::Quote { .. } => None, NodeKind::Quote { .. } => None,
NodeKind::List => None, NodeKind::List => None,

View File

@ -27,7 +27,7 @@ pub enum SyntaxNode {
} }
impl SyntaxNode { impl SyntaxNode {
/// Returns the metadata of the node. /// The metadata of the node.
pub fn data(&self) -> &NodeData { pub fn data(&self) -> &NodeData {
match self { match self {
Self::Inner(inner) => &inner.data, Self::Inner(inner) => &inner.data,
@ -58,14 +58,6 @@ impl SyntaxNode {
self.data().span() self.data().span()
} }
/// The node's children.
pub fn children(&self) -> std::slice::Iter<'_, SyntaxNode> {
match self {
Self::Inner(inner) => inner.children(),
Self::Leaf(_) => [].iter(),
}
}
/// Whether the node or its children contain an error. /// Whether the node or its children contain an error.
pub fn erroneous(&self) -> bool { pub fn erroneous(&self) -> bool {
match self { match self {
@ -92,6 +84,14 @@ impl SyntaxNode {
} }
} }
/// The node's children.
pub fn children(&self) -> std::slice::Iter<'_, SyntaxNode> {
match self {
Self::Inner(inner) => inner.children(),
Self::Leaf(_) => [].iter(),
}
}
/// Convert the node to a typed AST node. /// Convert the node to a typed AST node.
pub fn cast<T>(&self) -> Option<T> pub fn cast<T>(&self) -> Option<T>
where where
@ -100,12 +100,12 @@ impl SyntaxNode {
T::from_untyped(self) T::from_untyped(self)
} }
/// Get the first child that can cast to some AST type. /// Get the first child that can cast to the AST type `T`.
pub fn cast_first_child<T: TypedNode>(&self) -> Option<T> { pub fn cast_first_child<T: TypedNode>(&self) -> Option<T> {
self.children().find_map(Self::cast) self.children().find_map(Self::cast)
} }
/// Get the last child that can cast to some AST type. /// Get the last child that can cast to the AST type `T`.
pub fn cast_last_child<T: TypedNode>(&self) -> Option<T> { pub fn cast_last_child<T: TypedNode>(&self) -> Option<T> {
self.children().rev().find_map(Self::cast) self.children().rev().find_map(Self::cast)
} }
@ -358,7 +358,7 @@ impl InnerNode {
&mut self.children &mut self.children
} }
/// Replaces a range of children with some replacement. /// Replaces a range of children with a replacement.
/// ///
/// May have mutated the children if it returns `Err(_)`. /// May have mutated the children if it returns `Err(_)`.
pub(crate) fn replace_children( pub(crate) fn replace_children(
@ -440,8 +440,7 @@ impl InnerNode {
} }
} }
/// Update the this node given after changes were made to one of its /// Update this node after changes were made to one of its children.
/// children.
pub(crate) fn update_parent( pub(crate) fn update_parent(
&mut self, &mut self,
prev_len: usize, prev_len: usize,
@ -572,57 +571,61 @@ impl PartialEq for NodeData {
/// the parser. /// the parser.
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
pub enum NodeKind { pub enum NodeKind {
/// A left curly brace: `{`. /// A left curly brace, starting a code block: `{`.
LeftBrace, LeftBrace,
/// A right curly brace: `}`. /// A right curly brace, terminating a code block: `}`.
RightBrace, RightBrace,
/// A left square bracket: `[`. /// A left square bracket, starting a content block: `[`.
LeftBracket, LeftBracket,
/// A right square bracket: `]`. /// A right square bracket, terminating a content block: `]`.
RightBracket, RightBracket,
/// A left round parenthesis: `(`. /// A left round parenthesis, starting a grouped expression, collection,
/// argument or parameter list: `(`.
LeftParen, LeftParen,
/// A right round parenthesis: `)`. /// A right round parenthesis, terminating a grouped expression, collection,
/// argument or parameter list: `)`.
RightParen, RightParen,
/// An asterisk: `*`. /// The strong text toggle, multiplication operator, and wildcard import
/// symbol: `*`.
Star, Star,
/// An underscore: `_`. /// Toggles emphasized text: `_`.
Underscore, Underscore,
/// A comma: `,`. /// A comma separator in a sequence: `,`.
Comma, Comma,
/// A semicolon: `;`. /// A semicolon terminating an expression: `;`.
Semicolon, Semicolon,
/// A colon: `:`. /// A colon between name / key and value in a dictionary, argument or
/// parameter list: `:`.
Colon, Colon,
/// A plus: `+`. /// The unary plus and addition operator: `+`.
Plus, Plus,
/// A hyphen: `-`. /// The unary negation and subtraction operator: `-`.
Minus, Minus,
/// A slash: `/`. /// The division operator: `/`.
Slash, Slash,
/// A dot: `.`. /// A field access and method call operator: `.`.
Dot, Dot,
/// A single equals sign: `=`. /// The assignment operator: `=`.
Eq, Eq,
/// Two equals signs: `==`. /// The equality operator: `==`.
EqEq, EqEq,
/// An exclamation mark followed by an equals sign: `!=`. /// The inequality operator: `!=`.
ExclEq, ExclEq,
/// A less-than sign: `<`. /// The less-than operator: `<`.
Lt, Lt,
/// A less-than sign followed by an equals sign: `<=`. /// The less-than or equal operator: `<=`.
LtEq, LtEq,
/// A greater-than sign: `>`. /// The greater-than operator: `>`.
Gt, Gt,
/// A greater-than sign followed by an equals sign: `>=`. /// The greater-than or equal operator: `>=`.
GtEq, GtEq,
/// A plus followed by an equals sign: `+=`. /// The add-assign operator: `+=`.
PlusEq, PlusEq,
/// A hyphen followed by an equals sign: `-=`. /// The subtract-assign operator: `-=`.
HyphEq, HyphEq,
/// An asterisk followed by an equals sign: `*=`. /// The multiply-assign operator: `*=`.
StarEq, StarEq,
/// A slash followed by an equals sign: `/=`. /// The divide-assign operator: `/=`.
SlashEq, SlashEq,
/// The `not` operator. /// The `not` operator.
Not, Not,
@ -630,9 +633,9 @@ pub enum NodeKind {
And, And,
/// The `or` operator. /// The `or` operator.
Or, Or,
/// Two dots: `..`. /// The spread operator: `..`.
Dots, Dots,
/// An equals sign followed by a greater-than sign: `=>`. /// An arrow between a closure's parameters and body: `=>`.
Arrow, Arrow,
/// The none literal: `none`. /// The none literal: `none`.
None, None,
@ -670,15 +673,20 @@ pub enum NodeKind {
From, From,
/// The `as` keyword. /// The `as` keyword.
As, As,
/// Markup of which all lines must start in some column. /// Markup of which all lines must have a minimal indentation.
/// ///
/// Notably, the number does not determine in which column the markup /// Notably, the number does not determine in which column the markup
/// started, but to the right of which column all markup elements must be, /// started, but to the right of which column all markup elements must be,
/// so it is zero except for headings and lists. /// so it is zero except for headings and lists.
Markup(usize), Markup { min_indent: usize },
/// One or more whitespace characters. /// One or more whitespace characters. Single spaces are collapsed into text
Space(usize), /// nodes if they would otherwise be surrounded by text nodes.
/// A consecutive non-markup string. ///
/// Also stores how many newlines are contained.
Space { newlines: usize },
/// Consecutive text without markup. While basic text with just single
/// spaces is collapsed into a single node, certain symbols that could
/// possibly be markup force text into multiple nodes.
Text(EcoString), Text(EcoString),
/// A forced line break: `\` or `\+` if justified. /// A forced line break: `\` or `\+` if justified.
Linebreak { justified: bool }, Linebreak { justified: bool },
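Aside, not part of the commit: with `Markup` and `Space` now carrying named fields, downstream matches spell those fields out. A minimal hypothetical sketch of how such a match reads afterwards (the miniature types below are illustrative only):

```
// Hypothetical miniature of the new struct-like variants.
enum NodeKind {
    Markup { min_indent: usize },
    Space { newlines: usize },
    Text(String),
}

fn describe(kind: &NodeKind) -> &'static str {
    match kind {
        // Two or more newlines in a space token amount to a paragraph break.
        NodeKind::Space { newlines } if *newlines >= 2 => "paragraph break",
        NodeKind::Space { .. } => "space",
        NodeKind::Markup { .. } => "markup",
        NodeKind::Text(_) => "text",
    }
}

fn main() {
    assert_eq!(describe(&NodeKind::Space { newlines: 2 }), "paragraph break");
}
```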
@ -701,10 +709,9 @@ pub enum NodeKind {
Strong, Strong,
/// Emphasized content: `_Emphasized_`. /// Emphasized content: `_Emphasized_`.
Emph, Emph,
/// An arbitrary number of backticks followed by inner contents, terminated /// A raw block with optional syntax highlighting: `` `...` ``.
/// with the same number of backticks: `` `...` ``.
Raw(Arc<RawNode>), Raw(Arc<RawNode>),
/// Dollar signs surrounding inner contents. /// A math formula: `$x$`, `$[x^2]$`.
Math(Arc<MathNode>), Math(Arc<MathNode>),
/// A section heading: `= Introduction`. /// A section heading: `= Introduction`.
Heading, Heading,
@ -740,7 +747,7 @@ pub enum NodeKind {
DictExpr, DictExpr,
/// A named pair: `thickness: 3pt`. /// A named pair: `thickness: 3pt`.
Named, Named,
/// A keyed pair: `"spaced key": true`. /// A keyed pair: `"spacy key": true`.
Keyed, Keyed,
/// A unary operation: `-x`. /// A unary operation: `-x`.
UnaryExpr, UnaryExpr,
@ -803,24 +810,14 @@ pub enum NodeKind {
} }
impl NodeKind { impl NodeKind {
/// Whether this is some kind of brace. /// Whether this is a kind of parenthesis.
pub fn is_brace(&self) -> bool {
matches!(self, Self::LeftBrace | Self::RightBrace)
}
/// Whether this is some kind of bracket.
pub fn is_bracket(&self) -> bool {
matches!(self, Self::LeftBracket | Self::RightBracket)
}
/// Whether this is some kind of parenthesis.
pub fn is_paren(&self) -> bool { pub fn is_paren(&self) -> bool {
matches!(self, Self::LeftParen | Self::RightParen) matches!(self, Self::LeftParen | Self::RightParen)
} }
/// Whether this is a space. /// Whether this is a space.
pub fn is_space(&self) -> bool { pub fn is_space(&self) -> bool {
matches!(self, Self::Space(_)) matches!(self, Self::Space { .. })
} }
/// Whether this is trivia. /// Whether this is trivia.
@ -828,31 +825,23 @@ impl NodeKind {
self.is_space() || matches!(self, Self::LineComment | Self::BlockComment) self.is_space() || matches!(self, Self::LineComment | Self::BlockComment)
} }
/// Whether this is some kind of error. /// Whether this is a kind of error.
pub fn is_error(&self) -> bool { pub fn is_error(&self) -> bool {
matches!(self, NodeKind::Error(_, _) | NodeKind::Unknown(_)) matches!(self, NodeKind::Error(_, _) | NodeKind::Unknown(_))
} }
/// Whether this node is `at_start` given the previous value of the property. /// Whether `at_start` would still be true after this node given the
/// previous value of the property.
pub fn is_at_start(&self, prev: bool) -> bool { pub fn is_at_start(&self, prev: bool) -> bool {
match self { match self {
Self::Space(1 ..) => true, Self::Space { newlines: (1 ..) } => true,
Self::Space(_) | Self::LineComment | Self::BlockComment => prev, Self::Space { .. } | Self::LineComment | Self::BlockComment => prev,
_ => false, _ => false,
} }
} }
/// Whether this node has to appear at the start of a line. /// Whether changes _inside_ this node are safely encapsulated, so that only
pub fn only_at_start(&self) -> bool { /// this node must be reparsed.
match self {
Self::Heading | Self::Enum | Self::List => true,
Self::Text(t) => t == "-" || t.ends_with('.'),
_ => false,
}
}
/// Whether this is a node that is clearly delimited by a character and may
/// appear in markup.
pub fn is_bounded(&self) -> bool { pub fn is_bounded(&self) -> bool {
match self { match self {
Self::CodeBlock Self::CodeBlock
@ -865,7 +854,7 @@ impl NodeKind {
| Self::Ellipsis | Self::Ellipsis
| Self::Quote { .. } | Self::Quote { .. }
| Self::BlockComment | Self::BlockComment
| Self::Space(_) | Self::Space { .. }
| Self::Escape(_) => true, | Self::Escape(_) => true,
Self::Text(t) => t != "-" && !t.ends_with('.'), Self::Text(t) => t != "-" && !t.ends_with('.'),
_ => false, _ => false,
@ -924,9 +913,9 @@ impl NodeKind {
Self::Import => "keyword `import`", Self::Import => "keyword `import`",
Self::Include => "keyword `include`", Self::Include => "keyword `include`",
Self::From => "keyword `from`", Self::From => "keyword `from`",
Self::Markup(_) => "markup", Self::Markup { .. } => "markup",
Self::Space(2 ..) => "paragraph break", Self::Space { newlines: (2 ..) } => "paragraph break",
Self::Space(_) => "space", Self::Space { .. } => "space",
Self::Linebreak { justified: false } => "linebreak", Self::Linebreak { justified: false } => "linebreak",
Self::Linebreak { justified: true } => "justified linebreak", Self::Linebreak { justified: true } => "justified linebreak",
Self::Text(_) => "text", Self::Text(_) => "text",
@ -1052,8 +1041,8 @@ impl Hash for NodeKind {
Self::Import => {} Self::Import => {}
Self::Include => {} Self::Include => {}
Self::From => {} Self::From => {}
Self::Markup(c) => c.hash(state), Self::Markup { min_indent } => min_indent.hash(state),
Self::Space(n) => n.hash(state), Self::Space { newlines } => newlines.hash(state),
Self::Linebreak { justified } => justified.hash(state), Self::Linebreak { justified } => justified.hash(state),
Self::Text(s) => s.hash(state), Self::Text(s) => s.hash(state),
Self::NonBreakingSpace => {} Self::NonBreakingSpace => {}

View File

@ -4,19 +4,19 @@ use std::ops::Range;
use crate::syntax::SourceId; use crate::syntax::SourceId;
/// A value with the span it corresponds to in the source code. /// A value with a span locating it in the source code.
#[derive(Copy, Clone, Eq, PartialEq, Hash)] #[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct Spanned<T> { pub struct Spanned<T> {
/// The spanned value. /// The spanned value.
pub v: T, pub v: T,
/// The location in source code of the value. /// The value's location in source code.
pub span: Span, pub span: Span,
} }
impl<T> Spanned<T> { impl<T> Spanned<T> {
/// Create a new instance from a value and its span. /// Create a new instance from a value and its span.
pub fn new(v: T, span: impl Into<Span>) -> Self { pub fn new(v: T, span: Span) -> Self {
Self { v, span: span.into() } Self { v, span }
} }
/// Convert from `&Spanned<T>` to `Spanned<&T>` /// Convert from `&Spanned<T>` to `Spanned<&T>`
@ -24,7 +24,7 @@ impl<T> Spanned<T> {
Spanned { v: &self.v, span: self.span } Spanned { v: &self.v, span: self.span }
} }
/// Map the value using a function keeping the span. /// Map the value using a function.
pub fn map<F, U>(self, f: F) -> Spanned<U> pub fn map<F, U>(self, f: F) -> Spanned<U>
where where
F: FnOnce(T) -> U, F: FnOnce(T) -> U,
@ -52,11 +52,11 @@ impl<T: Debug> Debug for Spanned<T> {
/// sibling and smaller than any id in the subtrees of any right sibling. /// sibling and smaller than any id in the subtrees of any right sibling.
/// ///
/// The internal ids of spans stay mostly stable, even for nodes behind an /// The internal ids of spans stay mostly stable, even for nodes behind an
/// insertion. This is not true for simple ranges as they shift. Spans can be /// insertion. This is not true for simple ranges as they would shift. Spans can
/// used as inputs to memoized functions without hurting cache performance when /// be used as inputs to memoized functions without hurting cache performance
/// text is inserted somewhere in the document other than the end. /// when text is inserted somewhere in the document other than the end.
/// ///
/// This type takes 8 bytes and is null-optimized (i.e. `Option<Span>` also /// This type takes up 8 bytes and is null-optimized (i.e. `Option<Span>` also
/// takes 8 bytes). /// takes 8 bytes).
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct Span(NonZeroU64); pub struct Span(NonZeroU64);
@ -90,7 +90,7 @@ impl Span {
Self(to_non_zero(Self::DETACHED)) Self(to_non_zero(Self::DETACHED))
} }
/// Return a new span with updated position. /// Return this span, but with updated position.
pub const fn with_pos(self, pos: SpanPos) -> Self { pub const fn with_pos(self, pos: SpanPos) -> Self {
let bits = (self.0.get() & ((1 << 62) - 1)) | ((pos as u64) << 62); let bits = (self.0.get() & ((1 << 62) - 1)) | ((pos as u64) << 62);
Self(to_non_zero(bits)) Self(to_non_zero(bits))
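Aside, not part of the commit: in `with_pos` above, the mask `(1 << 62) - 1` keeps the lower 62 bits that number the span while the position tag goes into the top two bits, and the `NonZeroU64` wrapper is what gives `Option<Span>` the advertised 8 bytes. A minimal standalone mock of just that packing:

```
use std::mem::size_of;
use std::num::NonZeroU64;

// Hypothetical mock; only the bit packing mirrors the real `with_pos`.
#[derive(Copy, Clone)]
struct Span(NonZeroU64);

fn with_pos(span: Span, pos: u64) -> Span {
    // Keep the low 62 bits (the span's number), overwrite the top two (the position).
    let bits = (span.0.get() & ((1 << 62) - 1)) | (pos << 62);
    Span(NonZeroU64::new(bits).unwrap())
}

fn main() {
    // The niche of `NonZeroU64` keeps `Option<Span>` at 8 bytes.
    assert_eq!(size_of::<Option<Span>>(), 8);

    let span = Span(NonZeroU64::new(42).unwrap());
    let tagged = with_pos(span, 0b10);
    assert_eq!(tagged.0.get() & ((1 << 62) - 1), 42); // number unchanged
    assert_eq!(tagged.0.get() >> 62, 0b10); // position sits in the top bits
}
```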

View File

@ -22,8 +22,8 @@ macro_rules! format_eco {
pub struct EcoString(Repr); pub struct EcoString(Repr);
/// The internal representation. Either: /// The internal representation. Either:
/// - inline when below a certain number of bytes, /// - inline when below a certain number of bytes, or
/// - or reference-counted on the heap with COW semantics. /// - reference-counted on the heap with clone-on-write semantics.
#[derive(Clone)] #[derive(Clone)]
enum Repr { enum Repr {
Small { buf: [u8; LIMIT], len: u8 }, Small { buf: [u8; LIMIT], len: u8 },
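Aside, not part of the commit: a much-simplified, hypothetical mock of the two representations described above. Short strings stay inline; once they outgrow the buffer they spill to a reference-counted heap string, and mutation of a shared heap string goes through `Arc::make_mut`, which is what yields the clone-on-write behavior. The names and spill logic below are illustrative, not the crate's actual code.

```
use std::sync::Arc;

const LIMIT: usize = 14;

enum Repr {
    // Inline: the bytes live directly in the enum, no allocation.
    Small { buf: [u8; LIMIT], len: u8 },
    // Heap: reference-counted, cloned lazily on write.
    Large(Arc<String>),
}

fn push_str(repr: &mut Repr, suffix: &str) {
    let spilled = match repr {
        Repr::Small { buf, len } => {
            let cur = *len as usize;
            if cur + suffix.len() <= LIMIT {
                // Fast path: the suffix still fits inline.
                buf[cur..cur + suffix.len()].copy_from_slice(suffix.as_bytes());
                *len += suffix.len() as u8;
                None
            } else {
                // Spill to the heap once the inline buffer would overflow.
                let mut s = String::from_utf8_lossy(&buf[..cur]).into_owned();
                s.push_str(suffix);
                Some(Repr::Large(Arc::new(s)))
            }
        }
        Repr::Large(arc) => {
            // `make_mut` clones the string only if the Arc is shared.
            Arc::make_mut(arc).push_str(suffix);
            None
        }
    };
    if let Some(new) = spilled {
        *repr = new;
    }
}

fn main() {
    let mut repr = Repr::Small { buf: [0; LIMIT], len: 0 };
    push_str(&mut repr, "hello, ");
    push_str(&mut repr, "world and then some");
    assert!(matches!(repr, Repr::Large(_)));
}
```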

View File

@ -1,25 +0,0 @@
/// Decode mac roman encoded bytes into a string.
pub fn decode_mac_roman(coded: &[u8]) -> String {
coded.iter().copied().map(char_from_mac_roman).collect()
}
/// Convert a mac roman coded character to a unicode char.
fn char_from_mac_roman(code: u8) -> char {
#[rustfmt::skip]
const TABLE: [char; 128] = [
'Ä', 'Å', 'Ç', 'É', 'Ñ', 'Ö', 'Ü', 'á', 'à', 'â', 'ä', 'ã', 'å', 'ç', 'é', 'è',
'ê', 'ë', 'í', 'ì', 'î', 'ï', 'ñ', 'ó', 'ò', 'ô', 'ö', 'õ', 'ú', 'ù', 'û', 'ü',
'†', '°', '¢', '£', '§', '•', '¶', 'ß', '®', '©', '™', '´', '¨', '≠', 'Æ', 'Ø',
'∞', '±', '≤', '≥', '¥', 'µ', '∂', '∑', '∏', 'π', '∫', 'ª', 'º', 'Ω', 'æ', 'ø',
'¿', '¡', '¬', '√', 'ƒ', '≈', '∆', '«', '»', '…', '\u{a0}', 'À', 'Ã', 'Õ', 'Œ', 'œ',
'–', '—', '“', '”', '‘', '’', '÷', '◊', 'ÿ', 'Ÿ', '⁄', '€', '‹', '›', 'fi', 'fl',
'‡', '·', '‚', '„', '‰', 'Â', 'Ê', 'Á', 'Ë', 'È', 'Í', 'Î', 'Ï', 'Ì', 'Ó', 'Ô',
'\u{f8ff}', 'Ò', 'Ú', 'Û', 'Ù', 'ı', 'ˆ', '˜', '¯', '˘', '˙', '˚', '¸', '˝', '˛', 'ˇ',
];
if code < 128 {
code as char
} else {
TABLE[(code - 128) as usize]
}
}

View File

@ -1,18 +1,14 @@
//! Utilities. //! Utilities.
#[macro_use] #[macro_use]
mod eco_string; mod eco;
mod mac_roman; mod hash;
mod prehashed;
pub use eco_string::EcoString; pub use eco::EcoString;
pub use mac_roman::decode_mac_roman; pub use hash::Prehashed;
pub use prehashed::Prehashed;
use std::any::TypeId; use std::any::TypeId;
use std::cmp::Ordering;
use std::fmt::{self, Debug, Formatter}; use std::fmt::{self, Debug, Formatter};
use std::ops::Range;
use std::path::{Component, Path, PathBuf}; use std::path::{Component, Path, PathBuf};
use std::sync::Arc; use std::sync::Arc;
@ -35,35 +31,6 @@ where
Wrapper(f) Wrapper(f)
} }
/// An alternative type id that prints as something readable in debug mode.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct ReadableTypeId {
id: TypeId,
#[cfg(debug_assertions)]
name: &'static str,
}
impl ReadableTypeId {
/// The type id of the given type.
pub fn of<T: 'static>() -> Self {
Self {
id: TypeId::of::<T>(),
#[cfg(debug_assertions)]
name: std::any::type_name::<T>(),
}
}
}
impl Debug for ReadableTypeId {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
#[cfg(debug_assertions)]
f.pad(self.name)?;
#[cfg(not(debug_assertions))]
f.pad("ReadableTypeId")?;
Ok(())
}
}
/// Extra methods for [`str`]. /// Extra methods for [`str`].
pub trait StrExt { pub trait StrExt {
/// The number of code units this string would use if it was encoded in /// The number of code units this string would use if it was encoded in
@ -77,41 +44,6 @@ impl StrExt for str {
} }
} }
/// Extra methods for [`Option<T>`].
pub trait OptionExt<T> {
/// Sets `other` as the value if `self` is `None` or if it contains a value
/// larger than `other`.
fn set_min(&mut self, other: T)
where
T: Ord;
/// Sets `other` as the value if `self` is `None` or if it contains a value
/// smaller than `other`.
fn set_max(&mut self, other: T)
where
T: Ord;
}
impl<T> OptionExt<T> for Option<T> {
fn set_min(&mut self, other: T)
where
T: Ord,
{
if self.as_ref().map_or(true, |x| other < *x) {
*self = Some(other);
}
}
fn set_max(&mut self, other: T)
where
T: Ord,
{
if self.as_ref().map_or(true, |x| other > *x) {
*self = Some(other);
}
}
}
/// Extra methods for [`Arc`]. /// Extra methods for [`Arc`].
pub trait ArcExt<T> { pub trait ArcExt<T> {
/// Takes the inner value if there is exactly one strong reference and /// Takes the inner value if there is exactly one strong reference and
@ -131,7 +63,7 @@ where
} }
} }
/// Extra methods for `[T]`. /// Extra methods for [`[T]`](slice).
pub trait SliceExt<T> { pub trait SliceExt<T> {
/// Split a slice into consecutive runs with the same key and yield for /// Split a slice into consecutive runs with the same key and yield for
/// each such run the key and the slice of elements with that key. /// each such run the key and the slice of elements with that key.
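Aside, not part of the commit: the run-splitting described above is easiest to picture with a small standalone sketch. The free function and its name are illustrative, not the trait's actual method.

```
// Group a slice into consecutive runs that share a key, yielding the key and
// the sub-slice for each run.
fn runs_by_key<T, K, F>(slice: &[T], key: F) -> Vec<(K, &[T])>
where
    K: PartialEq,
    F: Fn(&T) -> K,
{
    let mut runs = Vec::new();
    let mut start = 0;
    while start < slice.len() {
        let k = key(&slice[start]);
        let mut end = start + 1;
        while end < slice.len() && key(&slice[end]) == k {
            end += 1;
        }
        runs.push((k, &slice[start..end]));
        start = end;
    }
    runs
}

fn main() {
    let words = ["ab", "cd", "x", "y", "foo"];
    let runs = runs_by_key(&words, |w| w.len());
    // Three runs: ["ab", "cd"] with key 2, ["x", "y"] with key 1, ["foo"] with key 3.
    assert_eq!(runs.len(), 3);
    assert_eq!(runs[0], (2, &words[0..2]));
}
```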
@ -170,34 +102,6 @@ where
} }
} }
/// Extra methods for [`Range<usize>`].
pub trait RangeExt {
/// Locate a position relative to a range.
///
/// This can be used for binary searching the range that contains the
/// position as follows:
/// ```
/// # use typst::util::RangeExt;
/// assert_eq!(
/// [1..2, 2..7, 7..10].binary_search_by(|r| r.locate(5)),
/// Ok(1),
/// );
/// ```
fn locate(&self, pos: usize) -> Ordering;
}
impl RangeExt for Range<usize> {
fn locate(&self, pos: usize) -> Ordering {
if pos < self.start {
Ordering::Greater
} else if pos < self.end {
Ordering::Equal
} else {
Ordering::Less
}
}
}
/// Extra methods for [`Path`]. /// Extra methods for [`Path`].
pub trait PathExt { pub trait PathExt {
/// Lexically normalize a path. /// Lexically normalize a path.
@ -222,3 +126,32 @@ impl PathExt for Path {
out out
} }
} }
/// An alternative type id that prints as something readable in debug mode.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct ReadableTypeId {
id: TypeId,
#[cfg(debug_assertions)]
name: &'static str,
}
impl ReadableTypeId {
/// The type id of the given type.
pub fn of<T: 'static>() -> Self {
Self {
id: TypeId::of::<T>(),
#[cfg(debug_assertions)]
name: std::any::type_name::<T>(),
}
}
}
impl Debug for ReadableTypeId {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
#[cfg(debug_assertions)]
f.pad(self.name)?;
#[cfg(not(debug_assertions))]
f.pad("ReadableTypeId")?;
Ok(())
}
}

Binary file not shown.

Before: 4.7 KiB  |  After: 4.5 KiB

View File

@ -8,11 +8,11 @@
{(:)} {(:)}
// Two pairs and string key. // Two pairs and string key.
#let dict = (normal: 1, "spaced key": 2) #let dict = (normal: 1, "spacy key": 2)
#dict #dict
#test(dict.normal, 1) #test(dict.normal, 1)
#test(dict("spaced key"), 2) #test(dict("spacy key"), 2)
--- ---
// Test lvalue and rvalue access. // Test lvalue and rvalue access.

View File

@ -32,8 +32,8 @@ fn main() {
let args = Args::new(env::args().skip(1)); let args = Args::new(env::args().skip(1));
let mut filtered = Vec::new(); let mut filtered = Vec::new();
// Since different tests can affect each other through the layout cache, a // Since different tests can affect each other through the memoization
// deterministic order is very important for reproducibility. // cache, a deterministic order is important for reproducibility.
for entry in WalkDir::new(".").sort_by_file_name() { for entry in WalkDir::new(".").sort_by_file_name() {
let entry = entry.unwrap(); let entry = entry.unwrap();
if entry.depth() <= 1 { if entry.depth() <= 1 {
@ -599,8 +599,8 @@ fn render_links(
} }
} }
/// This is a Linear-feedback shift register using XOR as its shifting /// A Linear-feedback shift register using XOR as its shifting function.
/// function. It can be used as a PRNG. /// Can be used as a PRNG.
struct LinearShift(u64); struct LinearShift(u64);
impl LinearShift { impl LinearShift {
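The `impl LinearShift` body is cut off in this excerpt. For readers unfamiliar with the idea, here is a hypothetical xorshift-style sketch of such a PRNG; the shift constants below are the classic xorshift64 triple and not necessarily what the test harness uses.

```
/// A linear-feedback shift register using XOR as its shifting function.
struct LinearShift(u64);

impl LinearShift {
    fn new(seed: u64) -> Self {
        // The state must never be zero, or every later output is zero too.
        Self(seed.max(1))
    }

    fn next(&mut self) -> u64 {
        self.0 ^= self.0 << 13;
        self.0 ^= self.0 >> 7;
        self.0 ^= self.0 << 17;
        self.0
    }
}

fn main() {
    let mut rng = LinearShift::new(0xC0FFEE);
    let a = rng.next();
    let b = rng.next();
    assert_ne!(a, b);
}
```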