Mirror of https://github.com/typst/typst (synced 2025-05-13 12:36:23 +08:00)

Commit eb951c008b: Style changes
Parent: 33928a00dc
@@ -115,10 +115,7 @@ fn parse_args() -> StrResult<Command> {
     // Don't allow excess arguments.
     let rest = args.finish();
     if !rest.is_empty() {
-        Err(format!(
-            "unexpected argument{}",
-            if rest.len() > 1 { "s" } else { "" }
-        ))?;
+        Err(format!("unexpected argument{}", if rest.len() > 1 { "s" } else { "" }))?;
     }

     Ok(command)
@@ -101,14 +101,12 @@ pub fn mod_(_: &mut Vm, args: &mut Args) -> SourceResult<Value> {
         (Value::Int(a), Value::Float(b)) => (a as f64, b),
         (Value::Float(a), Value::Int(b)) => (a, b as f64),
         (Value::Float(a), Value::Float(b)) => (a, b),
-        (Value::Int(_), b) | (Value::Float(_), b) => bail!(
-            span2,
-            format!("expected integer or float, found {}", b.type_name())
-        ),
-        (a, _) => bail!(
-            span1,
-            format!("expected integer or float, found {}", a.type_name())
-        ),
+        (Value::Int(_), b) | (Value::Float(_), b) => {
+            bail!(span2, format!("expected integer or float, found {}", b.type_name()))
+        }
+        (a, _) => {
+            bail!(span1, format!("expected integer or float, found {}", a.type_name()))
+        }
     };

     if b == 0.0 {
@@ -10,8 +10,7 @@ pub fn luma(_: &mut Vm, args: &mut Args) -> SourceResult<Value> {

 /// Create an RGB(A) color.
 pub fn rgb(_: &mut Vm, args: &mut Args) -> SourceResult<Value> {
-    Ok(Value::Color(
-        if let Some(string) = args.find::<Spanned<EcoString>>()? {
+    Ok(Value::Color(if let Some(string) = args.find::<Spanned<EcoString>>()? {
         match RgbaColor::from_str(&string.v) {
             Ok(color) => color.into(),
             Err(msg) => bail!(string.span, msg),
@@ -22,8 +21,7 @@ pub fn rgb(_: &mut Vm, args: &mut Args) -> SourceResult<Value> {
         let Component(b) = args.expect("blue component")?;
         let Component(a) = args.eat()?.unwrap_or(Component(255));
         RgbaColor::new(r, g, b, a).into()
-        },
-    ))
+    }))
 }

 /// Create a CMYK color.
@@ -81,10 +81,7 @@ fn convert_json(value: serde_json::Value) -> Value {
 /// Format the user-facing JSON error message.
 fn format_json_error(error: serde_json::Error) -> String {
     assert!(error.is_syntax() || error.is_eof());
-    format!(
-        "failed to parse json file: syntax error in line {}",
-        error.line()
-    )
+    format!("failed to parse json file: syntax error in line {}", error.line())
 }

 /// Read structured data from an XML file.
@@ -5,10 +5,10 @@ mod color;
 mod data;
 mod string;

-pub use calc::*;
-pub use color::*;
-pub use data::*;
-pub use string::*;
+pub use self::calc::*;
+pub use self::color::*;
+pub use self::data::*;
+pub use self::string::*;

 use comemo::Track;
 use typst::model::{Eval, Route, Scopes, Vm};
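Several mod.rs files in this commit switch their glob re-exports from pub use foo::*; to pub use self::foo::*;, as in the hunk above. Below is a minimal, self-contained sketch of the resulting style; the motivation given in the comment (keeping the path unambiguously local rather than resolvable as an external crate name) is my own reading, not something stated in the commit.

// Illustrative only, not code from this commit: the re-export style the diff
// applies throughout the crate. `self::` pins the path to the sibling module,
// so it can never be read as an external crate named `calc`.
mod util {
    pub mod calc {
        pub fn double(x: i32) -> i32 {
            x * 2
        }
    }

    // Re-export with an explicit `self::` prefix, as this commit does.
    pub use self::calc::*;
}

fn main() {
    // `double` is reachable through the glob re-export.
    assert_eq!(util::double(21), 42);
}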
@@ -55,11 +55,7 @@ impl ContentExt for Content {
         let mut seq = vec![];
         if let Some(above) = above {
             seq.push(
-                layout::VNode {
-                    amount: above.into(),
-                    weak: true,
-                    generated: true,
-                }
+                layout::VNode { amount: above.into(), weak: true, generated: true }
                 .pack(),
             );
         }
@@ -67,11 +63,7 @@ impl ContentExt for Content {
         seq.push(self);
         if let Some(below) = below {
             seq.push(
-                layout::VNode {
-                    amount: below.into(),
-                    weak: true,
-                    generated: true,
-                }
+                layout::VNode { amount: below.into(), weak: true, generated: true }
                 .pack(),
             );
         }
@@ -5,7 +5,7 @@ mod image;
 mod line;
 mod shape;

-pub use hide::*;
+pub use self::hide::*;
 pub use self::image::*;
-pub use line::*;
-pub use shape::*;
+pub use self::line::*;
+pub use self::shape::*;
@@ -74,12 +74,8 @@ impl LayoutBlock for ColumnsNode {
         let mut output = Frame::new(Size::new(regions.first.x, height));
         let mut cursor = Abs::zero();

-        for _ in 0 .. columns {
-            let frame = match frames.next() {
-                Some(frame) => frame,
-                None => break,
-            };
+        for _ in 0..columns {
+            let Some(frame) = frames.next() else { break };

             if !regions.expand.y {
                 output.size_mut().y.set_max(frame.height());
             }
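Several hunks like the one above replace a match or if-let fallback with a let-else binding, a pattern stabilized in Rust 1.65. Below is a minimal, self-contained sketch of the pattern using hypothetical names rather than code from this repository.

fn first_even(values: &[i32]) -> i32 {
    // `let ... else` binds the success case; the else block must diverge
    // (return, break, continue, or panic), mirroring the `else { break }`
    // and `else { return Ok(()) }` arms introduced in this diff.
    let Some(found) = values.iter().copied().find(|&v| v % 2 == 0) else {
        return 0;
    };
    found * 10
}

fn main() {
    assert_eq!(first_even(&[1, 3, 4]), 40);
    assert_eq!(first_even(&[1, 3]), 0);
}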
@@ -170,13 +170,13 @@ impl<'a> GridLayouter<'a> {
         };

         // Collect content and gutter columns.
-        for x in 0 .. c {
+        for x in 0..c {
             cols.push(get_or(tracks.x, x, auto));
             cols.push(get_or(gutter.x, x, zero));
         }

         // Collect content and gutter rows.
-        for y in 0 .. r {
+        for y in 0..r {
             rows.push(get_or(tracks.y, y, auto));
             rows.push(get_or(gutter.y, y, zero));
         }
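The 0 .. c to 0..c rewrites in this hunk, and in many hunks below, follow from dropping spaces_around_ranges = true in the rustfmt.toml hunk near the end of this diff; without that option rustfmt keeps range operators tight. A small illustrative sketch (hypothetical code, not from this commit):

fn main() {
    let cols = 3;
    // Old config (spaces_around_ranges = true): `0 .. cols` and `0 ..= cols`.
    // New config / rustfmt default: no spaces around `..` and `..=`.
    for x in 0..cols {
        println!("column {x}");
    }
    let inclusive: Vec<i32> = (0..=cols).collect();
    assert_eq!(inclusive, vec![0, 1, 2, 3]);
}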
@@ -214,7 +214,7 @@ impl<'a> GridLayouter<'a> {
     fn layout(mut self) -> SourceResult<Vec<Frame>> {
         self.measure_columns()?;

-        for y in 0 .. self.rows.len() {
+        for y in 0..self.rows.len() {
             // Skip to next region if current one is full, but only for content
             // rows, not for gutter rows.
             if y % 2 == 0 && self.regions.is_full() {
@@ -295,7 +295,7 @@ impl<'a> GridLayouter<'a> {
         }

         let mut resolved = Abs::zero();
-        for y in 0 .. self.rows.len() {
+        for y in 0..self.rows.len() {
             if let Some(cell) = self.cell(x, y) {
                 let size = Size::new(available, self.regions.base.y);
                 let mut pod =
@@ -412,7 +412,7 @@ impl<'a> GridLayouter<'a> {
         // eaten up by any fr rows.
         if self.fr.is_zero() {
             let len = resolved.len();
-            for (region, target) in self.regions.iter().zip(&mut resolved[.. len - 1]) {
+            for (region, target) in self.regions.iter().zip(&mut resolved[..len - 1]) {
                 target.set_max(region.y);
             }
         }
@@ -502,7 +502,7 @@ impl<'a> GridLayouter<'a> {
         // Prepare regions.
         let size = Size::new(self.used.x, heights[0]);
         let mut pod = Regions::one(size, self.regions.base, Axes::splat(true));
-        pod.backlog = heights[1 ..].to_vec();
+        pod.backlog = heights[1..].to_vec();

         // Layout the row.
         let mut pos = Point::zero();
@@ -12,17 +12,17 @@ mod spacing;
 mod stack;
 mod transform;

-pub use align::*;
-pub use columns::*;
-pub use container::*;
-pub use flow::*;
-pub use grid::*;
-pub use pad::*;
-pub use page::*;
-pub use place::*;
-pub use spacing::*;
-pub use stack::*;
-pub use transform::*;
+pub use self::align::*;
+pub use self::columns::*;
+pub use self::container::*;
+pub use self::flow::*;
+pub use self::grid::*;
+pub use self::pad::*;
+pub use self::page::*;
+pub use self::place::*;
+pub use self::spacing::*;
+pub use self::stack::*;
+pub use self::transform::*;

 use std::mem;
@@ -357,7 +357,10 @@ impl<'a> Builder<'a> {
         content: &'a Content,
         styles: StyleChain<'a>,
     ) -> SourceResult<bool> {
-        if let Some(mut realized) = styles.apply(self.world, Target::Node(content))? {
+        let Some(mut realized) = styles.apply(self.world, Target::Node(content))? else {
+            return Ok(false);
+        };
+
         let mut map = StyleMap::new();
         let barrier = Barrier::new(content.id());
         map.push(StyleEntry::Barrier(barrier));
@@ -365,10 +368,8 @@ impl<'a> Builder<'a> {
         realized = realized.styled_with_map(map);
         let stored = self.scratch.templates.alloc(realized);
         self.accept(stored, styles)?;

         Ok(true)
-        } else {
-            Ok(false)
-        }
     }

     fn styled(
@@ -466,10 +467,7 @@ impl<'a> DocBuilder<'a> {

 impl Default for DocBuilder<'_> {
     fn default() -> Self {
-        Self {
-            pages: StyleVecBuilder::new(),
-            keep_next: true,
-        }
+        Self { pages: StyleVecBuilder::new(), keep_next: true }
     }
 }

@@ -658,30 +656,25 @@ impl<'a> ListBuilder<'a> {
             {
                 self.items.push(item.clone(), styles);
                 self.tight &= self.staged.drain(..).all(|(t, _)| !t.is::<ParbreakNode>());
-            } else {
-                return false;
+                return true;
             }
         } else if !self.items.is_empty()
             && (content.is::<SpaceNode>() || content.is::<ParbreakNode>())
         {
             self.staged.push((content, styles));
-        } else {
-            return false;
+            return true;
         }

-        true
+        false
     }

     fn finish(self, parent: &mut Builder<'a>) -> SourceResult<()> {
         let (items, shared) = self.items.finish();
-        let kind = match items.items().next() {
-            Some(item) => item.kind(),
-            None => return Ok(()),
-        };

+        let Some(item) = items.items().next() else { return Ok(()) };
         let tight = self.tight;
         let attached = tight && self.attachable;
-        let content = match kind {
+        let content = match item.kind() {
             LIST => ListNode::<LIST> { tight, attached, items }.pack(),
             ENUM => ListNode::<ENUM> { tight, attached, items }.pack(),
             DESC | _ => ListNode::<DESC> { tight, attached, items }.pack(),
@@ -765,18 +758,15 @@ impl<'a, T> CollapsingBuilder<'a, T> {
         }

         if self.last == Last::Weak {
-            if let Some(i) =
-                self.staged.iter().position(|(prev_item, _, prev_weakness)| {
+            let weak = self.staged.iter().position(|(prev_item, _, prev_weakness)| {
                 prev_weakness.map_or(false, |prev_weakness| {
                     weakness < prev_weakness
                         || (weakness == prev_weakness && item > *prev_item)
                 })
-            })
-            {
-                self.staged.remove(i);
-            } else {
-                return;
-            }
+            });
+
+            let Some(weak) = weak else { return };
+            self.staged.remove(weak);
         }

         self.staged.push((item, styles, Some(weakness)));
@@ -111,12 +111,7 @@ impl PageNode {
         let pw = size.x - pad.left - pad.right;
         let py = size.y - pad.bottom;
         for (role, marginal, pos, area) in [
-            (
-                Role::Header,
-                header,
-                Point::with_x(pad.left),
-                Size::new(pw, pad.top),
-            ),
+            (Role::Header, header, Point::with_x(pad.left), Size::new(pw, pad.top)),
             (
                 Role::Footer,
                 footer,
@@ -69,12 +69,7 @@ impl LayoutInline for MathNode {
         _: &Regions,
         styles: StyleChain,
     ) -> SourceResult<Vec<Frame>> {
-        Ok(vec![layout_tex(
-            &self.texify(),
-            self.display,
-            world,
-            styles,
-        )?])
+        Ok(vec![layout_tex(&self.texify(), self.display, world, styles)?])
     }
 }

@@ -181,7 +176,7 @@ fn escape_char(c: char) -> EcoString {
 /// Trim grouping parenthesis.
 fn unparen(s: EcoString) -> EcoString {
     if s.starts_with('(') && s.ends_with(')') {
-        s[1 .. s.len() - 1].into()
+        s[1..s.len() - 1].into()
     } else {
         s
     }
@@ -95,9 +95,9 @@ impl<const L: ListKind> Show for ListNode<L> {
         match name {
             "tight" => Some(Value::Bool(self.tight)),
             "attached" => Some(Value::Bool(self.attached)),
-            "items" => Some(Value::Array(
-                self.items.items().map(|item| item.encode()).collect(),
-            )),
+            "items" => {
+                Some(Value::Array(self.items.items().map(|item| item.encode()).collect()))
+            }
             _ => None,
         }
     }
@@ -139,11 +139,7 @@ impl<const L: ListKind> Show for ListNode<L> {
             ListItem::List(body) => body.as_ref().clone(),
             ListItem::Enum(_, body) => body.as_ref().clone(),
             ListItem::Desc(item) => Content::sequence(vec![
-                HNode {
-                    amount: (-body_indent).into(),
-                    weak: false,
-                }
-                .pack(),
+                HNode { amount: (-body_indent).into(), weak: false }.pack(),
                 (item.term.clone() + TextNode(':'.into()).pack()).strong(),
                 SpaceNode.pack(),
                 item.body.clone(),
@@ -6,8 +6,8 @@ mod list;
 mod reference;
 mod table;

-pub use doc::*;
-pub use heading::*;
-pub use list::*;
-pub use reference::*;
-pub use table::*;
+pub use self::doc::*;
+pub use self::heading::*;
+pub use self::list::*;
+pub use self::reference::*;
+pub use self::table::*;
@@ -56,13 +56,16 @@ impl<const L: DecoLine> Show for DecoNode<L> {
         _: Tracked<dyn World>,
         styles: StyleChain,
     ) -> SourceResult<Content> {
-        Ok(self.0.clone().styled(TextNode::DECO, Decoration {
-            line: L,
-            stroke: styles.get(Self::STROKE).unwrap_or_default(),
-            offset: styles.get(Self::OFFSET),
-            extent: styles.get(Self::EXTENT),
-            evade: styles.get(Self::EVADE),
-        }))
+        Ok(self.0.clone().styled(
+            TextNode::DECO,
+            Decoration {
+                line: L,
+                stroke: styles.get(Self::STROKE).unwrap_or_default(),
+                offset: styles.get(Self::OFFSET),
+                extent: styles.get(Self::EXTENT),
+                evade: styles.get(Self::EVADE),
+            },
+        ))
     }
 }

@@ -8,12 +8,12 @@ mod raw;
 mod shaping;
 mod shift;

-pub use deco::*;
-pub use link::*;
-pub use par::*;
-pub use raw::*;
-pub use shaping::*;
-pub use shift::*;
+pub use self::deco::*;
+pub use self::link::*;
+pub use self::par::*;
+pub use self::raw::*;
+pub use self::shaping::*;
+pub use self::shift::*;

 use std::borrow::Cow;

@@ -152,7 +152,7 @@ impl TextNode {

         if count > 0 {
             let mut list = Vec::with_capacity(count);
-            for _ in 0 .. count {
+            for _ in 0..count {
                 list.push(args.find()?.unwrap());
             }

@@ -222,7 +222,7 @@ impl<'a> Preparation<'a> {
         let mut cursor = 0;
         for item in &self.items {
             let end = cursor + item.len();
-            if (cursor .. end).contains(&text_offset) {
+            if (cursor..end).contains(&text_offset) {
                 return Some(item);
             }
             cursor = end;
@@ -256,7 +256,7 @@ impl<'a> Preparation<'a> {
             cursor += len;
         }

-        (expanded, &self.items[start .. end])
+        (expanded, &self.items[start..end])
     }
 }

@@ -500,11 +500,14 @@ fn prepare<'a>(
     regions: &Regions,
     styles: StyleChain<'a>,
 ) -> SourceResult<Preparation<'a>> {
-    let bidi = BidiInfo::new(text, match styles.get(TextNode::DIR) {
-        Dir::LTR => Some(BidiLevel::ltr()),
-        Dir::RTL => Some(BidiLevel::rtl()),
-        _ => None,
-    });
+    let bidi = BidiInfo::new(
+        text,
+        match styles.get(TextNode::DIR) {
+            Dir::LTR => Some(BidiLevel::ltr()),
+            Dir::RTL => Some(BidiLevel::rtl()),
+            _ => None,
+        },
+    );

     let mut cursor = 0;
     let mut items = vec![];
@@ -514,7 +517,7 @@ fn prepare<'a>(
         let end = cursor + segment.len();
         match segment {
             Segment::Text(_) => {
-                shape_range(&mut items, world, &bidi, cursor .. end, styles);
+                shape_range(&mut items, world, &bidi, cursor..end, styles);
             }
             Segment::Spacing(spacing) => match spacing {
                 Spacing::Relative(v) => {
@@ -574,18 +577,18 @@ fn shape_range<'a>(
     let mut cursor = range.start;

     // Group by embedding level and script.
-    for i in cursor .. range.end {
+    for i in cursor..range.end {
         if !bidi.text.is_char_boundary(i) {
             continue;
         }

         let level = bidi.levels[i];
         let script =
-            bidi.text[i ..].chars().next().map_or(Script::Unknown, |c| c.script());
+            bidi.text[i..].chars().next().map_or(Script::Unknown, |c| c.script());

         if level != prev_level || !is_compatible(script, prev_script) {
             if cursor < i {
-                process(&bidi.text[cursor .. i], prev_level);
+                process(&bidi.text[cursor..i], prev_level);
             }
             cursor = i;
             prev_level = level;
@@ -595,7 +598,7 @@ fn shape_range<'a>(
         }
     }

-    process(&bidi.text[cursor .. range.end], prev_level);
+    process(&bidi.text[cursor..range.end], prev_level);
 }

 /// Whether this is not a specific script.
@@ -655,7 +658,7 @@ fn linebreak_simple<'a>(

     for (end, mandatory, hyphen) in breakpoints(p) {
         // Compute the line and its size.
-        let mut attempt = line(p, world, start .. end, mandatory, hyphen);
+        let mut attempt = line(p, world, start..end, mandatory, hyphen);

         // If the line doesn't fit anymore, we push the last fitting attempt
         // into the stack and rebuild the line from the attempt's end. The
@@ -664,7 +667,7 @@ fn linebreak_simple<'a>(
             if let Some((last_attempt, last_end)) = last.take() {
                 lines.push(last_attempt);
                 start = last_end;
-                attempt = line(p, world, start .. end, mandatory, hyphen);
+                attempt = line(p, world, start..end, mandatory, hyphen);
             }
         }

@@ -731,7 +734,7 @@ fn linebreak_optimized<'a>(
     let mut table = vec![Entry {
         pred: 0,
         total: 0.0,
-        line: line(p, world, 0 .. 0, false, false),
+        line: line(p, world, 0..0, false, false),
     }];

     let em = p.styles.get(TextNode::SIZE);
@@ -745,7 +748,7 @@ fn linebreak_optimized<'a>(
         for (i, pred) in table.iter_mut().enumerate().skip(active) {
             // Layout the line.
             let start = pred.line.end;
-            let attempt = line(p, world, start .. end, mandatory, hyphen);
+            let attempt = line(p, world, start..end, mandatory, hyphen);

             // Determine how much the line's spaces would need to be stretched
             // to make it the desired width.
@@ -877,7 +880,7 @@ impl Iterator for Breakpoints<'_> {
         // Hyphenate the next word.
         if self.p.hyphenate != Some(false) {
             if let Some(lang) = self.lang(self.offset) {
-                let word = &self.p.bidi.text[self.offset .. self.end];
+                let word = &self.p.bidi.text[self.offset..self.end];
                 let trimmed = word.trim_end_matches(|c: char| !c.is_alphabetic());
                 if !trimmed.is_empty() {
                     self.suffix = self.offset + trimmed.len();
@@ -953,7 +956,7 @@ fn line<'a>(
         // end of the line.
         let base = expanded.end - shaped.text.len();
         let start = range.start.max(base);
-        let text = &p.bidi.text[start .. range.end];
+        let text = &p.bidi.text[start..range.end];
         let trimmed = text.trim_end();
         range.end = start + trimmed.len();

@@ -973,7 +976,7 @@ fn line<'a>(
         // are no other items in the line.
         if hyphen || start + shaped.text.len() > range.end {
             if hyphen || start < range.end || before.is_empty() {
-                let shifted = start - base .. range.end - base;
+                let shifted = start - base..range.end - base;
                 let mut reshaped = shaped.reshape(world, shifted);
                 if hyphen || shy {
                     reshaped.push_hyphen(world);
@@ -996,7 +999,7 @@ fn line<'a>(
         // Reshape if necessary.
         if range.start + shaped.text.len() > end {
             if range.start < end {
-                let shifted = range.start - base .. end - base;
+                let shifted = range.start - base..end - base;
                 let reshaped = shaped.reshape(world, shifted);
                 width += reshaped.width;
                 first = Some(Item::Text(reshaped));
@@ -1168,7 +1171,7 @@ fn commit(
         offset += p.align.position(remaining);
     }
     if width > Abs::zero() {
-        for _ in 0 .. (count as usize).min(1000) {
+        for _ in 0..(count as usize).min(1000) {
             push(&mut offset, frame.clone());
             offset += apart;
         }
@@ -1229,7 +1232,7 @@ fn reorder<'a>(line: &'a Line<'a>) -> Vec<&Item<'a>> {
         reordered.extend(line.slice(run.clone()));

         if levels[run.start].is_rtl() {
-            reordered[prev ..].reverse();
+            reordered[prev..].reverse();
         }
     }

@@ -117,22 +117,38 @@ impl<'s> Quotes<'s> {

     /// The opening quote.
     fn open(&self, double: bool) -> &'s str {
-        if double { self.double_open } else { self.single_open }
+        if double {
+            self.double_open
+        } else {
+            self.single_open
+        }
     }

     /// The closing quote.
     fn close(&self, double: bool) -> &'s str {
-        if double { self.double_close } else { self.single_close }
+        if double {
+            self.double_close
+        } else {
+            self.single_close
+        }
     }

     /// Which character should be used as a prime.
     fn prime(&self, double: bool) -> &'static str {
-        if double { "″" } else { "′" }
+        if double {
+            "″"
+        } else {
+            "′"
+        }
     }

     /// Which character should be used as a fallback quote.
     fn fallback(&self, double: bool) -> &'static str {
-        if double { "\"" } else { "’" }
+        if double {
+            "\""
+        } else {
+            "’"
+        }
     }
 }

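The one-line if/else bodies expanded in this hunk line up with the removal of single_line_if_else_max_width = 60 from rustfmt.toml further down: that option lets short if/else expressions stay on one line, and without it rustfmt falls back to the block form. The causal linkage is my reading of the config hunk, not something the commit states. A small sketch of the two renderings (hypothetical code):

fn sign(x: i32) -> &'static str {
    // Block form, as produced without single_line_if_else_max_width:
    if x >= 0 {
        "non-negative"
    } else {
        "negative"
    }
    // Under the old setting the same body could be rendered as
    // `if x >= 0 { "non-negative" } else { "negative" }`.
}

fn main() {
    assert_eq!(sign(3), "non-negative");
    assert_eq!(sign(-1), "negative");
}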
@@ -98,7 +98,6 @@ impl<'a> ShapedText<'a> {
             self.glyphs.as_ref().group_by_key(|g| (g.font.clone(), g.y_offset))
         {
             let pos = Point::new(offset, top + shift + y_offset.at(self.size));
-
             let glyphs = group
                 .iter()
                 .map(|glyph| Glyph {
@@ -115,14 +114,7 @@ impl<'a> ShapedText<'a> {
                 })
                 .collect();

-            let text = Text {
-                font,
-                size: self.size,
-                lang,
-                fill,
-                glyphs,
-            };
-
+            let text = Text { font, size: self.size, lang, fill, glyphs };
             let text_layer = frame.layer();
             let width = text.width();

@@ -253,7 +245,7 @@ impl<'a> ShapedText<'a> {

         let left = self.find_safe_to_break(start, Side::Left)?;
         let right = self.find_safe_to_break(end, Side::Right)?;
-        Some(&self.glyphs[left .. right])
+        Some(&self.glyphs[left..right])
     }

     /// Find the glyph offset matching the text index that is most towards the
@@ -274,7 +266,11 @@ impl<'a> ShapedText<'a> {
             .glyphs
             .binary_search_by(|g| {
                 let ordering = g.cluster.cmp(&text_index);
-                if ltr { ordering } else { ordering.reverse() }
+                if ltr {
+                    ordering
+                } else {
+                    ordering.reverse()
+                }
             })
             .ok()?;

@@ -385,9 +381,7 @@ fn shape_segment<'a>(
     }

     // Extract the font id or shape notdef glyphs if we couldn't find any font.
-    let font = if let Some(font) = selection {
-        font
-    } else {
+    let Some(font) = selection else {
         if let Some(font) = ctx.used.first().cloned() {
             shape_tofus(ctx, base, text, font);
         }
@@ -429,7 +423,7 @@ fn shape_segment<'a>(
                 y_offset: font.to_em(pos[i].y_offset),
                 cluster: base + cluster,
                 safe_to_break: !info.unsafe_to_break(),
-                c: text[cluster ..].chars().next().unwrap(),
+                c: text[cluster..].chars().next().unwrap(),
             });
         } else {
             // Determine the source text range for the tofu sequence.
@@ -466,11 +460,11 @@ fn shape_segment<'a>(
             .and_then(|last| infos.get(last))
             .map_or(text.len(), |info| info.cluster as usize);

-        start .. end
+        start..end
     };

     // Trim half-baked cluster.
-    let remove = base + range.start .. base + range.end;
+    let remove = base + range.start..base + range.end;
     while ctx.glyphs.last().map_or(false, |g| remove.contains(&g.cluster)) {
         ctx.glyphs.pop();
     }
@@ -78,10 +78,7 @@ fn search_text(content: &Content, mode: ShiftKind) -> Option<EcoString> {
     } else if content.is::<SpaceNode>() {
         Some(' '.into())
     } else if let Some(text) = content.downcast::<TextNode>() {
-        if let Some(sup) = convert_script(&text.0, mode) {
-            return Some(sup);
-        }
-        None
+        convert_script(&text.0, mode)
     } else if let Some(seq) = content.downcast::<SequenceNode>() {
         let mut full = EcoString::new();
         for item in seq.0.iter() {
@@ -138,7 +135,7 @@ fn to_superscript_codepoint(c: char) -> Option<char> {
         '1' => 0x00B9,
         '2' => 0x00B2,
         '3' => 0x00B3,
-        '4' ..= '9' => 0x2070 + (c as u32 + 4 - '4' as u32),
+        '4'..='9' => 0x2070 + (c as u32 + 4 - '4' as u32),
         '+' => 0x207A,
         '-' => 0x207B,
         '=' => 0x207C,
@@ -155,7 +152,7 @@ fn to_superscript_codepoint(c: char) -> Option<char> {
 fn to_subscript_codepoint(c: char) -> Option<char> {
     char::from_u32(match c {
         '0' => 0x2080,
-        '1' ..= '9' => 0x2080 + (c as u32 - '0' as u32),
+        '1'..='9' => 0x2080 + (c as u32 - '0' as u32),
         '+' => 0x208A,
         '-' => 0x208B,
         '=' => 0x208C,
@@ -18,7 +18,8 @@ pub fn capability(_: TokenStream, item: TokenStream) -> TokenStream {
     quote! {
         #item_trait
         impl ::typst::model::Capability for dyn #name {}
-    }.into()
+    }
+    .into()
 }

 /// Implement `Node` for a struct.
@@ -349,10 +350,7 @@ fn parse_property(item: &mut syn::ImplItemConst) -> Result<Property> {

     let span = property.name.span();
     if property.skip && property.shorthand.is_some() {
-        return Err(Error::new(
-            span,
-            "skip and shorthand are mutually exclusive",
-        ));
+        return Err(Error::new(span, "skip and shorthand are mutually exclusive"));
     }

     if property.referenced && (property.fold || property.resolve) {
rustfmt.toml (13 changed lines)
@@ -1,11 +1,6 @@
-unstable_features = true
-overflow_delimited_expr = true
-spaces_around_ranges = true
+use_small_heuristics = "Max"
+max_width = 90
+chain_width = 70
+struct_lit_width = 50
 use_field_init_shorthand = true
 merge_derives = false
-
-max_width = 90
-struct_lit_width = 40
-chain_width = 70
-single_line_if_else_max_width = 60
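For reference, the complete rustfmt.toml after this commit, reconstructed directly from the hunk above (the file is just these six lines):

use_small_heuristics = "Max"
max_width = 90
chain_width = 70
struct_lit_width = 50
use_field_init_shorthand = true
merge_derives = false

The removed options (unstable_features, overflow_delimited_expr, spaces_around_ranges, single_line_if_else_max_width, and the old struct_lit_width = 40) account for the formatting churn in the rest of the diff: tight .. ranges, struct literals collapsed onto one line where they fit, delimited trailing arguments no longer overflowed into the call parentheses, and if/else expressions in block form.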
@@ -85,8 +85,8 @@ impl SourceError {
         let full = world.source(self.span.source()).range(self.span);
         match self.pos {
             ErrorPos::Full => full,
-            ErrorPos::Start => full.start .. full.start,
-            ErrorPos::End => full.end .. full.end,
+            ErrorPos::Start => full.start..full.start,
+            ErrorPos::End => full.end..full.end,
         }
     }
 }
@@ -3,5 +3,5 @@
 mod pdf;
 mod render;

-pub use pdf::pdf;
-pub use render::render;
+pub use self::pdf::pdf;
+pub use self::render::render;

@@ -174,7 +174,7 @@ where
         &'a self,
         refs: &'a [Ref],
     ) -> impl Iterator<Item = (Ref, usize)> + 'a {
-        refs.iter().copied().zip(0 .. self.to_pdf.len())
+        refs.iter().copied().zip(0..self.to_pdf.len())
     }

     fn items(&self) -> impl Iterator<Item = &T> + '_ {
@@ -256,8 +256,8 @@ fn render_outline_glyph(
     // Blend the glyph bitmap with the existing pixels on the canvas.
     // FIXME: This doesn't respect the clipping mask.
     let pixels = bytemuck::cast_slice_mut::<u8, u32>(canvas.data_mut());
-    for x in left.clamp(0, cw) .. right.clamp(0, cw) {
-        for y in top.clamp(0, ch) .. bottom.clamp(0, ch) {
+    for x in left.clamp(0, cw)..right.clamp(0, cw) {
+        for y in top.clamp(0, ch)..bottom.clamp(0, ch) {
             let ai = ((y - top) * mw + (x - left)) as usize;
             let cov = bitmap.coverage[ai];
             if cov == 0 {
@@ -312,10 +312,7 @@ fn render_shape(

     if let Some(Stroke { paint, thickness }) = shape.stroke {
         let paint = paint.into();
-        let stroke = sk::Stroke {
-            width: thickness.to_f32(),
-            ..Default::default()
-        };
+        let stroke = sk::Stroke { width: thickness.to_f32(), ..Default::default() };
         canvas.stroke_path(&path, &paint, &stroke, ts, mask);
     }

@@ -342,11 +339,8 @@ fn render_image(
     match image.decode().unwrap() {
         DecodedImage::Raster(dynamic, _) => {
             let downscale = w < image.width();
-            let filter = if downscale {
-                FilterType::Lanczos3
-            } else {
-                FilterType::CatmullRom
-            };
+            let filter =
+                if downscale { FilterType::Lanczos3 } else { FilterType::CatmullRom };
             let buf = dynamic.resize(w, h, filter);
             for ((_, _, src), dest) in buf.pixels().zip(pixmap.pixels_mut()) {
                 let Rgba([r, g, b, a]) = src;
@@ -172,7 +172,7 @@ impl FontInfo {
     /// Compute metadata for all fonts in the given data.
     pub fn from_data(data: &[u8]) -> impl Iterator<Item = FontInfo> + '_ {
         let count = ttf_parser::fonts_in_collection(data).unwrap_or(1);
-        (0 .. count).filter_map(move |index| {
+        (0..count).filter_map(move |index| {
             let ttf = ttf_parser::Face::parse(data, index).ok()?;
             Self::from_ttf(&ttf)
         })
@@ -241,9 +241,9 @@ impl FontInfo {
         if let Some(panose) = ttf
             .raw_face()
             .table(Tag::from_bytes(b"OS/2"))
-            .and_then(|os2| os2.get(32 .. 45))
+            .and_then(|os2| os2.get(32..45))
         {
-            if matches!(panose, [2, 2 ..= 10, ..]) {
+            if matches!(panose, [2, 2..=10, ..]) {
                 flags.insert(FontFlags::SERIF);
             }
         }
@@ -305,9 +305,8 @@ fn typographic_family(mut family: &str) -> &str {
     const SEPARATORS: [char; 3] = [' ', '-', '_'];

     // Modifiers that can appear in combination with suffixes.
-    const MODIFIERS: &[&str] = &[
-        "extra", "ext", "ex", "x", "semi", "sem", "sm", "demi", "dem", "ultra",
-    ];
+    const MODIFIERS: &[&str] =
+        &["extra", "ext", "ex", "x", "semi", "sem", "sm", "demi", "dem", "ultra"];

     // Style suffixes.
     #[rustfmt::skip]
@@ -331,9 +330,8 @@ fn typographic_family(mut family: &str) -> &str {
         len = trimmed.len();

         // Find style suffix.
-        let mut t = match SUFFIXES.iter().find_map(|s| trimmed.strip_suffix(s)) {
-            Some(t) => t,
-            None => break,
+        let Some(mut t) = SUFFIXES.iter().find_map(|s| trimmed.strip_suffix(s)) else {
+            break;
         };

         // Strip optional separator.
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
&family[.. len]
|
&family[..len]
|
||||||
}
|
}
|
||||||
|
|
||||||
/// How many words the two strings share in their prefix.
|
/// How many words the two strings share in their prefix.
|
||||||
@ -411,7 +409,7 @@ impl Coverage {
|
|||||||
let mut cursor = 0;
|
let mut cursor = 0;
|
||||||
|
|
||||||
for &run in &self.0 {
|
for &run in &self.0 {
|
||||||
if (cursor .. cursor + run).contains(&c) {
|
if (cursor..cursor + run).contains(&c) {
|
||||||
return inside;
|
return inside;
|
||||||
}
|
}
|
||||||
cursor += run;
|
cursor += run;
|
||||||
@ -432,19 +430,13 @@ mod tests {
|
|||||||
assert_eq!(typographic_family("eras bold"), "eras");
|
assert_eq!(typographic_family("eras bold"), "eras");
|
||||||
assert_eq!(typographic_family("footlight mt light"), "footlight mt");
|
assert_eq!(typographic_family("footlight mt light"), "footlight mt");
|
||||||
assert_eq!(typographic_family("times new roman"), "times new roman");
|
assert_eq!(typographic_family("times new roman"), "times new roman");
|
||||||
assert_eq!(
|
assert_eq!(typographic_family("noto sans mono cond sembd"), "noto sans mono");
|
||||||
typographic_family("noto sans mono cond sembd"),
|
|
||||||
"noto sans mono"
|
|
||||||
);
|
|
||||||
assert_eq!(typographic_family("noto serif SEMCOND sembd"), "noto serif");
|
assert_eq!(typographic_family("noto serif SEMCOND sembd"), "noto serif");
|
||||||
assert_eq!(typographic_family("crimson text"), "crimson text");
|
assert_eq!(typographic_family("crimson text"), "crimson text");
|
||||||
assert_eq!(typographic_family("footlight light"), "footlight");
|
assert_eq!(typographic_family("footlight light"), "footlight");
|
||||||
assert_eq!(typographic_family("Noto Sans"), "Noto Sans");
|
assert_eq!(typographic_family("Noto Sans"), "Noto Sans");
|
||||||
assert_eq!(typographic_family("Noto Sans Light"), "Noto Sans");
|
assert_eq!(typographic_family("Noto Sans Light"), "Noto Sans");
|
||||||
assert_eq!(
|
assert_eq!(typographic_family("Noto Sans Semicondensed Heavy"), "Noto Sans");
|
||||||
typographic_family("Noto Sans Semicondensed Heavy"),
|
|
||||||
"Noto Sans"
|
|
||||||
);
|
|
||||||
assert_eq!(typographic_family("Familx"), "Familx");
|
assert_eq!(typographic_family("Familx"), "Familx");
|
||||||
assert_eq!(typographic_family("Font Ultra"), "Font Ultra");
|
assert_eq!(typographic_family("Font Ultra"), "Font Ultra");
|
||||||
assert_eq!(typographic_family("Font Ultra Bold"), "Font");
|
assert_eq!(typographic_family("Font Ultra Bold"), "Font");
|
||||||
@ -458,7 +450,7 @@ mod tests {
|
|||||||
assert_eq!(coverage.0, runs);
|
assert_eq!(coverage.0, runs);
|
||||||
|
|
||||||
let max = 5 + set.iter().copied().max().unwrap_or_default();
|
let max = 5 + set.iter().copied().max().unwrap_or_default();
|
||||||
for c in 0 .. max {
|
for c in 0..max {
|
||||||
assert_eq!(set.contains(&c), coverage.contains(c));
|
assert_eq!(set.contains(&c), coverage.contains(c));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -3,8 +3,8 @@
 mod book;
 mod variant;

-pub use book::*;
-pub use variant::*;
+pub use self::book::*;
+pub use self::variant::*;

 use std::fmt::{self, Debug, Formatter};
 use std::hash::{Hash, Hasher};
src/frame.rs (15 changed lines)
@@ -153,7 +153,7 @@ impl Frame {
     where
         I: IntoIterator<Item = (Point, Element)>,
     {
-        Arc::make_mut(&mut self.elements).splice(0 .. 0, elements);
+        Arc::make_mut(&mut self.elements).splice(0..0, elements);
     }

     /// Add a frame at a position in the background.
@@ -181,7 +181,7 @@ impl Frame {

         // Try to transfer the elements without adjusting the position.
         // Also try to reuse the elements if the Arc isn't shared.
-        let range = layer .. layer;
+        let range = layer..layer;
         if pos.is_zero() {
             let sink = Arc::make_mut(&mut self.elements);
             match Arc::try_unwrap(frame.elements) {
@@ -407,7 +407,7 @@ impl Lang {

     /// Return the language code as an all lowercase string slice.
     pub fn as_str(&self) -> &str {
-        std::str::from_utf8(&self.0[.. usize::from(self.1)]).unwrap_or_default()
+        std::str::from_utf8(&self.0[..usize::from(self.1)]).unwrap_or_default()
     }

     /// The default direction for the language.
@@ -426,9 +426,9 @@ impl FromStr for Lang {
     /// Construct a language from a two- or three-byte ISO 639-1/2/3 code.
     fn from_str(iso: &str) -> Result<Self, Self::Err> {
         let len = iso.len();
-        if matches!(len, 2 ..= 3) && iso.is_ascii() {
+        if matches!(len, 2..=3) && iso.is_ascii() {
             let mut bytes = [b' '; 3];
-            bytes[.. len].copy_from_slice(iso.as_bytes());
+            bytes[..len].copy_from_slice(iso.as_bytes());
             bytes.make_ascii_lowercase();
             Ok(Self(bytes, len as u8))
         } else {
@@ -538,9 +538,6 @@ impl Role {
     pub fn is_weak(self) -> bool {
         // In Typst, all text is in a paragraph, so paragraph isn't very
         // descriptive.
-        matches!(
-            self,
-            Self::Paragraph | Self::GenericBlock | Self::GenericInline
-        )
+        matches!(self, Self::Paragraph | Self::GenericBlock | Self::GenericInline)
     }
 }
@@ -54,10 +54,7 @@ impl<T> Axes<T> {

     /// Zip two instances into an instance over a tuple.
     pub fn zip<U>(self, other: Axes<U>) -> Axes<(T, U)> {
-        Axes {
-            x: (self.x, other.x),
-            y: (self.y, other.y),
-        }
+        Axes { x: (self.x, other.x), y: (self.y, other.y) }
     }

     /// Whether a condition is true for at least one of fields.
@@ -100,18 +97,12 @@ impl<T: Default> Axes<T> {
 impl<T: Ord> Axes<T> {
     /// The component-wise minimum of this and another instance.
     pub fn min(self, other: Self) -> Self {
-        Self {
-            x: self.x.min(other.x),
-            y: self.y.min(other.y),
-        }
+        Self { x: self.x.min(other.x), y: self.y.min(other.y) }
     }

     /// The component-wise minimum of this and another instance.
     pub fn max(self, other: Self) -> Self {
-        Self {
-            x: self.x.max(other.x),
-            y: self.y.max(other.y),
-        }
+        Self { x: self.x.max(other.x), y: self.y.max(other.y) }
     }
 }

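Hunks like the two above, where multi-line struct literals collapse onto one line, are the visible effect of the width-heuristic changes in rustfmt.toml (use_small_heuristics = "Max" plus struct_lit_width raised from 40 to 50), at least as I read that config hunk. A self-contained sketch of the resulting one-line style, which also relies on use_field_init_shorthand = true (kept by the new config); the types and values are hypothetical, not from this repository:

#[derive(Debug, PartialEq)]
struct Axes<T> {
    x: T,
    y: T,
}

fn splat(v: i32) -> Axes<i32> {
    let (x, y) = (v, v);
    // Short literals stay on one line; field-init shorthand lets `x, y`
    // stand in for `x: x, y: y`.
    Axes { x, y }
}

fn main() {
    assert_eq!(splat(2), Axes { x: 2, y: 2 });
}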
@@ -16,12 +16,7 @@ pub struct Corners<T> {
 impl<T> Corners<T> {
     /// Create a new instance from the four components.
     pub const fn new(top_left: T, top_right: T, bottom_right: T, bottom_left: T) -> Self {
-        Self {
-            top_left,
-            top_right,
-            bottom_right,
-            bottom_left,
-        }
+        Self { top_left, top_right, bottom_right, bottom_left }
     }

     /// Create an instance with four equal components.
@@ -66,12 +61,7 @@ impl<T> Corners<T> {
     /// An iterator over the corners, starting with the top left corner,
     /// clockwise.
     pub fn iter(&self) -> impl Iterator<Item = &T> {
-        [
-            &self.top_left,
-            &self.top_right,
-            &self.bottom_right,
-            &self.bottom_left,
-        ]
+        [&self.top_left, &self.top_right, &self.bottom_right, &self.bottom_left]
         .into_iter()
     }

@@ -18,9 +18,5 @@ pub fn ellipse(size: Size, fill: Option<Paint>, stroke: Option<Stroke>) -> Shape
     path.cubic_to(point(rx, my), point(mx, ry), point(z, ry));
     path.cubic_to(point(-mx, ry), point(-rx, my), point(-rx, z));

-    Shape {
-        geometry: Geometry::Path(path),
-        stroke,
-        fill,
-    }
+    Shape { geometry: Geometry::Path(path), stroke, fill }
 }
@@ -45,7 +45,11 @@ impl Em {
     /// Convert to an absolute length at the given font size.
     pub fn at(self, font_size: Abs) -> Abs {
         let resolved = font_size * self.get();
-        if resolved.is_finite() { resolved } else { Abs::zero() }
+        if resolved.is_finite() {
+            resolved
+        } else {
+            Abs::zero()
+        }
     }
 }

@@ -92,10 +92,7 @@ impl Add for Length {
     type Output = Self;

     fn add(self, rhs: Self) -> Self::Output {
-        Self {
-            abs: self.abs + rhs.abs,
-            em: self.em + rhs.em,
-        }
+        Self { abs: self.abs + rhs.abs, em: self.em + rhs.em }
     }
 }

@@ -24,27 +24,27 @@ mod size;
 mod stroke;
 mod transform;

-pub use abs::*;
-pub use align::*;
-pub use angle::*;
-pub use axes::*;
-pub use corners::*;
-pub use dir::*;
-pub use ellipse::*;
-pub use em::*;
-pub use fr::*;
-pub use length::*;
-pub use paint::*;
-pub use path::*;
-pub use point::*;
-pub use ratio::*;
-pub use rel::*;
-pub use rounded::*;
-pub use scalar::*;
-pub use sides::*;
-pub use size::*;
-pub use stroke::*;
-pub use transform::*;
+pub use self::abs::*;
+pub use self::align::*;
+pub use self::angle::*;
+pub use self::axes::*;
+pub use self::corners::*;
+pub use self::dir::*;
+pub use self::ellipse::*;
+pub use self::em::*;
+pub use self::fr::*;
+pub use self::length::*;
+pub use self::paint::*;
+pub use self::path::*;
+pub use self::point::*;
+pub use self::ratio::*;
+pub use self::rel::*;
+pub use self::rounded::*;
+pub use self::scalar::*;
+pub use self::sides::*;
+pub use self::size::*;
+pub use self::stroke::*;
+pub use self::transform::*;

 use std::cmp::Ordering;
 use std::f64::consts::PI;
@@ -95,20 +95,12 @@ pub enum Geometry {
 impl Geometry {
     /// Fill the geometry without a stroke.
     pub fn filled(self, fill: Paint) -> Shape {
-        Shape {
-            geometry: self,
-            fill: Some(fill),
-            stroke: None,
-        }
+        Shape { geometry: self, fill: Some(fill), stroke: None }
     }

     /// Stroke the geometry without a fill.
     pub fn stroked(self, stroke: Stroke) -> Shape {
-        Shape {
-            geometry: self,
-            fill: None,
-            stroke: Some(stroke),
-        }
+        Shape { geometry: self, fill: None, stroke: Some(stroke) }
     }
 }

@@ -244,11 +244,11 @@ impl FromStr for RgbaColor {
         }

         let mut values: [u8; 4] = [u8::MAX; 4];
-        for elem in if alpha { 0 .. 4 } else { 0 .. 3 } {
+        for elem in if alpha { 0..4 } else { 0..3 } {
             let item_len = if long { 2 } else { 1 };
             let pos = elem * item_len;

-            let item = &hex_str[pos .. (pos + item_len)];
+            let item = &hex_str[pos..(pos + item_len)];
             values[elem] = u8::from_str_radix(item, 16).unwrap();

             if short {
@@ -324,12 +324,7 @@ impl CmykColor {
             round_u8(255.0 * (1.0 - c) * (1.0 - k))
         };
 
-        RgbaColor {
-            r: f(self.c),
-            g: f(self.m),
-            b: f(self.y),
-            a: 255,
-        }
+        RgbaColor { r: f(self.c), g: f(self.m), b: f(self.y), a: 255 }
     }
 
     /// Lighten this color by a factor.
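
The `round_u8(255.0 * (1.0 - c) * (1.0 - k))` expression above is the usual CMYK-to-RGB channel conversion. A standalone sketch of the full conversion (the helper names are illustrative):

fn round_u8(value: f64) -> u8 {
    value.round().clamp(0.0, 255.0) as u8
}

/// Convert CMYK components in 0.0..=1.0 to an (r, g, b) triple.
fn cmyk_to_rgb(c: f64, m: f64, y: f64, k: f64) -> (u8, u8, u8) {
    let f = |x: f64| round_u8(255.0 * (1.0 - x) * (1.0 - k));
    (f(c), f(m), f(y))
}

fn main() {
    // Pure cyan at full strength: (0, 255, 255).
    println!("{:?}", cmyk_to_rgb(1.0, 0.0, 0.0, 0.0));
}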
@@ -46,7 +46,11 @@ impl Ratio {
     /// Return the ratio of the given `whole`.
     pub fn of<T: Numeric>(self, whole: T) -> T {
         let resolved = whole * self.get();
-        if resolved.is_finite() { resolved } else { T::zero() }
+        if resolved.is_finite() {
+            resolved
+        } else {
+            T::zero()
+        }
     }
 }
 
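
The `is_finite()` guard above falls back to zero when the multiplication overflows to infinity or produces NaN. The same guard in a tiny standalone form (the function name is made up):

/// Scale a value, but never return a non-finite result.
fn safe_scale(value: f64, factor: f64) -> f64 {
    let resolved = value * factor;
    if resolved.is_finite() {
        resolved
    } else {
        0.0
    }
}

fn main() {
    assert_eq!(safe_scale(2.0, 3.0), 6.0);
    assert_eq!(safe_scale(f64::MAX, f64::MAX), 0.0); // overflow to infinity
    println!("ok");
}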
@@ -128,10 +128,7 @@ impl<T: Numeric> Mul<f64> for Rel<T> {
     type Output = Self;
 
     fn mul(self, other: f64) -> Self::Output {
-        Self {
-            rel: self.rel * other,
-            abs: self.abs * other,
-        }
+        Self { rel: self.rel * other, abs: self.abs * other }
     }
 }
 
@@ -147,10 +144,7 @@ impl<T: Numeric> Div<f64> for Rel<T> {
     type Output = Self;
 
     fn div(self, other: f64) -> Self::Output {
-        Self {
-            rel: self.rel / other,
-            abs: self.abs / other,
-        }
+        Self { rel: self.rel / other, abs: self.abs / other }
     }
 }
 
@@ -21,11 +21,7 @@ pub fn rounded_rect(
     if !stroke.is_uniform() {
         for (path, stroke) in stroke_segments(size, radius, stroke) {
             if stroke.is_some() {
-                res.push(Shape {
-                    geometry: Geometry::Path(path),
-                    fill: None,
-                    stroke,
-                });
+                res.push(Shape { geometry: Geometry::Path(path), fill: None, stroke });
             }
         }
     }
@@ -29,11 +29,7 @@ impl Args {
     pub fn new(span: Span, values: impl IntoIterator<Item = Value>) -> Self {
         let items = values
             .into_iter()
-            .map(|value| Arg {
-                span,
-                name: None,
-                value: Spanned::new(value, span),
-            })
+            .map(|value| Arg { span, name: None, value: Spanned::new(value, span) })
             .collect();
         Self { span, items }
     }
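
The constructor above wraps every positional value in the same span via `map` and `collect`. A simplified sketch of that pattern with stand-in types (`Span`, `Spanned`, and `Arg` here are minimal stubs, not the real typst types):

#[derive(Debug, Clone, Copy)]
struct Span(u64);

#[derive(Debug)]
struct Spanned<T> {
    v: T,
    span: Span,
}

#[derive(Debug)]
struct Arg {
    span: Span,
    name: Option<String>,
    value: Spanned<i64>,
}

fn new_args(span: Span, values: impl IntoIterator<Item = i64>) -> Vec<Arg> {
    values
        .into_iter()
        // Every argument gets the same call-site span.
        .map(|value| Arg { span, name: None, value: Spanned { v: value, span } })
        .collect()
}

fn main() {
    println!("{:?}", new_args(Span(7), [1, 2, 3]));
}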
@@ -119,7 +119,7 @@ impl Array {
             .ok_or_else(|| out_of_bounds(end, len))?
             .max(start);
 
-        Ok(Self::from_vec(self.0[start .. end].to_vec()))
+        Ok(Self::from_vec(self.0[start..end].to_vec()))
     }
 
     /// Whether the array contains a specific value.
@@ -287,11 +287,7 @@ impl Array {
 
     /// Resolve an index.
     fn locate(&self, index: i64) -> Option<usize> {
-        usize::try_from(if index >= 0 {
-            index
-        } else {
-            self.len().checked_add(index)?
-        })
+        usize::try_from(if index >= 0 { index } else { self.len().checked_add(index)? })
             .ok()
     }
 }
@@ -161,10 +161,7 @@ impl Add for Content {
             return lhs;
         }
 
-        let seq = match (
-            lhs.downcast::<SequenceNode>(),
-            rhs.downcast::<SequenceNode>(),
-        ) {
+        let seq = match (lhs.downcast::<SequenceNode>(), rhs.downcast::<SequenceNode>()) {
             (Some(lhs), Some(rhs)) => lhs.0.iter().chain(&rhs.0).cloned().collect(),
             (Some(lhs), None) => lhs.0.iter().cloned().chain(iter::once(rhs)).collect(),
             (None, Some(rhs)) => iter::once(lhs).chain(rhs.0.iter().cloned()).collect(),
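
The match above flattens two possibly-sequence nodes into one list by chaining iterators instead of nesting sequences. A reduced sketch of the same idea over plain vectors (the enum here is illustrative only):

use std::iter;

#[derive(Debug, Clone)]
enum Content {
    Text(String),
    Sequence(Vec<Content>),
}

fn concat(lhs: Content, rhs: Content) -> Vec<Content> {
    match (lhs, rhs) {
        // Both sides are already sequences: splice them together.
        (Content::Sequence(a), Content::Sequence(b)) => a.into_iter().chain(b).collect(),
        (Content::Sequence(a), b) => a.into_iter().chain(iter::once(b)).collect(),
        (a, Content::Sequence(b)) => iter::once(a).chain(b).collect(),
        (a, b) => vec![a, b],
    }
}

fn main() {
    let a = Content::Sequence(vec![Content::Text("Hello, ".into())]);
    let b = Content::Text("world!".into());
    println!("{:?}", concat(a, b));
}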
@ -140,7 +140,6 @@ fn eval_markup(
|
|||||||
vm.scopes.top.define(wrap.binding().take(), tail);
|
vm.scopes.top.define(wrap.binding().take(), tail);
|
||||||
wrap.body().eval(vm)?.display(vm.world)
|
wrap.body().eval(vm)?.display(vm.world)
|
||||||
}
|
}
|
||||||
|
|
||||||
_ => node.eval(vm)?,
|
_ => node.eval(vm)?,
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -162,7 +161,7 @@ impl Eval for ast::MarkupNode {
|
|||||||
fn eval(&self, vm: &mut Vm) -> SourceResult<Self::Output> {
|
fn eval(&self, vm: &mut Vm) -> SourceResult<Self::Output> {
|
||||||
match self {
|
match self {
|
||||||
Self::Space(v) => Ok(match v.newlines() {
|
Self::Space(v) => Ok(match v.newlines() {
|
||||||
0 ..= 1 => (vm.items.space)(),
|
0..=1 => (vm.items.space)(),
|
||||||
_ => (vm.items.parbreak)(),
|
_ => (vm.items.parbreak)(),
|
||||||
}),
|
}),
|
||||||
Self::Linebreak(v) => v.eval(vm),
|
Self::Linebreak(v) => v.eval(vm),
|
||||||
@ -369,10 +368,7 @@ impl Eval for ast::Frac {
|
|||||||
type Output = Content;
|
type Output = Content;
|
||||||
|
|
||||||
fn eval(&self, vm: &mut Vm) -> SourceResult<Self::Output> {
|
fn eval(&self, vm: &mut Vm) -> SourceResult<Self::Output> {
|
||||||
Ok((vm.items.math_frac)(
|
Ok((vm.items.math_frac)(self.num().eval(vm)?, self.denom().eval(vm)?))
|
||||||
self.num().eval(vm)?,
|
|
||||||
self.denom().eval(vm)?,
|
|
||||||
))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -501,7 +497,6 @@ fn eval_code(
|
|||||||
vm.scopes.top.define(wrap.binding().take(), tail);
|
vm.scopes.top.define(wrap.binding().take(), tail);
|
||||||
wrap.body().eval(vm)?
|
wrap.body().eval(vm)?
|
||||||
}
|
}
|
||||||
|
|
||||||
_ => expr.eval(vm)?,
|
_ => expr.eval(vm)?,
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -676,18 +671,12 @@ impl Eval for ast::FieldAccess {
|
|||||||
|
|
||||||
Ok(match object {
|
Ok(match object {
|
||||||
Value::Dict(dict) => dict.get(&field).at(span)?.clone(),
|
Value::Dict(dict) => dict.get(&field).at(span)?.clone(),
|
||||||
|
|
||||||
Value::Content(node) => node
|
Value::Content(node) => node
|
||||||
.to::<dyn Show>()
|
.to::<dyn Show>()
|
||||||
.and_then(|node| node.field(&field))
|
.and_then(|node| node.field(&field))
|
||||||
.ok_or_else(|| format!("unknown field {field:?}"))
|
.ok_or_else(|| format!("unknown field {field:?}"))
|
||||||
.at(span)?,
|
.at(span)?,
|
||||||
|
v => bail!(self.target().span(), "cannot access field on {}", v.type_name()),
|
||||||
v => bail!(
|
|
||||||
self.target().span(),
|
|
||||||
"cannot access field on {}",
|
|
||||||
v.type_name()
|
|
||||||
),
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -706,7 +695,6 @@ impl Eval for ast::FuncCall {
|
|||||||
let point = || Tracepoint::Call(func.name().map(Into::into));
|
let point = || Tracepoint::Call(func.name().map(Into::into));
|
||||||
func.call(vm, args).trace(vm.world, point, self.span())?
|
func.call(vm, args).trace(vm.world, point, self.span())?
|
||||||
}
|
}
|
||||||
|
|
||||||
v => bail!(
|
v => bail!(
|
||||||
self.callee().span(),
|
self.callee().span(),
|
||||||
"expected callable or collection, found {}",
|
"expected callable or collection, found {}",
|
||||||
|
@@ -32,12 +32,7 @@ impl Func {
         name: &'static str,
         func: fn(&mut Vm, &mut Args) -> SourceResult<Value>,
     ) -> Self {
-        Self(Arc::new(Repr::Native(Native {
-            name,
-            func,
-            set: None,
-            node: None,
-        })))
+        Self(Arc::new(Repr::Native(Native { name, func, set: None, node: None })))
     }
 
     /// Create a new function from a native rust node.
@ -92,7 +87,7 @@ impl Func {
|
|||||||
Repr::Native(native) => (native.func)(vm, &mut args)?,
|
Repr::Native(native) => (native.func)(vm, &mut args)?,
|
||||||
Repr::Closure(closure) => closure.call(vm, &mut args)?,
|
Repr::Closure(closure) => closure.call(vm, &mut args)?,
|
||||||
Repr::With(wrapped, applied) => {
|
Repr::With(wrapped, applied) => {
|
||||||
args.items.splice(.. 0, applied.items.iter().cloned());
|
args.items.splice(..0, applied.items.iter().cloned());
|
||||||
return wrapped.call(vm, args);
|
return wrapped.call(vm, args);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -194,12 +189,15 @@ impl Closure {
|
|||||||
|
|
||||||
// Parse the arguments according to the parameter list.
|
// Parse the arguments according to the parameter list.
|
||||||
for (param, default) in &self.params {
|
for (param, default) in &self.params {
|
||||||
scopes.top.define(param.clone(), match default {
|
scopes.top.define(
|
||||||
None => args.expect::<Value>(param)?,
|
param.clone(),
|
||||||
|
match default {
|
||||||
Some(default) => {
|
Some(default) => {
|
||||||
args.named::<Value>(param)?.unwrap_or_else(|| default.clone())
|
args.named::<Value>(param)?.unwrap_or_else(|| default.clone())
|
||||||
}
|
}
|
||||||
});
|
None => args.expect::<Value>(param)?,
|
||||||
|
},
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Put the remaining arguments into the sink.
|
// Put the remaining arguments into the sink.
|
||||||
|
@@ -1,4 +1,4 @@
-//! Layout and computation model.
+//! Document and computation model.
 
 #[macro_use]
 mod items;
@@ -23,18 +23,18 @@ mod ops;
 mod scope;
 mod vm;
 
-pub use self::str::*;
-pub use args::*;
-pub use array::*;
-pub use cast::*;
-pub use content::*;
-pub use dict::*;
-pub use eval::*;
-pub use func::*;
-pub use items::*;
-pub use scope::*;
-pub use styles::*;
-pub use value::*;
-pub use vm::*;
-
 pub use typst_macros::{capability, node};
+
+pub use self::args::*;
+pub use self::array::*;
+pub use self::cast::*;
+pub use self::content::*;
+pub use self::dict::*;
+pub use self::eval::*;
+pub use self::func::*;
+pub use self::items::*;
+pub use self::scope::*;
+pub use self::str::*;
+pub use self::styles::*;
+pub use self::value::*;
+pub use self::vm::*;
@ -103,18 +103,18 @@ pub fn add(lhs: Value, rhs: Value) -> StrResult<Value> {
|
|||||||
if let (Some(&a), Some(&b)) =
|
if let (Some(&a), Some(&b)) =
|
||||||
(a.downcast::<GenAlign>(), b.downcast::<GenAlign>())
|
(a.downcast::<GenAlign>(), b.downcast::<GenAlign>())
|
||||||
{
|
{
|
||||||
if a.axis() != b.axis() {
|
if a.axis() == b.axis() {
|
||||||
Value::dynamic(match a.axis() {
|
|
||||||
Axis::X => Axes { x: a, y: b },
|
|
||||||
Axis::Y => Axes { x: b, y: a },
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
return Err(format!("cannot add two {:?} alignments", a.axis()));
|
return Err(format!("cannot add two {:?} alignments", a.axis()));
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
|
return Ok(Value::dynamic(match a.axis() {
|
||||||
|
Axis::X => Axes { x: a, y: b },
|
||||||
|
Axis::Y => Axes { x: b, y: a },
|
||||||
|
}));
|
||||||
|
};
|
||||||
|
|
||||||
mismatch!("cannot add {} and {}", a, b);
|
mismatch!("cannot add {} and {}", a, b);
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
(a, b) => mismatch!("cannot add {} and {}", a, b),
|
(a, b) => mismatch!("cannot add {} and {}", a, b),
|
||||||
})
|
})
|
||||||
@@ -370,17 +370,11 @@ pub fn not_in(lhs: Value, rhs: Value) -> StrResult<Value> {
 
 /// Test for containment.
 pub fn contains(lhs: &Value, rhs: &Value) -> Option<bool> {
-    Some(match (lhs, rhs) {
-        (Str(a), Str(b)) => b.as_str().contains(a.as_str()),
-        (Dyn(a), Str(b)) => {
-            if let Some(regex) = a.downcast::<Regex>() {
-                regex.is_match(b)
-            } else {
-                return Option::None;
-            }
-        }
-        (Str(a), Dict(b)) => b.contains(a),
-        (a, Array(b)) => b.contains(a),
-        _ => return Option::None,
-    })
+    match (lhs, rhs) {
+        (Str(a), Str(b)) => Some(b.as_str().contains(a.as_str())),
+        (Dyn(a), Str(b)) => a.downcast::<Regex>().map(|regex| regex.is_match(b)),
+        (Str(a), Dict(b)) => Some(b.contains(a)),
+        (a, Array(b)) => Some(b.contains(a)),
+        _ => Option::None,
+    }
 }
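
Returning `Option<bool>` lets the containment operator report "not applicable to these types" separately from a plain false. A minimal sketch of the same dispatch shape (the value enum is a stand-in, not the real one):

#[derive(Debug)]
enum Value {
    Str(String),
    Array(Vec<i64>),
    Int(i64),
}

fn contains(lhs: &Value, rhs: &Value) -> Option<bool> {
    match (lhs, rhs) {
        (Value::Str(a), Value::Str(b)) => Some(b.contains(a.as_str())),
        (Value::Int(a), Value::Array(b)) => Some(b.contains(a)),
        // Any other combination is a type error, signalled by `None`.
        _ => None,
    }
}

fn main() {
    let hay = Value::Str("haystack".into());
    let needle = Value::Str("hay".into());
    println!("{:?}", contains(&needle, &hay)); // Some(true)
    println!("{:?}", contains(&hay, &Value::Int(3))); // None
}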
@ -67,16 +67,12 @@ impl Str {
|
|||||||
.ok_or_else(|| out_of_bounds(end, len))?
|
.ok_or_else(|| out_of_bounds(end, len))?
|
||||||
.max(start);
|
.max(start);
|
||||||
|
|
||||||
Ok(self.0[start .. end].into())
|
Ok(self.0[start..end].into())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Resolve an index.
|
/// Resolve an index.
|
||||||
fn locate(&self, index: i64) -> Option<usize> {
|
fn locate(&self, index: i64) -> Option<usize> {
|
||||||
usize::try_from(if index >= 0 {
|
usize::try_from(if index >= 0 { index } else { self.len().checked_add(index)? })
|
||||||
index
|
|
||||||
} else {
|
|
||||||
self.len().checked_add(index)?
|
|
||||||
})
|
|
||||||
.ok()
|
.ok()
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -207,7 +203,7 @@ impl Str {
|
|||||||
Some(StrPattern::Regex(re)) => {
|
Some(StrPattern::Regex(re)) => {
|
||||||
let s = self.as_str();
|
let s = self.as_str();
|
||||||
let mut last = 0;
|
let mut last = 0;
|
||||||
let mut range = 0 .. s.len();
|
let mut range = 0..s.len();
|
||||||
|
|
||||||
for m in re.find_iter(s) {
|
for m in re.find_iter(s) {
|
||||||
// Does this match follow directly after the last one?
|
// Does this match follow directly after the last one?
|
||||||
@ -235,7 +231,7 @@ impl Str {
|
|||||||
range.end = s.len();
|
range.end = s.len();
|
||||||
}
|
}
|
||||||
|
|
||||||
&s[range.start .. range.start.max(range.end)]
|
&s[range.start..range.start.max(range.end)]
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -271,10 +267,7 @@ impl Str {
|
|||||||
/// The out of bounds access error message.
|
/// The out of bounds access error message.
|
||||||
#[cold]
|
#[cold]
|
||||||
fn out_of_bounds(index: i64, len: i64) -> String {
|
fn out_of_bounds(index: i64, len: i64) -> String {
|
||||||
format!(
|
format!("string index out of bounds (index: {}, len: {})", index, len)
|
||||||
"string index out of bounds (index: {}, len: {})",
|
|
||||||
index, len
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Convert an item of std's `match_indices` to a dictionary.
|
/// Convert an item of std's `match_indices` to a dictionary.
|
||||||
|
@ -94,7 +94,7 @@ impl StyleMap {
|
|||||||
/// This is useful over `chain` when you want to combine two maps, but you
|
/// This is useful over `chain` when you want to combine two maps, but you
|
||||||
/// still need an owned map without a lifetime.
|
/// still need an owned map without a lifetime.
|
||||||
pub fn apply_map(&mut self, tail: &Self) {
|
pub fn apply_map(&mut self, tail: &Self) {
|
||||||
self.0.splice(0 .. 0, tail.0.iter().cloned());
|
self.0.splice(0..0, tail.0.iter().cloned());
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Mark all contained properties as _scoped_. This means that they only
|
/// Mark all contained properties as _scoped_. This means that they only
|
||||||
@ -159,10 +159,7 @@ impl StyleEntry {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
StyleChain {
|
StyleChain { head: std::slice::from_ref(self), tail: Some(tail) }
|
||||||
head: std::slice::from_ref(self),
|
|
||||||
tail: Some(tail),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// If this is a property, return it.
|
/// If this is a property, return it.
|
||||||
@ -328,7 +325,7 @@ impl<'a> StyleChain<'a> {
|
|||||||
let mut suffix = StyleMap::new();
|
let mut suffix = StyleMap::new();
|
||||||
let take = self.links().count().saturating_sub(len);
|
let take = self.links().count().saturating_sub(len);
|
||||||
for link in self.links().take(take) {
|
for link in self.links().take(take) {
|
||||||
suffix.0.splice(0 .. 0, link.iter().cloned());
|
suffix.0.splice(0..0, link.iter().cloned());
|
||||||
}
|
}
|
||||||
suffix
|
suffix
|
||||||
}
|
}
|
||||||
@ -344,10 +341,7 @@ impl<'a> StyleChain<'a> {
|
|||||||
|
|
||||||
/// Iterate over the entries of the chain.
|
/// Iterate over the entries of the chain.
|
||||||
fn entries(self) -> Entries<'a> {
|
fn entries(self) -> Entries<'a> {
|
||||||
Entries {
|
Entries { inner: [].as_slice().iter(), links: self.links() }
|
||||||
inner: [].as_slice().iter(),
|
|
||||||
links: self.links(),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Iterate over the links of the chain.
|
/// Iterate over the links of the chain.
|
||||||
@ -582,12 +576,12 @@ impl<'a, T> StyleVecBuilder<'a, T> {
|
|||||||
for &(mut chain, _) in iter {
|
for &(mut chain, _) in iter {
|
||||||
let len = chain.links().count();
|
let len = chain.links().count();
|
||||||
if len < shared {
|
if len < shared {
|
||||||
for _ in 0 .. shared - len {
|
for _ in 0..shared - len {
|
||||||
trunk.pop();
|
trunk.pop();
|
||||||
}
|
}
|
||||||
shared = len;
|
shared = len;
|
||||||
} else if len > shared {
|
} else if len > shared {
|
||||||
for _ in 0 .. len - shared {
|
for _ in 0..len - shared {
|
||||||
chain.pop();
|
chain.pop();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -1017,7 +1011,7 @@ impl Recipe {
|
|||||||
for mat in regex.find_iter(text) {
|
for mat in regex.find_iter(text) {
|
||||||
let start = mat.start();
|
let start = mat.start();
|
||||||
if cursor < start {
|
if cursor < start {
|
||||||
result.push(make(text[cursor .. start].into()));
|
result.push(make(text[cursor..start].into()));
|
||||||
}
|
}
|
||||||
|
|
||||||
result.push(self.call(world, || Value::Str(mat.as_str().into()))?);
|
result.push(self.call(world, || Value::Str(mat.as_str().into()))?);
|
||||||
@ -1029,7 +1023,7 @@ impl Recipe {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if cursor < text.len() {
|
if cursor < text.len() {
|
||||||
result.push(make(text[cursor ..].into()));
|
result.push(make(text[cursor..].into()));
|
||||||
}
|
}
|
||||||
|
|
||||||
Content::sequence(result)
|
Content::sequence(result)
|
||||||
@ -1066,11 +1060,7 @@ impl Recipe {
|
|||||||
|
|
||||||
impl Debug for Recipe {
|
impl Debug for Recipe {
|
||||||
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
|
||||||
write!(
|
write!(f, "Recipe matching {:?} from {:?}", self.pattern, self.func.span)
|
||||||
f,
|
|
||||||
"Recipe matching {:?} from {:?}",
|
|
||||||
self.pattern, self.func.span
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -294,11 +294,8 @@ where
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn dyn_eq(&self, other: &Dynamic) -> bool {
|
fn dyn_eq(&self, other: &Dynamic) -> bool {
|
||||||
if let Some(other) = other.downcast::<Self>() {
|
let Some(other) = other.downcast::<Self>() else { return false };
|
||||||
self == other
|
self == other
|
||||||
} else {
|
|
||||||
false
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn dyn_type_name(&self) -> &'static str {
|
fn dyn_type_name(&self) -> &'static str {
|
||||||
@ -411,15 +408,9 @@ mod tests {
|
|||||||
test(Abs::pt(5.5), "5.5pt");
|
test(Abs::pt(5.5), "5.5pt");
|
||||||
test(Angle::deg(90.0), "90deg");
|
test(Angle::deg(90.0), "90deg");
|
||||||
test(Ratio::one() / 2.0, "50%");
|
test(Ratio::one() / 2.0, "50%");
|
||||||
test(
|
test(Ratio::new(0.3) + Length::from(Abs::cm(2.0)), "30% + 56.69pt");
|
||||||
Ratio::new(0.3) + Length::from(Abs::cm(2.0)),
|
|
||||||
"30% + 56.69pt",
|
|
||||||
);
|
|
||||||
test(Fr::one() * 7.55, "7.55fr");
|
test(Fr::one() * 7.55, "7.55fr");
|
||||||
test(
|
test(Color::Rgba(RgbaColor::new(1, 1, 1, 0xff)), "rgb(\"#010101\")");
|
||||||
Color::Rgba(RgbaColor::new(1, 1, 1, 0xff)),
|
|
||||||
"rgb(\"#010101\")",
|
|
||||||
);
|
|
||||||
|
|
||||||
// Collections.
|
// Collections.
|
||||||
test("hello", r#""hello""#);
|
test("hello", r#""hello""#);
|
||||||
|
@@ -1471,7 +1471,11 @@ impl ForPattern {
     pub fn key(&self) -> Option<Ident> {
         let mut children = self.0.children().filter_map(SyntaxNode::cast);
         let key = children.next();
-        if children.next().is_some() { key } else { None }
+        if children.next().is_some() {
+            key
+        } else {
+            None
+        }
     }
 
     /// The value part of the pattern.
@ -81,7 +81,7 @@ where
|
|||||||
F: FnMut(Range<usize>, Style),
|
F: FnMut(Range<usize>, Style),
|
||||||
{
|
{
|
||||||
if node.children().len() == 0 {
|
if node.children().len() == 0 {
|
||||||
let range = offset .. offset + node.len();
|
let range = offset..offset + node.len();
|
||||||
let style = highlighter.style_for_stack(&scopes);
|
let style = highlighter.style_for_stack(&scopes);
|
||||||
f(range, style);
|
f(range, style);
|
||||||
return;
|
return;
|
||||||
@ -112,7 +112,7 @@ where
|
|||||||
F: FnMut(Range<usize>, Category),
|
F: FnMut(Range<usize>, Category),
|
||||||
{
|
{
|
||||||
for (i, child) in node.children().enumerate() {
|
for (i, child) in node.children().enumerate() {
|
||||||
let span = offset .. offset + child.len();
|
let span = offset..offset + child.len();
|
||||||
if range.start <= span.end && range.end >= span.start {
|
if range.start <= span.end && range.end >= span.start {
|
||||||
if let Some(category) = Category::determine(child, node, i) {
|
if let Some(category) = Category::determine(child, node, i) {
|
||||||
f(span, category);
|
f(span, category);
|
||||||
@ -412,29 +412,35 @@ mod tests {
|
|||||||
fn test(text: &str, goal: &[(Range<usize>, Category)]) {
|
fn test(text: &str, goal: &[(Range<usize>, Category)]) {
|
||||||
let mut vec = vec![];
|
let mut vec = vec![];
|
||||||
let source = Source::detached(text);
|
let source = Source::detached(text);
|
||||||
let full = 0 .. text.len();
|
let full = 0..text.len();
|
||||||
highlight_categories(source.root(), full, &mut |range, category| {
|
highlight_categories(source.root(), full, &mut |range, category| {
|
||||||
vec.push((range, category));
|
vec.push((range, category));
|
||||||
});
|
});
|
||||||
assert_eq!(vec, goal);
|
assert_eq!(vec, goal);
|
||||||
}
|
}
|
||||||
|
|
||||||
test("= *AB*", &[(0 .. 6, Heading), (2 .. 6, Strong)]);
|
test("= *AB*", &[(0..6, Heading), (2..6, Strong)]);
|
||||||
|
|
||||||
test("#f(x + 1)", &[
|
test(
|
||||||
(0 .. 2, Function),
|
"#f(x + 1)",
|
||||||
(2 .. 3, Bracket),
|
&[
|
||||||
(5 .. 6, Operator),
|
(0..2, Function),
|
||||||
(7 .. 8, Number),
|
(2..3, Bracket),
|
||||||
(8 .. 9, Bracket),
|
(5..6, Operator),
|
||||||
]);
|
(7..8, Number),
|
||||||
|
(8..9, Bracket),
|
||||||
|
],
|
||||||
|
);
|
||||||
|
|
||||||
test("#let f(x) = x", &[
|
test(
|
||||||
(0 .. 4, Keyword),
|
"#let f(x) = x",
|
||||||
(5 .. 6, Function),
|
&[
|
||||||
(6 .. 7, Bracket),
|
(0..4, Keyword),
|
||||||
(8 .. 9, Bracket),
|
(5..6, Function),
|
||||||
(10 .. 11, Operator),
|
(6..7, Bracket),
|
||||||
]);
|
(8..9, Bracket),
|
||||||
|
(10..11, Operator),
|
||||||
|
],
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -28,7 +28,7 @@ pub fn reparse(
|
|||||||
let id = root.span().source();
|
let id = root.span().source();
|
||||||
*root = parse(text);
|
*root = parse(text);
|
||||||
root.numberize(id, Span::FULL).unwrap();
|
root.numberize(id, Span::FULL).unwrap();
|
||||||
0 .. text.len()
|
0..text.len()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Try to reparse inside the given node.
|
/// Try to reparse inside the given node.
|
||||||
@ -55,7 +55,7 @@ fn try_reparse(
|
|||||||
// Find the the first child in the range of children to reparse.
|
// Find the the first child in the range of children to reparse.
|
||||||
for (i, child) in node.children().enumerate() {
|
for (i, child) in node.children().enumerate() {
|
||||||
let pos = NodePos { idx: i, offset };
|
let pos = NodePos { idx: i, offset };
|
||||||
let child_span = offset .. offset + child.len();
|
let child_span = offset..offset + child.len();
|
||||||
child_outermost = outermost && i + 1 == original_count;
|
child_outermost = outermost && i + 1 == original_count;
|
||||||
|
|
||||||
match search {
|
match search {
|
||||||
@ -81,7 +81,7 @@ fn try_reparse(
|
|||||||
} else {
|
} else {
|
||||||
// Update compulsary state of `ahead_nontrivia`.
|
// Update compulsary state of `ahead_nontrivia`.
|
||||||
if let Some(ahead_nontrivia) = ahead.as_mut() {
|
if let Some(ahead_nontrivia) = ahead.as_mut() {
|
||||||
if let NodeKind::Space { newlines: (1 ..) } = child.kind() {
|
if let NodeKind::Space { newlines: (1..) } = child.kind() {
|
||||||
ahead_nontrivia.newline();
|
ahead_nontrivia.newline();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -126,10 +126,13 @@ fn try_reparse(
|
|||||||
// If we were looking for a non-whitespace element and hit the end of
|
// If we were looking for a non-whitespace element and hit the end of
|
||||||
// the file here, we instead use EOF as the end of the span.
|
// the file here, we instead use EOF as the end of the span.
|
||||||
if let SearchState::RequireNonTrivia(start) = search {
|
if let SearchState::RequireNonTrivia(start) = search {
|
||||||
search = SearchState::SpanFound(start, NodePos {
|
search = SearchState::SpanFound(
|
||||||
|
start,
|
||||||
|
NodePos {
|
||||||
idx: node.children().len() - 1,
|
idx: node.children().len() - 1,
|
||||||
offset: offset - node.children().last().unwrap().len(),
|
offset: offset - node.children().last().unwrap().len(),
|
||||||
})
|
},
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
if let SearchState::Contained(pos) = search {
|
if let SearchState::Contained(pos) = search {
|
||||||
@ -156,7 +159,7 @@ fn try_reparse(
|
|||||||
return Some(range);
|
return Some(range);
|
||||||
}
|
}
|
||||||
|
|
||||||
let superseded_span = pos.offset .. pos.offset + prev_len;
|
let superseded_span = pos.offset..pos.offset + prev_len;
|
||||||
let func: Option<ReparseMode> = match child.kind() {
|
let func: Option<ReparseMode> = match child.kind() {
|
||||||
NodeKind::CodeBlock => Some(ReparseMode::Code),
|
NodeKind::CodeBlock => Some(ReparseMode::Code),
|
||||||
NodeKind::ContentBlock => Some(ReparseMode::Content),
|
NodeKind::ContentBlock => Some(ReparseMode::Content),
|
||||||
@ -170,7 +173,7 @@ fn try_reparse(
|
|||||||
change,
|
change,
|
||||||
node,
|
node,
|
||||||
func,
|
func,
|
||||||
pos.idx .. pos.idx + 1,
|
pos.idx..pos.idx + 1,
|
||||||
superseded_span,
|
superseded_span,
|
||||||
outermost,
|
outermost,
|
||||||
) {
|
) {
|
||||||
@ -197,13 +200,13 @@ fn try_reparse(
|
|||||||
}
|
}
|
||||||
|
|
||||||
let superseded_span =
|
let superseded_span =
|
||||||
start.offset .. end.offset + node.children().as_slice()[end.idx].len();
|
start.offset..end.offset + node.children().as_slice()[end.idx].len();
|
||||||
|
|
||||||
replace(
|
replace(
|
||||||
change,
|
change,
|
||||||
node,
|
node,
|
||||||
ReparseMode::MarkupElements { at_start, min_indent },
|
ReparseMode::MarkupElements { at_start, min_indent },
|
||||||
start.idx .. end.idx + 1,
|
start.idx..end.idx + 1,
|
||||||
superseded_span,
|
superseded_span,
|
||||||
outermost,
|
outermost,
|
||||||
)
|
)
|
||||||
@ -223,33 +226,33 @@ fn replace(
|
|||||||
let differential: isize =
|
let differential: isize =
|
||||||
change.replacement_len as isize - change.replaced.len() as isize;
|
change.replacement_len as isize - change.replaced.len() as isize;
|
||||||
let newborn_end = (superseded_span.end as isize + differential) as usize;
|
let newborn_end = (superseded_span.end as isize + differential) as usize;
|
||||||
let newborn_span = superseded_span.start .. newborn_end;
|
let newborn_span = superseded_span.start..newborn_end;
|
||||||
|
|
||||||
let mut prefix = "";
|
let mut prefix = "";
|
||||||
for (i, c) in change.text[.. newborn_span.start].char_indices().rev() {
|
for (i, c) in change.text[..newborn_span.start].char_indices().rev() {
|
||||||
if is_newline(c) {
|
if is_newline(c) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
prefix = &change.text[i .. newborn_span.start];
|
prefix = &change.text[i..newborn_span.start];
|
||||||
}
|
}
|
||||||
|
|
||||||
let (newborns, terminated, amount) = match mode {
|
let (newborns, terminated, amount) = match mode {
|
||||||
ReparseMode::Code => reparse_code_block(
|
ReparseMode::Code => reparse_code_block(
|
||||||
prefix,
|
prefix,
|
||||||
&change.text[newborn_span.start ..],
|
&change.text[newborn_span.start..],
|
||||||
newborn_span.len(),
|
newborn_span.len(),
|
||||||
),
|
),
|
||||||
ReparseMode::Content => reparse_content_block(
|
ReparseMode::Content => reparse_content_block(
|
||||||
prefix,
|
prefix,
|
||||||
&change.text[newborn_span.start ..],
|
&change.text[newborn_span.start..],
|
||||||
newborn_span.len(),
|
newborn_span.len(),
|
||||||
),
|
),
|
||||||
ReparseMode::MarkupElements { at_start, min_indent } => reparse_markup_elements(
|
ReparseMode::MarkupElements { at_start, min_indent } => reparse_markup_elements(
|
||||||
prefix,
|
prefix,
|
||||||
&change.text[newborn_span.start ..],
|
&change.text[newborn_span.start..],
|
||||||
newborn_span.len(),
|
newborn_span.len(),
|
||||||
differential,
|
differential,
|
||||||
&node.children().as_slice()[superseded_start ..],
|
&node.children().as_slice()[superseded_start..],
|
||||||
at_start,
|
at_start,
|
||||||
min_indent,
|
min_indent,
|
||||||
),
|
),
|
||||||
@ -261,7 +264,7 @@ fn replace(
|
|||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
node.replace_children(superseded_start .. superseded_start + amount, newborns)
|
node.replace_children(superseded_start..superseded_start + amount, newborns)
|
||||||
.ok()?;
|
.ok()?;
|
||||||
|
|
||||||
Some(newborn_span)
|
Some(newborn_span)
|
||||||
@ -351,11 +354,7 @@ impl Ahead {
|
|||||||
Self {
|
Self {
|
||||||
pos,
|
pos,
|
||||||
at_start,
|
at_start,
|
||||||
kind: if bounded {
|
kind: if bounded { AheadKind::Normal } else { AheadKind::Unbounded(true) },
|
||||||
AheadKind::Normal
|
|
||||||
} else {
|
|
||||||
AheadKind::Unbounded(true)
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -402,7 +401,7 @@ fn is_bounded(kind: &NodeKind) -> bool {
|
|||||||
/// previous value of the property.
|
/// previous value of the property.
|
||||||
fn next_at_start(kind: &NodeKind, prev: bool) -> bool {
|
fn next_at_start(kind: &NodeKind, prev: bool) -> bool {
|
||||||
match kind {
|
match kind {
|
||||||
NodeKind::Space { newlines: (1 ..) } => true,
|
NodeKind::Space { newlines: (1..) } => true,
|
||||||
NodeKind::Space { .. } | NodeKind::LineComment | NodeKind::BlockComment => prev,
|
NodeKind::Space { .. } | NodeKind::LineComment | NodeKind::BlockComment => prev,
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
|
@@ -2,6 +2,7 @@
 
 pub mod ast;
 pub mod highlight;
+
 mod incremental;
 mod kind;
 mod node;
 
@@ -12,12 +13,12 @@ mod source;
 mod span;
 mod tokens;
 
-pub use kind::*;
-pub use node::*;
-pub use parsing::*;
-pub use source::*;
-pub use span::*;
-pub use tokens::*;
+pub use self::kind::*;
+pub use self::node::*;
+pub use self::parsing::*;
+pub use self::source::*;
+pub use self::span::*;
+pub use self::tokens::*;
 
 use incremental::reparse;
 use parser::*;
@ -291,16 +291,16 @@ impl InnerNode {
|
|||||||
let mut start = within.start;
|
let mut start = within.start;
|
||||||
if range.is_none() {
|
if range.is_none() {
|
||||||
let end = start + stride;
|
let end = start + stride;
|
||||||
self.data.numberize(id, start .. end)?;
|
self.data.numberize(id, start..end)?;
|
||||||
self.upper = within.end;
|
self.upper = within.end;
|
||||||
start = end;
|
start = end;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Number the children.
|
// Number the children.
|
||||||
let len = self.children.len();
|
let len = self.children.len();
|
||||||
for child in &mut self.children[range.unwrap_or(0 .. len)] {
|
for child in &mut self.children[range.unwrap_or(0..len)] {
|
||||||
let end = start + child.descendants() as u64 * stride;
|
let end = start + child.descendants() as u64 * stride;
|
||||||
child.numberize(id, start .. end)?;
|
child.numberize(id, start..end)?;
|
||||||
start = end;
|
start = end;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -377,8 +377,8 @@ impl InnerNode {
|
|||||||
// - or if we were erroneous before due to a non-superseded node.
|
// - or if we were erroneous before due to a non-superseded node.
|
||||||
self.erroneous = replacement.iter().any(SyntaxNode::erroneous)
|
self.erroneous = replacement.iter().any(SyntaxNode::erroneous)
|
||||||
|| (self.erroneous
|
|| (self.erroneous
|
||||||
&& (self.children[.. range.start].iter().any(SyntaxNode::erroneous))
|
&& (self.children[..range.start].iter().any(SyntaxNode::erroneous))
|
||||||
|| self.children[range.end ..].iter().any(SyntaxNode::erroneous));
|
|| self.children[range.end..].iter().any(SyntaxNode::erroneous));
|
||||||
|
|
||||||
// Perform the replacement.
|
// Perform the replacement.
|
||||||
let replacement_count = replacement.len();
|
let replacement_count = replacement.len();
|
||||||
@ -392,7 +392,7 @@ impl InnerNode {
|
|||||||
let max_left = range.start;
|
let max_left = range.start;
|
||||||
let max_right = self.children.len() - range.end;
|
let max_right = self.children.len() - range.end;
|
||||||
loop {
|
loop {
|
||||||
let renumber = range.start - left .. range.end + right;
|
let renumber = range.start - left..range.end + right;
|
||||||
|
|
||||||
// The minimum assignable number is either
|
// The minimum assignable number is either
|
||||||
// - the upper bound of the node right before the to-be-renumbered
|
// - the upper bound of the node right before the to-be-renumbered
|
||||||
@ -416,7 +416,7 @@ impl InnerNode {
|
|||||||
.map_or(self.upper(), |next| next.span().number());
|
.map_or(self.upper(), |next| next.span().number());
|
||||||
|
|
||||||
// Try to renumber.
|
// Try to renumber.
|
||||||
let within = start_number .. end_number;
|
let within = start_number..end_number;
|
||||||
let id = self.span().source();
|
let id = self.span().source();
|
||||||
if self.numberize(id, Some(renumber), within).is_ok() {
|
if self.numberize(id, Some(renumber), within).is_ok() {
|
||||||
return Ok(());
|
return Ok(());
|
||||||
@ -529,7 +529,7 @@ impl NodeData {
|
|||||||
|
|
||||||
/// If the span points into this node, convert it to a byte range.
|
/// If the span points into this node, convert it to a byte range.
|
||||||
fn range(&self, span: Span, offset: usize) -> Option<Range<usize>> {
|
fn range(&self, span: Span, offset: usize) -> Option<Range<usize>> {
|
||||||
(self.span == span).then(|| offset .. offset + self.len())
|
(self.span == span).then(|| offset..offset + self.len())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -97,7 +97,7 @@ impl<'s> Parser<'s> {
|
|||||||
// Trailing trivia should not be wrapped into the new node.
|
// Trailing trivia should not be wrapped into the new node.
|
||||||
let idx = self.children.len();
|
let idx = self.children.len();
|
||||||
self.children.push(SyntaxNode::default());
|
self.children.push(SyntaxNode::default());
|
||||||
self.children.extend(children.drain(until.0 ..));
|
self.children.extend(children.drain(until.0..));
|
||||||
self.children[idx] = InnerNode::with_children(kind, children).into();
|
self.children[idx] = InnerNode::with_children(kind, children).into();
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -177,7 +177,11 @@ impl<'s> Parser<'s> {
|
|||||||
|
|
||||||
/// Peek at the current token without consuming it.
|
/// Peek at the current token without consuming it.
|
||||||
pub fn peek(&self) -> Option<&NodeKind> {
|
pub fn peek(&self) -> Option<&NodeKind> {
|
||||||
if self.eof { None } else { self.current.as_ref() }
|
if self.eof {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
self.current.as_ref()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Peek at the current token, but only if it follows immediately after the
|
/// Peek at the current token, but only if it follows immediately after the
|
||||||
@ -192,7 +196,7 @@ impl<'s> Parser<'s> {
|
|||||||
|
|
||||||
/// Peek at the source of the current token.
|
/// Peek at the source of the current token.
|
||||||
pub fn peek_src(&self) -> &'s str {
|
pub fn peek_src(&self) -> &'s str {
|
||||||
self.get(self.current_start() .. self.current_end())
|
self.get(self.current_start()..self.current_end())
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Obtain a range of the source code.
|
/// Obtain a range of the source code.
|
||||||
@ -303,7 +307,7 @@ impl<'s> Parser<'s> {
|
|||||||
if group_mode != TokenMode::Markup {
|
if group_mode != TokenMode::Markup {
|
||||||
let start = self.trivia_start().0;
|
let start = self.trivia_start().0;
|
||||||
target = self.current_start
|
target = self.current_start
|
||||||
- self.children[start ..].iter().map(SyntaxNode::len).sum::<usize>();
|
- self.children[start..].iter().map(SyntaxNode::len).sum::<usize>();
|
||||||
self.children.truncate(start);
|
self.children.truncate(start);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -466,7 +470,7 @@ impl Marker {
|
|||||||
/// with the given `kind`.
|
/// with the given `kind`.
|
||||||
pub fn end(self, p: &mut Parser, kind: NodeKind) {
|
pub fn end(self, p: &mut Parser, kind: NodeKind) {
|
||||||
let until = p.trivia_start().0.max(self.0);
|
let until = p.trivia_start().0.max(self.0);
|
||||||
let children = p.children.drain(self.0 .. until).collect();
|
let children = p.children.drain(self.0..until).collect();
|
||||||
p.children
|
p.children
|
||||||
.insert(self.0, InnerNode::with_children(kind, children).into());
|
.insert(self.0, InnerNode::with_children(kind, children).into());
|
||||||
}
|
}
|
||||||
@ -476,7 +480,7 @@ impl Marker {
|
|||||||
where
|
where
|
||||||
F: FnMut(&SyntaxNode) -> Result<(), &'static str>,
|
F: FnMut(&SyntaxNode) -> Result<(), &'static str>,
|
||||||
{
|
{
|
||||||
for child in &mut p.children[self.0 ..] {
|
for child in &mut p.children[self.0..] {
|
||||||
// Don't expose errors.
|
// Don't expose errors.
|
||||||
if child.kind().is_error() {
|
if child.kind().is_error() {
|
||||||
continue;
|
continue;
|
||||||
|
@ -90,7 +90,7 @@ pub(crate) fn reparse_markup_elements(
|
|||||||
let mut stopped = false;
|
let mut stopped = false;
|
||||||
|
|
||||||
'outer: while !p.eof() {
|
'outer: while !p.eof() {
|
||||||
if let Some(NodeKind::Space { newlines: (1 ..) }) = p.peek() {
|
if let Some(NodeKind::Space { newlines: (1..) }) = p.peek() {
|
||||||
if p.column(p.current_end()) < min_indent {
|
if p.column(p.current_end()) < min_indent {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
@ -167,7 +167,7 @@ fn markup_indented(p: &mut Parser, min_indent: usize) {
|
|||||||
|
|
||||||
while !p.eof() {
|
while !p.eof() {
|
||||||
match p.peek() {
|
match p.peek() {
|
||||||
Some(NodeKind::Space { newlines: (1 ..) })
|
Some(NodeKind::Space { newlines: (1..) })
|
||||||
if p.column(p.current_end()) < min_indent =>
|
if p.column(p.current_end()) < min_indent =>
|
||||||
{
|
{
|
||||||
break;
|
break;
|
||||||
@ -195,7 +195,7 @@ where
|
|||||||
p.perform(NodeKind::Markup { min_indent: usize::MAX }, |p| {
|
p.perform(NodeKind::Markup { min_indent: usize::MAX }, |p| {
|
||||||
let mut at_start = false;
|
let mut at_start = false;
|
||||||
while let Some(kind) = p.peek() {
|
while let Some(kind) = p.peek() {
|
||||||
if let NodeKind::Space { newlines: (1 ..) } = kind {
|
if let NodeKind::Space { newlines: (1..) } = kind {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -210,11 +210,7 @@ where
 
 /// Parse a markup node.
 fn markup_node(p: &mut Parser, at_start: &mut bool) {
-    let token = match p.peek() {
-        Some(t) => t,
-        None => return,
-    };
+    let Some(token) = p.peek() else { return };
 
     match token {
         // Whitespace.
         NodeKind::Space { newlines } => {
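
The `let Some(token) = p.peek() else { return };` form above is the let-else syntax stabilized in Rust 1.65; it replaces the match-with-early-return shown on the removed lines. A standalone sketch of the pattern:

fn first_char(input: &str) -> Option<char> {
    // Bind the happy path, or bail out early on the `else` branch.
    let Some(first) = input.chars().next() else {
        return None;
    };
    Some(first.to_ascii_uppercase())
}

fn main() {
    assert_eq!(first_char("typst"), Some('T'));
    assert_eq!(first_char(""), None);
    println!("ok");
}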
@ -316,7 +312,7 @@ fn heading(p: &mut Parser, at_start: bool) {
|
|||||||
markup_line(p, |kind| matches!(kind, NodeKind::Label(_)));
|
markup_line(p, |kind| matches!(kind, NodeKind::Label(_)));
|
||||||
marker.end(p, NodeKind::Heading);
|
marker.end(p, NodeKind::Heading);
|
||||||
} else {
|
} else {
|
||||||
let text = p.get(current_start .. p.prev_end()).into();
|
let text = p.get(current_start..p.prev_end()).into();
|
||||||
marker.convert(p, NodeKind::Text(text));
|
marker.convert(p, NodeKind::Text(text));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -420,12 +416,9 @@ fn math_node_prec(p: &mut Parser, min_prec: usize, stop: Option<NodeKind>) {
|
|||||||
Some(NodeKind::Underscore) => {
|
Some(NodeKind::Underscore) => {
|
||||||
(NodeKind::Script, 2, Assoc::Right, Some(NodeKind::Hat))
|
(NodeKind::Script, 2, Assoc::Right, Some(NodeKind::Hat))
|
||||||
}
|
}
|
||||||
Some(NodeKind::Hat) => (
|
Some(NodeKind::Hat) => {
|
||||||
NodeKind::Script,
|
(NodeKind::Script, 2, Assoc::Right, Some(NodeKind::Underscore))
|
||||||
2,
|
}
|
||||||
Assoc::Right,
|
|
||||||
Some(NodeKind::Underscore),
|
|
||||||
),
|
|
||||||
Some(NodeKind::Slash) => (NodeKind::Frac, 1, Assoc::Left, None),
|
Some(NodeKind::Slash) => (NodeKind::Frac, 1, Assoc::Left, None),
|
||||||
_ => break,
|
_ => break,
|
||||||
};
|
};
|
||||||
@ -454,11 +447,7 @@ fn math_node_prec(p: &mut Parser, min_prec: usize, stop: Option<NodeKind>) {
|
|||||||
|
|
||||||
/// Parse a primary math node.
|
/// Parse a primary math node.
|
||||||
fn math_primary(p: &mut Parser) {
|
fn math_primary(p: &mut Parser) {
|
||||||
let token = match p.peek() {
|
let Some(token) = p.peek() else { return };
|
||||||
Some(t) => t,
|
|
||||||
None => return,
|
|
||||||
};
|
|
||||||
|
|
||||||
match token {
|
match token {
|
||||||
// Spaces, atoms and expressions.
|
// Spaces, atoms and expressions.
|
||||||
NodeKind::Space { .. }
|
NodeKind::Space { .. }
|
||||||
@ -652,7 +641,6 @@ fn literal(p: &mut Parser) -> bool {
|
|||||||
p.eat();
|
p.eat();
|
||||||
true
|
true
|
||||||
}
|
}
|
||||||
|
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -724,21 +712,26 @@ enum CollectionKind {
|
|||||||
/// Returns the length of the collection and whether the literal contained any
|
/// Returns the length of the collection and whether the literal contained any
|
||||||
/// commas.
|
/// commas.
|
||||||
fn collection(p: &mut Parser, keyed: bool) -> (CollectionKind, usize) {
|
fn collection(p: &mut Parser, keyed: bool) -> (CollectionKind, usize) {
|
||||||
let mut kind = None;
|
let mut collection_kind = None;
|
||||||
let mut items = 0;
|
let mut items = 0;
|
||||||
let mut can_group = true;
|
let mut can_group = true;
|
||||||
let mut missing_coma: Option<Marker> = None;
|
let mut missing_coma: Option<Marker> = None;
|
||||||
|
|
||||||
while !p.eof() {
|
while !p.eof() {
|
||||||
if let Ok(item_kind) = item(p, keyed) {
|
let Ok(item_kind) = item(p, keyed) else {
|
||||||
|
p.eat_if(NodeKind::Comma);
|
||||||
|
collection_kind = Some(CollectionKind::Group);
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
|
||||||
match item_kind {
|
match item_kind {
|
||||||
NodeKind::Spread => can_group = false,
|
NodeKind::Spread => can_group = false,
|
||||||
NodeKind::Named if kind.is_none() => {
|
NodeKind::Named if collection_kind.is_none() => {
|
||||||
kind = Some(CollectionKind::Named);
|
collection_kind = Some(CollectionKind::Named);
|
||||||
can_group = false;
|
can_group = false;
|
||||||
}
|
}
|
||||||
_ if kind.is_none() => {
|
_ if collection_kind.is_none() => {
|
||||||
kind = Some(CollectionKind::Positional);
|
collection_kind = Some(CollectionKind::Positional);
|
||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
@ -758,16 +751,12 @@ fn collection(p: &mut Parser, keyed: bool) -> (CollectionKind, usize) {
|
|||||||
} else {
|
} else {
|
||||||
missing_coma = Some(p.trivia_start());
|
missing_coma = Some(p.trivia_start());
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
p.eat_if(NodeKind::Comma);
|
|
||||||
kind = Some(CollectionKind::Group);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let kind = if can_group && items == 1 {
|
let kind = if can_group && items == 1 {
|
||||||
CollectionKind::Group
|
CollectionKind::Group
|
||||||
} else {
|
} else {
|
||||||
kind.unwrap_or(CollectionKind::Positional)
|
collection_kind.unwrap_or(CollectionKind::Positional)
|
||||||
};
|
};
|
||||||
|
|
||||||
(kind, items)
|
(kind, items)
|
||||||
|
@ -30,7 +30,6 @@ pub fn resolve_string(string: &str) -> EcoString {
|
|||||||
None => out.push_str(s.from(start)),
|
None => out.push_str(s.from(start)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
_ => out.push_str(s.from(start)),
|
_ => out.push_str(s.from(start)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -66,10 +65,7 @@ pub fn resolve_raw(column: usize, backticks: usize, text: &str) -> RawKind {
|
|||||||
/// Parse the lang tag and return it alongside the remaining inner raw text.
|
/// Parse the lang tag and return it alongside the remaining inner raw text.
|
||||||
fn split_at_lang_tag(raw: &str) -> (&str, &str) {
|
fn split_at_lang_tag(raw: &str) -> (&str, &str) {
|
||||||
let mut s = Scanner::new(raw);
|
let mut s = Scanner::new(raw);
|
||||||
(
|
(s.eat_until(|c: char| c == '`' || c.is_whitespace() || is_newline(c)), s.after())
|
||||||
s.eat_until(|c: char| c == '`' || c.is_whitespace() || is_newline(c)),
|
|
||||||
s.after(),
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Trim raw text and splits it into lines.
|
/// Trim raw text and splits it into lines.
|
||||||
@ -94,7 +90,7 @@ fn trim_and_split_raw(column: usize, mut raw: &str) -> (String, bool) {
|
|||||||
.take_while(|c| c.is_whitespace())
|
.take_while(|c| c.is_whitespace())
|
||||||
.map(char::len_utf8)
|
.map(char::len_utf8)
|
||||||
.sum();
|
.sum();
|
||||||
*line = &line[offset ..];
|
*line = &line[offset..];
|
||||||
}
|
}
|
||||||
|
|
||||||
let had_newline = lines.len() > 1;
|
let had_newline = lines.len() > 1;
|
||||||
@ -127,13 +123,13 @@ fn split_lines(text: &str) -> Vec<&str> {
|
|||||||
s.eat_if('\n');
|
s.eat_if('\n');
|
||||||
}
|
}
|
||||||
|
|
||||||
lines.push(&text[start .. end]);
|
lines.push(&text[start..end]);
|
||||||
start = s.cursor();
|
start = s.cursor();
|
||||||
}
|
}
|
||||||
end = s.cursor();
|
end = s.cursor();
|
||||||
}
|
}
|
||||||
|
|
||||||
lines.push(&text[start ..]);
|
lines.push(&text[start..]);
|
||||||
lines
|
lines
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -123,13 +123,13 @@ impl Source {
|
|||||||
self.lines.truncate(line + 1);
|
self.lines.truncate(line + 1);
|
||||||
|
|
||||||
// Handle adjoining of \r and \n.
|
// Handle adjoining of \r and \n.
|
||||||
if self.text[.. start_byte].ends_with('\r') && with.starts_with('\n') {
|
if self.text[..start_byte].ends_with('\r') && with.starts_with('\n') {
|
||||||
self.lines.pop();
|
self.lines.pop();
|
||||||
}
|
}
|
||||||
|
|
||||||
// Recalculate the line starts after the edit.
|
// Recalculate the line starts after the edit.
|
||||||
self.lines
|
self.lines
|
||||||
.extend(lines(start_byte, start_utf16, &self.text[start_byte ..]));
|
.extend(lines(start_byte, start_utf16, &self.text[start_byte..]));
|
||||||
|
|
||||||
// Incrementally reparse the replaced range.
|
// Incrementally reparse the replaced range.
|
||||||
let mut root = std::mem::take(&mut self.root).into_inner();
|
let mut root = std::mem::take(&mut self.root).into_inner();
|
||||||
@ -146,7 +146,7 @@ impl Source {
|
|||||||
/// Get the length of the file in UTF-16 code units.
|
/// Get the length of the file in UTF-16 code units.
|
||||||
pub fn len_utf16(&self) -> usize {
|
pub fn len_utf16(&self) -> usize {
|
||||||
let last = self.lines.last().unwrap();
|
let last = self.lines.last().unwrap();
|
||||||
last.utf16_idx + self.text[last.byte_idx ..].len_utf16()
|
last.utf16_idx + self.text[last.byte_idx..].len_utf16()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get the length of the file in lines.
|
/// Get the length of the file in lines.
|
||||||
@ -167,7 +167,7 @@ impl Source {
|
|||||||
pub fn byte_to_utf16(&self, byte_idx: usize) -> Option<usize> {
|
pub fn byte_to_utf16(&self, byte_idx: usize) -> Option<usize> {
|
||||||
let line_idx = self.byte_to_line(byte_idx)?;
|
let line_idx = self.byte_to_line(byte_idx)?;
|
||||||
let line = self.lines.get(line_idx)?;
|
let line = self.lines.get(line_idx)?;
|
||||||
let head = self.text.get(line.byte_idx .. byte_idx)?;
|
let head = self.text.get(line.byte_idx..byte_idx)?;
|
||||||
Some(line.utf16_idx + head.len_utf16())
|
Some(line.utf16_idx + head.len_utf16())
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -188,7 +188,7 @@ impl Source {
|
|||||||
pub fn byte_to_column(&self, byte_idx: usize) -> Option<usize> {
|
pub fn byte_to_column(&self, byte_idx: usize) -> Option<usize> {
|
||||||
let line = self.byte_to_line(byte_idx)?;
|
let line = self.byte_to_line(byte_idx)?;
|
||||||
let start = self.line_to_byte(line)?;
|
let start = self.line_to_byte(line)?;
|
||||||
let head = self.get(start .. byte_idx)?;
|
let head = self.get(start..byte_idx)?;
|
||||||
Some(head.chars().count())
|
Some(head.chars().count())
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -202,7 +202,7 @@ impl Source {
|
|||||||
)?;
|
)?;
|
||||||
|
|
||||||
let mut k = line.utf16_idx;
|
let mut k = line.utf16_idx;
|
||||||
for (i, c) in self.text[line.byte_idx ..].char_indices() {
|
for (i, c) in self.text[line.byte_idx..].char_indices() {
|
||||||
if k >= utf16_idx {
|
if k >= utf16_idx {
|
||||||
return Some(line.byte_idx + i);
|
return Some(line.byte_idx + i);
|
||||||
}
|
}
|
||||||
@ -212,7 +212,6 @@ impl Source {
|
|||||||
(k == utf16_idx).then(|| self.text.len())
|
(k == utf16_idx).then(|| self.text.len())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
/// Return the byte position at which the given line starts.
|
/// Return the byte position at which the given line starts.
|
||||||
pub fn line_to_byte(&self, line_idx: usize) -> Option<usize> {
|
pub fn line_to_byte(&self, line_idx: usize) -> Option<usize> {
|
||||||
self.lines.get(line_idx).map(|line| line.byte_idx)
|
self.lines.get(line_idx).map(|line| line.byte_idx)
|
||||||
@ -222,7 +221,7 @@ impl Source {
|
|||||||
pub fn line_to_range(&self, line_idx: usize) -> Option<Range<usize>> {
|
pub fn line_to_range(&self, line_idx: usize) -> Option<Range<usize>> {
|
||||||
let start = self.line_to_byte(line_idx)?;
|
let start = self.line_to_byte(line_idx)?;
|
||||||
let end = self.line_to_byte(line_idx + 1).unwrap_or(self.text.len());
|
let end = self.line_to_byte(line_idx + 1).unwrap_or(self.text.len());
|
||||||
Some(start .. end)
|
Some(start..end)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Return the byte index of the given (line, column) pair.
|
/// Return the byte index of the given (line, column) pair.
|
||||||
@ -237,7 +236,7 @@ impl Source {
|
|||||||
let range = self.line_to_range(line_idx)?;
|
let range = self.line_to_range(line_idx)?;
|
||||||
let line = self.get(range.clone())?;
|
let line = self.get(range.clone())?;
|
||||||
let mut chars = line.chars();
|
let mut chars = line.chars();
|
||||||
for _ in 0 .. column_idx {
|
for _ in 0..column_idx {
|
||||||
chars.next();
|
chars.next();
|
||||||
}
|
}
|
||||||
Some(range.start + (line.len() - chars.as_str().len()))
|
Some(range.start + (line.len() - chars.as_str().len()))
|
||||||
@ -312,10 +311,7 @@ fn lines(
|
|||||||
utf16_idx += 1;
|
utf16_idx += 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
Some(Line {
|
Some(Line { byte_idx: byte_offset + s.cursor(), utf16_idx })
|
||||||
byte_idx: byte_offset + s.cursor(),
|
|
||||||
utf16_idx,
|
|
||||||
})
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -328,12 +324,15 @@ mod tests {
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_source_file_new() {
|
fn test_source_file_new() {
|
||||||
let source = Source::detached(TEST);
|
let source = Source::detached(TEST);
|
||||||
assert_eq!(source.lines, [
|
assert_eq!(
|
||||||
|
source.lines,
|
||||||
|
[
|
||||||
Line { byte_idx: 0, utf16_idx: 0 },
|
Line { byte_idx: 0, utf16_idx: 0 },
|
||||||
Line { byte_idx: 7, utf16_idx: 6 },
|
Line { byte_idx: 7, utf16_idx: 6 },
|
||||||
Line { byte_idx: 15, utf16_idx: 12 },
|
Line { byte_idx: 15, utf16_idx: 12 },
|
||||||
Line { byte_idx: 18, utf16_idx: 15 },
|
Line { byte_idx: 18, utf16_idx: 15 },
|
||||||
]);
|
]
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -411,20 +410,20 @@ mod tests {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Test inserting at the begining.
|
// Test inserting at the begining.
|
||||||
test("abc\n", 0 .. 0, "hi\n", "hi\nabc\n");
|
test("abc\n", 0..0, "hi\n", "hi\nabc\n");
|
||||||
test("\nabc", 0 .. 0, "hi\r", "hi\r\nabc");
|
test("\nabc", 0..0, "hi\r", "hi\r\nabc");
|
||||||
|
|
||||||
// Test editing in the middle.
|
// Test editing in the middle.
|
||||||
test(TEST, 4 .. 16, "❌", "ä\tc❌i\rjkl");
|
test(TEST, 4..16, "❌", "ä\tc❌i\rjkl");
|
||||||
|
|
||||||
// Test appending.
|
// Test appending.
|
||||||
test("abc\ndef", 7 .. 7, "hi", "abc\ndefhi");
|
test("abc\ndef", 7..7, "hi", "abc\ndefhi");
|
||||||
test("abc\ndef\n", 8 .. 8, "hi", "abc\ndef\nhi");
|
test("abc\ndef\n", 8..8, "hi", "abc\ndef\nhi");
|
||||||
|
|
||||||
// Test appending with adjoining \r and \n.
|
// Test appending with adjoining \r and \n.
|
||||||
test("abc\ndef\r", 8 .. 8, "\nghi", "abc\ndef\r\nghi");
|
test("abc\ndef\r", 8..8, "\nghi", "abc\ndef\r\nghi");
|
||||||
|
|
||||||
// Test removing everything.
|
// Test removing everything.
|
||||||
test(TEST, 0 .. 21, "", "");
|
test(TEST, 0..21, "", "");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -70,7 +70,7 @@ impl Span {
|
|||||||
const DETACHED: u64 = 1;
|
const DETACHED: u64 = 1;
|
||||||
|
|
||||||
/// The full range of numbers available to spans.
|
/// The full range of numbers available to spans.
|
||||||
pub const FULL: Range<u64> = 2 .. (1 << Self::BITS);
|
pub const FULL: Range<u64> = 2..(1 << Self::BITS);
|
||||||
|
|
||||||
/// Create a new span from a source id and a unique number.
|
/// Create a new span from a source id and a unique number.
|
||||||
///
|
///
|
||||||
|
@ -373,7 +373,7 @@ impl<'s> Tokens<'s> {
|
|||||||
NodeKind::Raw(Arc::new(resolve_raw(
|
NodeKind::Raw(Arc::new(resolve_raw(
|
||||||
column,
|
column,
|
||||||
backticks,
|
backticks,
|
||||||
self.s.get(start .. end),
|
self.s.get(start..end),
|
||||||
)))
|
)))
|
||||||
} else {
|
} else {
|
||||||
self.terminated = false;
|
self.terminated = false;
|
||||||
@ -548,7 +548,7 @@ impl<'s> Tokens<'s> {
|
|||||||
self.s.eat_while(char::is_ascii_alphanumeric);
|
self.s.eat_while(char::is_ascii_alphanumeric);
|
||||||
}
|
}
|
||||||
|
|
||||||
let number = self.s.get(start .. suffix_start);
|
let number = self.s.get(start..suffix_start);
|
||||||
let suffix = self.s.from(suffix_start);
|
let suffix = self.s.from(suffix_start);
|
||||||
|
|
||||||
// Find out whether it is a simple number.
|
// Find out whether it is a simple number.
|
||||||
@ -558,9 +558,8 @@ impl<'s> Tokens<'s> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let v = match number.parse::<f64>() {
|
let Ok(v) = number.parse::<f64>() else {
|
||||||
Ok(v) => v,
|
return NodeKind::Error(ErrorPos::Full, "invalid number".into());
|
||||||
Err(_) => return NodeKind::Error(ErrorPos::Full, "invalid number".into()),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
match suffix {
|
match suffix {
|
||||||
@@ -636,7 +635,7 @@ fn keyword(ident: &str) -> Option<NodeKind> {
 #[inline]
 fn column(string: &str, index: usize, offset: usize) -> usize {
 let mut apply_offset = false;
-let res = string[.. index]
+let res = string[..index]
 .char_indices()
 .rev()
 .take_while(|&(_, c)| !is_newline(c))
@@ -653,7 +652,11 @@ fn column(string: &str, index: usize, offset: usize) -> usize {
 apply_offset = true;
 }

-if apply_offset { res + offset } else { res }
+if apply_offset {
+res + offset
+} else {
+res
+}
 }

 /// Whether this character denotes a newline.
@@ -767,8 +770,8 @@ mod tests {
 // - mode in which the suffix is applicable
 // - the suffix string
 // - the resulting suffix NodeKind
-fn suffixes()
--> impl Iterator<Item = (char, Option<TokenMode>, &'static str, NodeKind)> {
+fn suffixes(
+) -> impl Iterator<Item = (char, Option<TokenMode>, &'static str, NodeKind)> {
 [
 // Whitespace suffixes.
 (' ', None, " ", Space(0)),
@@ -68,7 +68,7 @@ impl EcoString {
 let len = slice.len();
 Self(if len <= LIMIT {
 let mut buf = [0; LIMIT];
-buf[.. len].copy_from_slice(slice.as_bytes());
+buf[..len].copy_from_slice(slice.as_bytes());
 Repr::Small { buf, len: len as u8 }
 } else {
 Repr::Large(Arc::new(s.into()))
@@ -116,7 +116,7 @@ impl EcoString {
 let prev = usize::from(*len);
 let new = prev + string.len();
 if new <= LIMIT {
-buf[prev .. new].copy_from_slice(string.as_bytes());
+buf[prev..new].copy_from_slice(string.as_bytes());
 *len = new as u8;
 } else {
 let mut spilled = String::with_capacity(new);
@@ -161,7 +161,7 @@ impl EcoString {
 pub fn to_lowercase(&self) -> Self {
 if let Repr::Small { mut buf, len } = self.0 {
 if self.is_ascii() {
-buf[.. usize::from(len)].make_ascii_lowercase();
+buf[..usize::from(len)].make_ascii_lowercase();
 return Self(Repr::Small { buf, len });
 }
 }
@@ -173,7 +173,7 @@ impl EcoString {
 pub fn to_uppercase(&self) -> Self {
 if let Repr::Small { mut buf, len } = self.0 {
 if self.is_ascii() {
-buf[.. usize::from(len)].make_ascii_uppercase();
+buf[..usize::from(len)].make_ascii_uppercase();
 return Self(Repr::Small { buf, len });
 }
 }
@@ -191,10 +191,10 @@ impl EcoString {
 let prev = usize::from(len);
 let new = prev.saturating_mul(n);
 if new <= LIMIT {
-let src = &buf[.. prev];
+let src = &buf[..prev];
 let mut buf = [0; LIMIT];
-for i in 0 .. n {
-buf[prev * i .. prev * (i + 1)].copy_from_slice(src);
+for i in 0..n {
+buf[prev * i..prev * (i + 1)].copy_from_slice(src);
 }
 return Self(Repr::Small { buf, len: new as u8 });
 }
@@ -217,7 +217,7 @@ impl Deref for EcoString {
 // Furthermore, we still do the bounds-check on the len in case
 // it gets corrupted somehow.
 Repr::Small { buf, len } => unsafe {
-std::str::from_utf8_unchecked(&buf[.. usize::from(*len)])
+std::str::from_utf8_unchecked(&buf[..usize::from(*len)])
 },
 Repr::Large(string) => string.as_str(),
 }
@@ -398,9 +398,9 @@ mod tests {
 assert_eq!(EcoString::from("abc"), "abc");

 // Test around the inline limit.
-assert_eq!(EcoString::from(&ALPH[.. LIMIT - 1]), ALPH[.. LIMIT - 1]);
-assert_eq!(EcoString::from(&ALPH[.. LIMIT]), ALPH[.. LIMIT]);
-assert_eq!(EcoString::from(&ALPH[.. LIMIT + 1]), ALPH[.. LIMIT + 1]);
+assert_eq!(EcoString::from(&ALPH[..LIMIT - 1]), ALPH[..LIMIT - 1]);
+assert_eq!(EcoString::from(&ALPH[..LIMIT]), ALPH[..LIMIT]);
+assert_eq!(EcoString::from(&ALPH[..LIMIT + 1]), ALPH[..LIMIT + 1]);

 // Test heap string.
 assert_eq!(EcoString::from(ALPH), ALPH);
@@ -443,7 +443,7 @@ mod tests {
 assert_eq!(v, "Hello World");

 // Remove one-by-one.
-for _ in 0 .. 10 {
+for _ in 0..10 {
 v.pop();
 }

@@ -462,7 +462,7 @@ mod tests {
 fn test_str_index() {
 // Test that we can use the index syntax.
 let v = EcoString::from("abc");
-assert_eq!(&v[.. 2], "ab");
+assert_eq!(&v[..2], "ab");
 }

 #[test]
@@ -5,7 +5,7 @@
 //! pointer metadata APIs are stable, we should definitely move to them:
 //! <https://github.com/rust-lang/rust/issues/81513>

-use std::alloc;
+use std::alloc::Layout;
 use std::mem;

 /// Create a fat pointer from a data address and a vtable address.
@@ -15,12 +15,8 @@ use std::mem;
 /// to a value whose type implements the trait of `T` and the `vtable` must have
 /// been extracted with [`vtable`].
 pub unsafe fn from_raw_parts<T: ?Sized>(data: *const (), vtable: *const ()) -> *const T {
-debug_assert_eq!(
-alloc::Layout::new::<*const T>(),
-alloc::Layout::new::<FatPointer>(),
-);
-
 let fat = FatPointer { data, vtable };
+debug_assert_eq!(Layout::new::<*const T>(), Layout::new::<FatPointer>());
 mem::transmute_copy::<FatPointer, *const T>(&fat)
 }

@@ -31,12 +27,8 @@ pub unsafe fn from_raw_parts<T: ?Sized>(data: *const (), vtable: *const ()) -> *
 /// to a value whose type implements the trait of `T` and the `vtable` must have
 /// been extracted with [`vtable`].
 pub unsafe fn from_raw_parts_mut<T: ?Sized>(data: *mut (), vtable: *const ()) -> *mut T {
-debug_assert_eq!(
-alloc::Layout::new::<*mut T>(),
-alloc::Layout::new::<FatPointer>(),
-);
-
 let fat = FatPointer { data, vtable };
+debug_assert_eq!(Layout::new::<*mut T>(), Layout::new::<FatPointer>());
 mem::transmute_copy::<FatPointer, *mut T>(&fat)
 }

@@ -45,11 +37,7 @@ pub unsafe fn from_raw_parts_mut<T: ?Sized>(data: *mut (), vtable: *const ()) ->
 /// # Safety
 /// Must only be called when `T` is a `dyn Trait`.
 pub unsafe fn vtable<T: ?Sized>(ptr: *const T) -> *const () {
-debug_assert_eq!(
-alloc::Layout::new::<*const T>(),
-alloc::Layout::new::<FatPointer>(),
-);
-
+debug_assert_eq!(Layout::new::<*const T>(), Layout::new::<FatPointer>());
 mem::transmute_copy::<*const T, FatPointer>(&ptr).vtable
 }

@@ -2,13 +2,13 @@

 pub mod fat;

-pub use buffer::Buffer;
-pub use eco::{format_eco, EcoString};
-
 #[macro_use]
 mod eco;
 mod buffer;

+pub use buffer::Buffer;
+pub use eco::{format_eco, EcoString};
+
 use std::any::TypeId;
 use std::fmt::{self, Debug, Formatter};
 use std::hash::Hash;
@@ -58,7 +58,7 @@ fn bench_parse(iai: &mut Iai) {

 fn bench_edit(iai: &mut Iai) {
 let mut source = Source::detached(TEXT);
-iai.run(|| black_box(source.edit(1168 .. 1171, "_Uhr_")));
+iai.run(|| black_box(source.edit(1168..1171, "_Uhr_")));
 }

 fn bench_highlight(iai: &mut Iai) {
@@ -66,7 +66,7 @@ fn bench_highlight(iai: &mut Iai) {
 iai.run(|| {
 typst::syntax::highlight::highlight_categories(
 source.root(),
-0 .. source.len_bytes(),
+0..source.len_bytes(),
 &mut |_, _| {},
 )
 });
@@ -75,13 +75,8 @@ fn main() {
 let pdf_path =
 args.pdf.then(|| Path::new(PDF_DIR).join(path).with_extension("pdf"));

-ok += test(
-&mut world,
-&src_path,
-&png_path,
-&ref_path,
-pdf_path.as_deref(),
-) as usize;
+ok += test(&mut world, &src_path, &png_path, &ref_path, pdf_path.as_deref())
+as usize;
 }

 if len > 1 {
@@ -153,10 +148,8 @@ fn config() -> Config {
 let mut styles = typst_library::styles();
 styles.set(PageNode::WIDTH, Smart::Custom(Abs::pt(120.0).into()));
 styles.set(PageNode::HEIGHT, Smart::Auto);
-styles.set(
-PageNode::MARGINS,
-Sides::splat(Some(Smart::Custom(Abs::pt(10.0).into()))),
-);
+styles
+.set(PageNode::MARGINS, Sides::splat(Some(Smart::Custom(Abs::pt(10.0).into()))));
 styles.set(TextNode::SIZE, TextSize(Abs::pt(10.0).into()));

 // Hook up helpers into the global scope.
@@ -217,7 +210,7 @@ impl TestWorld {
 .filter(|entry| entry.file_type().is_file())
 {
 let buffer: Buffer = fs::read(entry.path()).unwrap().into();
-for index in 0 .. ttf_parser::fonts_in_collection(&buffer).unwrap_or(1) {
+for index in 0..ttf_parser::fonts_in_collection(&buffer).unwrap_or(1) {
 fonts.push(Font::new(buffer.clone(), index).unwrap())
 }
 }
@@ -480,18 +473,12 @@ fn parse_metadata(source: &Source) -> (Option<bool>, Vec<(Range<usize>, String)>
 compare_ref = Some(true);
 }

-let rest = if let Some(rest) = line.strip_prefix("// Error: ") {
-rest
-} else {
-continue;
-};
-
 fn num(s: &mut Scanner) -> usize {
 s.eat_while(char::is_numeric).parse().unwrap()
 }

 let comments =
-lines[i ..].iter().take_while(|line| line.starts_with("//")).count();
+lines[i..].iter().take_while(|line| line.starts_with("//")).count();

 let pos = |s: &mut Scanner| -> usize {
 let first = num(s) - 1;
@@ -501,10 +488,11 @@ fn parse_metadata(source: &Source) -> (Option<bool>, Vec<(Range<usize>, String)>
 source.line_column_to_byte(line, column).unwrap()
 };

+let Some(rest) = line.strip_prefix("// Error: ") else { continue };
 let mut s = Scanner::new(rest);
 let start = pos(&mut s);
 let end = if s.eat_if('-') { pos(&mut s) } else { start };
-let range = start .. end;
+let range = start..end;

 errors.push((range, s.after().trim().to_string()));
 }
@@ -582,10 +570,7 @@ fn test_reparse(text: &str, i: usize, rng: &mut LinearShift) -> bool {
 );
 println!(" Expected reference tree:\n{ref_root:#?}\n");
 println!(" Found incremental tree:\n{incr_root:#?}");
-println!(
-" Full source ({}):\n\"{edited_src:?}\"",
-edited_src.len()
-);
+println!(" Full source ({}):\n\"{edited_src:?}\"", edited_src.len());
 }

 ok &= test_spans(ref_root);
@@ -599,23 +584,23 @@ fn test_reparse(text: &str, i: usize, rng: &mut LinearShift) -> bool {
 };

 let insertions = (text.len() as f64 / 400.0).ceil() as usize;
-for _ in 0 .. insertions {
-let supplement = supplements[pick(0 .. supplements.len())];
-let start = pick(0 .. text.len());
-let end = pick(start .. text.len());
+for _ in 0..insertions {
+let supplement = supplements[pick(0..supplements.len())];
+let start = pick(0..text.len());
+let end = pick(start..text.len());

 if !text.is_char_boundary(start) || !text.is_char_boundary(end) {
 continue;
 }

-ok &= apply(start .. end, supplement);
+ok &= apply(start..end, supplement);
 }

 let source = Source::detached(text);
 let leafs = source.root().leafs();
-let start = source.range(leafs[pick(0 .. leafs.len())].span()).start;
-let supplement = supplements[pick(0 .. supplements.len())];
-ok &= apply(start .. start, supplement);
+let start = source.range(leafs[pick(0..leafs.len())].span()).start;
+let supplement = supplements[pick(0..supplements.len())];
+ok &= apply(start..start, supplement);

 ok
 }
@@ -623,24 +608,21 @@ fn test_reparse(text: &str, i: usize, rng: &mut LinearShift) -> bool {
 /// Ensure that all spans are properly ordered (and therefore unique).
 #[track_caller]
 fn test_spans(root: &SyntaxNode) -> bool {
-test_spans_impl(root, 0 .. u64::MAX)
+test_spans_impl(root, 0..u64::MAX)
 }

 #[track_caller]
 fn test_spans_impl(node: &SyntaxNode, within: Range<u64>) -> bool {
 if !within.contains(&node.span().number()) {
 eprintln!(" Node: {node:#?}");
-eprintln!(
-" Wrong span order: {} not in {within:?} ❌",
-node.span().number(),
-);
+eprintln!(" Wrong span order: {} not in {within:?} ❌", node.span().number(),);
 }

 let start = node.span().number() + 1;
 let mut children = node.children().peekable();
 while let Some(child) = children.next() {
 let end = children.peek().map_or(within.end, |next| next.span().number());
-if !test_spans_impl(child, start .. end) {
+if !test_spans_impl(child, start..end) {
 return false;
 }
 }