Style changes

Laurenz 2022-11-04 09:30:44 +01:00
parent 33928a00dc
commit eb951c008b
65 changed files with 522 additions and 706 deletions

View File

@ -115,10 +115,7 @@ fn parse_args() -> StrResult<Command> {
// Don't allow excess arguments.
let rest = args.finish();
if !rest.is_empty() {
Err(format!(
"unexpected argument{}",
if rest.len() > 1 { "s" } else { "" }
))?;
Err(format!("unexpected argument{}", if rest.len() > 1 { "s" } else { "" }))?;
}
Ok(command)

View File

@ -101,14 +101,12 @@ pub fn mod_(_: &mut Vm, args: &mut Args) -> SourceResult<Value> {
(Value::Int(a), Value::Float(b)) => (a as f64, b),
(Value::Float(a), Value::Int(b)) => (a, b as f64),
(Value::Float(a), Value::Float(b)) => (a, b),
(Value::Int(_), b) | (Value::Float(_), b) => bail!(
span2,
format!("expected integer or float, found {}", b.type_name())
),
(a, _) => bail!(
span1,
format!("expected integer or float, found {}", a.type_name())
),
(Value::Int(_), b) | (Value::Float(_), b) => {
bail!(span2, format!("expected integer or float, found {}", b.type_name()))
}
(a, _) => {
bail!(span1, format!("expected integer or float, found {}", a.type_name()))
}
};
if b == 0.0 {

View File

@ -10,8 +10,7 @@ pub fn luma(_: &mut Vm, args: &mut Args) -> SourceResult<Value> {
/// Create an RGB(A) color.
pub fn rgb(_: &mut Vm, args: &mut Args) -> SourceResult<Value> {
Ok(Value::Color(
if let Some(string) = args.find::<Spanned<EcoString>>()? {
Ok(Value::Color(if let Some(string) = args.find::<Spanned<EcoString>>()? {
match RgbaColor::from_str(&string.v) {
Ok(color) => color.into(),
Err(msg) => bail!(string.span, msg),
@ -22,8 +21,7 @@ pub fn rgb(_: &mut Vm, args: &mut Args) -> SourceResult<Value> {
let Component(b) = args.expect("blue component")?;
let Component(a) = args.eat()?.unwrap_or(Component(255));
RgbaColor::new(r, g, b, a).into()
},
))
}))
}
/// Create a CMYK color.

View File

@ -81,10 +81,7 @@ fn convert_json(value: serde_json::Value) -> Value {
/// Format the user-facing JSON error message.
fn format_json_error(error: serde_json::Error) -> String {
assert!(error.is_syntax() || error.is_eof());
format!(
"failed to parse json file: syntax error in line {}",
error.line()
)
format!("failed to parse json file: syntax error in line {}", error.line())
}
/// Read structured data from an XML file.

View File

@ -5,10 +5,10 @@ mod color;
mod data;
mod string;
pub use calc::*;
pub use color::*;
pub use data::*;
pub use string::*;
pub use self::calc::*;
pub use self::color::*;
pub use self::data::*;
pub use self::string::*;
use comemo::Track;
use typst::model::{Eval, Route, Scopes, Vm};

View File

@ -55,11 +55,7 @@ impl ContentExt for Content {
let mut seq = vec![];
if let Some(above) = above {
seq.push(
layout::VNode {
amount: above.into(),
weak: true,
generated: true,
}
layout::VNode { amount: above.into(), weak: true, generated: true }
.pack(),
);
}
@ -67,11 +63,7 @@ impl ContentExt for Content {
seq.push(self);
if let Some(below) = below {
seq.push(
layout::VNode {
amount: below.into(),
weak: true,
generated: true,
}
layout::VNode { amount: below.into(), weak: true, generated: true }
.pack(),
);
}

View File

@ -5,7 +5,7 @@ mod image;
mod line;
mod shape;
pub use self::hide::*;
pub use self::image::*;
pub use hide::*;
pub use line::*;
pub use shape::*;
pub use self::line::*;
pub use self::shape::*;
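
A note on the `pub use self::...` changes that recur across these module files: under the 2018 edition's path rules, a bare re-export such as `pub use image::*;` next to a `mod image;` can be ambiguous with an external crate of the same name (the renderer touched later in this commit uses the external `image` crate's `Rgba` and `FilterType`), whereas `self::` always resolves to the sibling module. The sketch below is a hypothetical, self-contained illustration, not code from the repository.

// Hypothetical module layout. If an `image` crate were also a dependency,
// `pub use image::*;` here could be rejected as ambiguous; `self::` pins the
// path to the local module and keeps all re-exports uniform in style.
mod image {
    pub struct ImageNode;
}

pub use self::image::*;

fn main() {
    let _node = ImageNode;
}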

View File

@ -74,12 +74,8 @@ impl LayoutBlock for ColumnsNode {
let mut output = Frame::new(Size::new(regions.first.x, height));
let mut cursor = Abs::zero();
for _ in 0 .. columns {
let frame = match frames.next() {
Some(frame) => frame,
None => break,
};
for _ in 0..columns {
let Some(frame) = frames.next() else { break };
if !regions.expand.y {
output.size_mut().y.set_max(frame.height());
}
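
The hunk above is one of several in this commit that replace a `match` with an early `break`/`return` arm by `let ... else`, which became stable in Rust 1.65 (released the day before this commit). A minimal sketch of the pattern, using a made-up helper rather than repository code:

// The `else` branch of `let ... else` must diverge (return, break, continue,
// or panic); on success the binding is available in the enclosing scope.
fn first_positive_doubled(values: &[i32]) -> Option<i32> {
    let Some(&first) = values.iter().find(|&&v| v > 0) else {
        return None;
    };
    Some(first * 2)
}

fn main() {
    assert_eq!(first_positive_doubled(&[-3, 0, 4, 7]), Some(8));
    assert_eq!(first_positive_doubled(&[]), None);
}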

View File

@ -170,13 +170,13 @@ impl<'a> GridLayouter<'a> {
};
// Collect content and gutter columns.
for x in 0 .. c {
for x in 0..c {
cols.push(get_or(tracks.x, x, auto));
cols.push(get_or(gutter.x, x, zero));
}
// Collect content and gutter rows.
for y in 0 .. r {
for y in 0..r {
rows.push(get_or(tracks.y, y, auto));
rows.push(get_or(gutter.y, y, zero));
}
@ -214,7 +214,7 @@ impl<'a> GridLayouter<'a> {
fn layout(mut self) -> SourceResult<Vec<Frame>> {
self.measure_columns()?;
for y in 0 .. self.rows.len() {
for y in 0..self.rows.len() {
// Skip to next region if current one is full, but only for content
// rows, not for gutter rows.
if y % 2 == 0 && self.regions.is_full() {
@ -295,7 +295,7 @@ impl<'a> GridLayouter<'a> {
}
let mut resolved = Abs::zero();
for y in 0 .. self.rows.len() {
for y in 0..self.rows.len() {
if let Some(cell) = self.cell(x, y) {
let size = Size::new(available, self.regions.base.y);
let mut pod =
@ -412,7 +412,7 @@ impl<'a> GridLayouter<'a> {
// eaten up by any fr rows.
if self.fr.is_zero() {
let len = resolved.len();
for (region, target) in self.regions.iter().zip(&mut resolved[.. len - 1]) {
for (region, target) in self.regions.iter().zip(&mut resolved[..len - 1]) {
target.set_max(region.y);
}
}
@ -502,7 +502,7 @@ impl<'a> GridLayouter<'a> {
// Prepare regions.
let size = Size::new(self.used.x, heights[0]);
let mut pod = Regions::one(size, self.regions.base, Axes::splat(true));
pod.backlog = heights[1 ..].to_vec();
pod.backlog = heights[1..].to_vec();
// Layout the row.
let mut pos = Point::zero();

View File

@ -12,17 +12,17 @@ mod spacing;
mod stack;
mod transform;
pub use align::*;
pub use columns::*;
pub use container::*;
pub use flow::*;
pub use grid::*;
pub use pad::*;
pub use page::*;
pub use place::*;
pub use spacing::*;
pub use stack::*;
pub use transform::*;
pub use self::align::*;
pub use self::columns::*;
pub use self::container::*;
pub use self::flow::*;
pub use self::grid::*;
pub use self::pad::*;
pub use self::page::*;
pub use self::place::*;
pub use self::spacing::*;
pub use self::stack::*;
pub use self::transform::*;
use std::mem;
@ -357,7 +357,10 @@ impl<'a> Builder<'a> {
content: &'a Content,
styles: StyleChain<'a>,
) -> SourceResult<bool> {
if let Some(mut realized) = styles.apply(self.world, Target::Node(content))? {
let Some(mut realized) = styles.apply(self.world, Target::Node(content))? else {
return Ok(false);
};
let mut map = StyleMap::new();
let barrier = Barrier::new(content.id());
map.push(StyleEntry::Barrier(barrier));
@ -365,10 +368,8 @@ impl<'a> Builder<'a> {
realized = realized.styled_with_map(map);
let stored = self.scratch.templates.alloc(realized);
self.accept(stored, styles)?;
Ok(true)
} else {
Ok(false)
}
}
fn styled(
@ -466,10 +467,7 @@ impl<'a> DocBuilder<'a> {
impl Default for DocBuilder<'_> {
fn default() -> Self {
Self {
pages: StyleVecBuilder::new(),
keep_next: true,
}
Self { pages: StyleVecBuilder::new(), keep_next: true }
}
}
@ -658,30 +656,25 @@ impl<'a> ListBuilder<'a> {
{
self.items.push(item.clone(), styles);
self.tight &= self.staged.drain(..).all(|(t, _)| !t.is::<ParbreakNode>());
} else {
return false;
return true;
}
} else if !self.items.is_empty()
&& (content.is::<SpaceNode>() || content.is::<ParbreakNode>())
{
self.staged.push((content, styles));
} else {
return false;
return true;
}
true
false
}
fn finish(self, parent: &mut Builder<'a>) -> SourceResult<()> {
let (items, shared) = self.items.finish();
let kind = match items.items().next() {
Some(item) => item.kind(),
None => return Ok(()),
};
let Some(item) = items.items().next() else { return Ok(()) };
let tight = self.tight;
let attached = tight && self.attachable;
let content = match kind {
let content = match item.kind() {
LIST => ListNode::<LIST> { tight, attached, items }.pack(),
ENUM => ListNode::<ENUM> { tight, attached, items }.pack(),
DESC | _ => ListNode::<DESC> { tight, attached, items }.pack(),
@ -765,18 +758,15 @@ impl<'a, T> CollapsingBuilder<'a, T> {
}
if self.last == Last::Weak {
if let Some(i) =
self.staged.iter().position(|(prev_item, _, prev_weakness)| {
let weak = self.staged.iter().position(|(prev_item, _, prev_weakness)| {
prev_weakness.map_or(false, |prev_weakness| {
weakness < prev_weakness
|| (weakness == prev_weakness && item > *prev_item)
})
})
{
self.staged.remove(i);
} else {
return;
}
});
let Some(weak) = weak else { return };
self.staged.remove(weak);
}
self.staged.push((item, styles, Some(weakness)));

View File

@ -111,12 +111,7 @@ impl PageNode {
let pw = size.x - pad.left - pad.right;
let py = size.y - pad.bottom;
for (role, marginal, pos, area) in [
(
Role::Header,
header,
Point::with_x(pad.left),
Size::new(pw, pad.top),
),
(Role::Header, header, Point::with_x(pad.left), Size::new(pw, pad.top)),
(
Role::Footer,
footer,

View File

@ -69,12 +69,7 @@ impl LayoutInline for MathNode {
_: &Regions,
styles: StyleChain,
) -> SourceResult<Vec<Frame>> {
Ok(vec![layout_tex(
&self.texify(),
self.display,
world,
styles,
)?])
Ok(vec![layout_tex(&self.texify(), self.display, world, styles)?])
}
}
@ -181,7 +176,7 @@ fn escape_char(c: char) -> EcoString {
/// Trim grouping parentheses.
fn unparen(s: EcoString) -> EcoString {
if s.starts_with('(') && s.ends_with(')') {
s[1 .. s.len() - 1].into()
s[1..s.len() - 1].into()
} else {
s
}

View File

@ -95,9 +95,9 @@ impl<const L: ListKind> Show for ListNode<L> {
match name {
"tight" => Some(Value::Bool(self.tight)),
"attached" => Some(Value::Bool(self.attached)),
"items" => Some(Value::Array(
self.items.items().map(|item| item.encode()).collect(),
)),
"items" => {
Some(Value::Array(self.items.items().map(|item| item.encode()).collect()))
}
_ => None,
}
}
@ -139,11 +139,7 @@ impl<const L: ListKind> Show for ListNode<L> {
ListItem::List(body) => body.as_ref().clone(),
ListItem::Enum(_, body) => body.as_ref().clone(),
ListItem::Desc(item) => Content::sequence(vec![
HNode {
amount: (-body_indent).into(),
weak: false,
}
.pack(),
HNode { amount: (-body_indent).into(), weak: false }.pack(),
(item.term.clone() + TextNode(':'.into()).pack()).strong(),
SpaceNode.pack(),
item.body.clone(),

View File

@ -6,8 +6,8 @@ mod list;
mod reference;
mod table;
pub use doc::*;
pub use heading::*;
pub use list::*;
pub use reference::*;
pub use table::*;
pub use self::doc::*;
pub use self::heading::*;
pub use self::list::*;
pub use self::reference::*;
pub use self::table::*;

View File

@ -56,13 +56,16 @@ impl<const L: DecoLine> Show for DecoNode<L> {
_: Tracked<dyn World>,
styles: StyleChain,
) -> SourceResult<Content> {
Ok(self.0.clone().styled(TextNode::DECO, Decoration {
Ok(self.0.clone().styled(
TextNode::DECO,
Decoration {
line: L,
stroke: styles.get(Self::STROKE).unwrap_or_default(),
offset: styles.get(Self::OFFSET),
extent: styles.get(Self::EXTENT),
evade: styles.get(Self::EVADE),
}))
},
))
}
}

View File

@ -8,12 +8,12 @@ mod raw;
mod shaping;
mod shift;
pub use deco::*;
pub use link::*;
pub use par::*;
pub use raw::*;
pub use shaping::*;
pub use shift::*;
pub use self::deco::*;
pub use self::link::*;
pub use self::par::*;
pub use self::raw::*;
pub use self::shaping::*;
pub use self::shift::*;
use std::borrow::Cow;
@ -152,7 +152,7 @@ impl TextNode {
if count > 0 {
let mut list = Vec::with_capacity(count);
for _ in 0 .. count {
for _ in 0..count {
list.push(args.find()?.unwrap());
}

View File

@ -222,7 +222,7 @@ impl<'a> Preparation<'a> {
let mut cursor = 0;
for item in &self.items {
let end = cursor + item.len();
if (cursor .. end).contains(&text_offset) {
if (cursor..end).contains(&text_offset) {
return Some(item);
}
cursor = end;
@ -256,7 +256,7 @@ impl<'a> Preparation<'a> {
cursor += len;
}
(expanded, &self.items[start .. end])
(expanded, &self.items[start..end])
}
}
@ -500,11 +500,14 @@ fn prepare<'a>(
regions: &Regions,
styles: StyleChain<'a>,
) -> SourceResult<Preparation<'a>> {
let bidi = BidiInfo::new(text, match styles.get(TextNode::DIR) {
let bidi = BidiInfo::new(
text,
match styles.get(TextNode::DIR) {
Dir::LTR => Some(BidiLevel::ltr()),
Dir::RTL => Some(BidiLevel::rtl()),
_ => None,
});
},
);
let mut cursor = 0;
let mut items = vec![];
@ -514,7 +517,7 @@ fn prepare<'a>(
let end = cursor + segment.len();
match segment {
Segment::Text(_) => {
shape_range(&mut items, world, &bidi, cursor .. end, styles);
shape_range(&mut items, world, &bidi, cursor..end, styles);
}
Segment::Spacing(spacing) => match spacing {
Spacing::Relative(v) => {
@ -574,18 +577,18 @@ fn shape_range<'a>(
let mut cursor = range.start;
// Group by embedding level and script.
for i in cursor .. range.end {
for i in cursor..range.end {
if !bidi.text.is_char_boundary(i) {
continue;
}
let level = bidi.levels[i];
let script =
bidi.text[i ..].chars().next().map_or(Script::Unknown, |c| c.script());
bidi.text[i..].chars().next().map_or(Script::Unknown, |c| c.script());
if level != prev_level || !is_compatible(script, prev_script) {
if cursor < i {
process(&bidi.text[cursor .. i], prev_level);
process(&bidi.text[cursor..i], prev_level);
}
cursor = i;
prev_level = level;
@ -595,7 +598,7 @@ fn shape_range<'a>(
}
}
process(&bidi.text[cursor .. range.end], prev_level);
process(&bidi.text[cursor..range.end], prev_level);
}
/// Whether this is not a specific script.
@ -655,7 +658,7 @@ fn linebreak_simple<'a>(
for (end, mandatory, hyphen) in breakpoints(p) {
// Compute the line and its size.
let mut attempt = line(p, world, start .. end, mandatory, hyphen);
let mut attempt = line(p, world, start..end, mandatory, hyphen);
// If the line doesn't fit anymore, we push the last fitting attempt
// into the stack and rebuild the line from the attempt's end. The
@ -664,7 +667,7 @@ fn linebreak_simple<'a>(
if let Some((last_attempt, last_end)) = last.take() {
lines.push(last_attempt);
start = last_end;
attempt = line(p, world, start .. end, mandatory, hyphen);
attempt = line(p, world, start..end, mandatory, hyphen);
}
}
@ -731,7 +734,7 @@ fn linebreak_optimized<'a>(
let mut table = vec![Entry {
pred: 0,
total: 0.0,
line: line(p, world, 0 .. 0, false, false),
line: line(p, world, 0..0, false, false),
}];
let em = p.styles.get(TextNode::SIZE);
@ -745,7 +748,7 @@ fn linebreak_optimized<'a>(
for (i, pred) in table.iter_mut().enumerate().skip(active) {
// Layout the line.
let start = pred.line.end;
let attempt = line(p, world, start .. end, mandatory, hyphen);
let attempt = line(p, world, start..end, mandatory, hyphen);
// Determine how much the line's spaces would need to be stretched
// to make it the desired width.
@ -877,7 +880,7 @@ impl Iterator for Breakpoints<'_> {
// Hyphenate the next word.
if self.p.hyphenate != Some(false) {
if let Some(lang) = self.lang(self.offset) {
let word = &self.p.bidi.text[self.offset .. self.end];
let word = &self.p.bidi.text[self.offset..self.end];
let trimmed = word.trim_end_matches(|c: char| !c.is_alphabetic());
if !trimmed.is_empty() {
self.suffix = self.offset + trimmed.len();
@ -953,7 +956,7 @@ fn line<'a>(
// end of the line.
let base = expanded.end - shaped.text.len();
let start = range.start.max(base);
let text = &p.bidi.text[start .. range.end];
let text = &p.bidi.text[start..range.end];
let trimmed = text.trim_end();
range.end = start + trimmed.len();
@ -973,7 +976,7 @@ fn line<'a>(
// are no other items in the line.
if hyphen || start + shaped.text.len() > range.end {
if hyphen || start < range.end || before.is_empty() {
let shifted = start - base .. range.end - base;
let shifted = start - base..range.end - base;
let mut reshaped = shaped.reshape(world, shifted);
if hyphen || shy {
reshaped.push_hyphen(world);
@ -996,7 +999,7 @@ fn line<'a>(
// Reshape if necessary.
if range.start + shaped.text.len() > end {
if range.start < end {
let shifted = range.start - base .. end - base;
let shifted = range.start - base..end - base;
let reshaped = shaped.reshape(world, shifted);
width += reshaped.width;
first = Some(Item::Text(reshaped));
@ -1168,7 +1171,7 @@ fn commit(
offset += p.align.position(remaining);
}
if width > Abs::zero() {
for _ in 0 .. (count as usize).min(1000) {
for _ in 0..(count as usize).min(1000) {
push(&mut offset, frame.clone());
offset += apart;
}
@ -1229,7 +1232,7 @@ fn reorder<'a>(line: &'a Line<'a>) -> Vec<&Item<'a>> {
reordered.extend(line.slice(run.clone()));
if levels[run.start].is_rtl() {
reordered[prev ..].reverse();
reordered[prev..].reverse();
}
}

View File

@ -117,22 +117,38 @@ impl<'s> Quotes<'s> {
/// The opening quote.
fn open(&self, double: bool) -> &'s str {
if double { self.double_open } else { self.single_open }
if double {
self.double_open
} else {
self.single_open
}
}
/// The closing quote.
fn close(&self, double: bool) -> &'s str {
if double { self.double_close } else { self.single_close }
if double {
self.double_close
} else {
self.single_close
}
}
/// Which character should be used as a prime.
fn prime(&self, double: bool) -> &'static str {
if double { "" } else { "" }
if double {
""
} else {
""
}
}
/// Which character should be used as a fallback quote.
fn fallback(&self, double: bool) -> &'static str {
if double { "\"" } else { "" }
if double {
"\""
} else {
""
}
}
}

View File

@ -98,7 +98,6 @@ impl<'a> ShapedText<'a> {
self.glyphs.as_ref().group_by_key(|g| (g.font.clone(), g.y_offset))
{
let pos = Point::new(offset, top + shift + y_offset.at(self.size));
let glyphs = group
.iter()
.map(|glyph| Glyph {
@ -115,14 +114,7 @@ impl<'a> ShapedText<'a> {
})
.collect();
let text = Text {
font,
size: self.size,
lang,
fill,
glyphs,
};
let text = Text { font, size: self.size, lang, fill, glyphs };
let text_layer = frame.layer();
let width = text.width();
@ -253,7 +245,7 @@ impl<'a> ShapedText<'a> {
let left = self.find_safe_to_break(start, Side::Left)?;
let right = self.find_safe_to_break(end, Side::Right)?;
Some(&self.glyphs[left .. right])
Some(&self.glyphs[left..right])
}
/// Find the glyph offset matching the text index that is most towards the
@ -274,7 +266,11 @@ impl<'a> ShapedText<'a> {
.glyphs
.binary_search_by(|g| {
let ordering = g.cluster.cmp(&text_index);
if ltr { ordering } else { ordering.reverse() }
if ltr {
ordering
} else {
ordering.reverse()
}
})
.ok()?;
@ -385,9 +381,7 @@ fn shape_segment<'a>(
}
// Extract the font id or shape notdef glyphs if we couldn't find any font.
let font = if let Some(font) = selection {
font
} else {
let Some(font) = selection else {
if let Some(font) = ctx.used.first().cloned() {
shape_tofus(ctx, base, text, font);
}
@ -429,7 +423,7 @@ fn shape_segment<'a>(
y_offset: font.to_em(pos[i].y_offset),
cluster: base + cluster,
safe_to_break: !info.unsafe_to_break(),
c: text[cluster ..].chars().next().unwrap(),
c: text[cluster..].chars().next().unwrap(),
});
} else {
// Determine the source text range for the tofu sequence.
@ -466,11 +460,11 @@ fn shape_segment<'a>(
.and_then(|last| infos.get(last))
.map_or(text.len(), |info| info.cluster as usize);
start .. end
start..end
};
// Trim half-baked cluster.
let remove = base + range.start .. base + range.end;
let remove = base + range.start..base + range.end;
while ctx.glyphs.last().map_or(false, |g| remove.contains(&g.cluster)) {
ctx.glyphs.pop();
}

View File

@ -78,10 +78,7 @@ fn search_text(content: &Content, mode: ShiftKind) -> Option<EcoString> {
} else if content.is::<SpaceNode>() {
Some(' '.into())
} else if let Some(text) = content.downcast::<TextNode>() {
if let Some(sup) = convert_script(&text.0, mode) {
return Some(sup);
}
None
convert_script(&text.0, mode)
} else if let Some(seq) = content.downcast::<SequenceNode>() {
let mut full = EcoString::new();
for item in seq.0.iter() {
@ -138,7 +135,7 @@ fn to_superscript_codepoint(c: char) -> Option<char> {
'1' => 0x00B9,
'2' => 0x00B2,
'3' => 0x00B3,
'4' ..= '9' => 0x2070 + (c as u32 + 4 - '4' as u32),
'4'..='9' => 0x2070 + (c as u32 + 4 - '4' as u32),
'+' => 0x207A,
'-' => 0x207B,
'=' => 0x207C,
@ -155,7 +152,7 @@ fn to_superscript_codepoint(c: char) -> Option<char> {
fn to_subscript_codepoint(c: char) -> Option<char> {
char::from_u32(match c {
'0' => 0x2080,
'1' ..= '9' => 0x2080 + (c as u32 - '0' as u32),
'1'..='9' => 0x2080 + (c as u32 - '0' as u32),
'+' => 0x208A,
'-' => 0x208B,
'=' => 0x208C,

View File

@ -18,7 +18,8 @@ pub fn capability(_: TokenStream, item: TokenStream) -> TokenStream {
quote! {
#item_trait
impl ::typst::model::Capability for dyn #name {}
}.into()
}
.into()
}
/// Implement `Node` for a struct.
@ -349,10 +350,7 @@ fn parse_property(item: &mut syn::ImplItemConst) -> Result<Property> {
let span = property.name.span();
if property.skip && property.shorthand.is_some() {
return Err(Error::new(
span,
"skip and shorthand are mutually exclusive",
));
return Err(Error::new(span, "skip and shorthand are mutually exclusive"));
}
if property.referenced && (property.fold || property.resolve) {

View File

@ -1,11 +1,6 @@
unstable_features = true
overflow_delimited_expr = true
spaces_around_ranges = true
use_small_heuristics = "Max"
max_width = 90
chain_width = 70
struct_lit_width = 50
use_field_init_shorthand = true
merge_derives = false
max_width = 90
struct_lit_width = 40
chain_width = 70
single_line_if_else_max_width = 60
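
The diff bodies above and below make the practical effect of this rustfmt.toml change visible: ranges lose the spaces around `..`, and many short struct literals, closures, and if/else expressions collapse onto a single line. A small hypothetical snippet (not from the repository) written in the style the updated configuration appears to enforce:

// Ranges without surrounding spaces, and short struct literals and if/else
// expressions kept on one line where they fit within the width limits.
struct Span {
    start: usize,
    end: usize,
}

fn main() {
    let xs = [1, 2, 3, 4];
    let span = Span { start: 0, end: xs.len() };
    let label = if span.end > 2 { "long" } else { "short" };
    for i in span.start..span.end {
        println!("{label}: {}", xs[i]);
    }
}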

View File

@ -85,8 +85,8 @@ impl SourceError {
let full = world.source(self.span.source()).range(self.span);
match self.pos {
ErrorPos::Full => full,
ErrorPos::Start => full.start .. full.start,
ErrorPos::End => full.end .. full.end,
ErrorPos::Start => full.start..full.start,
ErrorPos::End => full.end..full.end,
}
}
}

View File

@ -3,5 +3,5 @@
mod pdf;
mod render;
pub use pdf::pdf;
pub use render::render;
pub use self::pdf::pdf;
pub use self::render::render;

View File

@ -174,7 +174,7 @@ where
&'a self,
refs: &'a [Ref],
) -> impl Iterator<Item = (Ref, usize)> + 'a {
refs.iter().copied().zip(0 .. self.to_pdf.len())
refs.iter().copied().zip(0..self.to_pdf.len())
}
fn items(&self) -> impl Iterator<Item = &T> + '_ {

View File

@ -256,8 +256,8 @@ fn render_outline_glyph(
// Blend the glyph bitmap with the existing pixels on the canvas.
// FIXME: This doesn't respect the clipping mask.
let pixels = bytemuck::cast_slice_mut::<u8, u32>(canvas.data_mut());
for x in left.clamp(0, cw) .. right.clamp(0, cw) {
for y in top.clamp(0, ch) .. bottom.clamp(0, ch) {
for x in left.clamp(0, cw)..right.clamp(0, cw) {
for y in top.clamp(0, ch)..bottom.clamp(0, ch) {
let ai = ((y - top) * mw + (x - left)) as usize;
let cov = bitmap.coverage[ai];
if cov == 0 {
@ -312,10 +312,7 @@ fn render_shape(
if let Some(Stroke { paint, thickness }) = shape.stroke {
let paint = paint.into();
let stroke = sk::Stroke {
width: thickness.to_f32(),
..Default::default()
};
let stroke = sk::Stroke { width: thickness.to_f32(), ..Default::default() };
canvas.stroke_path(&path, &paint, &stroke, ts, mask);
}
@ -342,11 +339,8 @@ fn render_image(
match image.decode().unwrap() {
DecodedImage::Raster(dynamic, _) => {
let downscale = w < image.width();
let filter = if downscale {
FilterType::Lanczos3
} else {
FilterType::CatmullRom
};
let filter =
if downscale { FilterType::Lanczos3 } else { FilterType::CatmullRom };
let buf = dynamic.resize(w, h, filter);
for ((_, _, src), dest) in buf.pixels().zip(pixmap.pixels_mut()) {
let Rgba([r, g, b, a]) = src;

View File

@ -172,7 +172,7 @@ impl FontInfo {
/// Compute metadata for all fonts in the given data.
pub fn from_data(data: &[u8]) -> impl Iterator<Item = FontInfo> + '_ {
let count = ttf_parser::fonts_in_collection(data).unwrap_or(1);
(0 .. count).filter_map(move |index| {
(0..count).filter_map(move |index| {
let ttf = ttf_parser::Face::parse(data, index).ok()?;
Self::from_ttf(&ttf)
})
@ -241,9 +241,9 @@ impl FontInfo {
if let Some(panose) = ttf
.raw_face()
.table(Tag::from_bytes(b"OS/2"))
.and_then(|os2| os2.get(32 .. 45))
.and_then(|os2| os2.get(32..45))
{
if matches!(panose, [2, 2 ..= 10, ..]) {
if matches!(panose, [2, 2..=10, ..]) {
flags.insert(FontFlags::SERIF);
}
}
@ -305,9 +305,8 @@ fn typographic_family(mut family: &str) -> &str {
const SEPARATORS: [char; 3] = [' ', '-', '_'];
// Modifiers that can appear in combination with suffixes.
const MODIFIERS: &[&str] = &[
"extra", "ext", "ex", "x", "semi", "sem", "sm", "demi", "dem", "ultra",
];
const MODIFIERS: &[&str] =
&["extra", "ext", "ex", "x", "semi", "sem", "sm", "demi", "dem", "ultra"];
// Style suffixes.
#[rustfmt::skip]
@ -331,9 +330,8 @@ fn typographic_family(mut family: &str) -> &str {
len = trimmed.len();
// Find style suffix.
let mut t = match SUFFIXES.iter().find_map(|s| trimmed.strip_suffix(s)) {
Some(t) => t,
None => break,
let Some(mut t) = SUFFIXES.iter().find_map(|s| trimmed.strip_suffix(s)) else {
break;
};
// Strip optional separator.
@ -351,7 +349,7 @@ fn typographic_family(mut family: &str) -> &str {
}
}
&family[.. len]
&family[..len]
}
/// How many words the two strings share in their prefix.
@ -411,7 +409,7 @@ impl Coverage {
let mut cursor = 0;
for &run in &self.0 {
if (cursor .. cursor + run).contains(&c) {
if (cursor..cursor + run).contains(&c) {
return inside;
}
cursor += run;
@ -432,19 +430,13 @@ mod tests {
assert_eq!(typographic_family("eras bold"), "eras");
assert_eq!(typographic_family("footlight mt light"), "footlight mt");
assert_eq!(typographic_family("times new roman"), "times new roman");
assert_eq!(
typographic_family("noto sans mono cond sembd"),
"noto sans mono"
);
assert_eq!(typographic_family("noto sans mono cond sembd"), "noto sans mono");
assert_eq!(typographic_family("noto serif SEMCOND sembd"), "noto serif");
assert_eq!(typographic_family("crimson text"), "crimson text");
assert_eq!(typographic_family("footlight light"), "footlight");
assert_eq!(typographic_family("Noto Sans"), "Noto Sans");
assert_eq!(typographic_family("Noto Sans Light"), "Noto Sans");
assert_eq!(
typographic_family("Noto Sans Semicondensed Heavy"),
"Noto Sans"
);
assert_eq!(typographic_family("Noto Sans Semicondensed Heavy"), "Noto Sans");
assert_eq!(typographic_family("Familx"), "Familx");
assert_eq!(typographic_family("Font Ultra"), "Font Ultra");
assert_eq!(typographic_family("Font Ultra Bold"), "Font");
@ -458,7 +450,7 @@ mod tests {
assert_eq!(coverage.0, runs);
let max = 5 + set.iter().copied().max().unwrap_or_default();
for c in 0 .. max {
for c in 0..max {
assert_eq!(set.contains(&c), coverage.contains(c));
}
}

View File

@ -3,8 +3,8 @@
mod book;
mod variant;
pub use book::*;
pub use variant::*;
pub use self::book::*;
pub use self::variant::*;
use std::fmt::{self, Debug, Formatter};
use std::hash::{Hash, Hasher};

View File

@ -153,7 +153,7 @@ impl Frame {
where
I: IntoIterator<Item = (Point, Element)>,
{
Arc::make_mut(&mut self.elements).splice(0 .. 0, elements);
Arc::make_mut(&mut self.elements).splice(0..0, elements);
}
/// Add a frame at a position in the background.
@ -181,7 +181,7 @@ impl Frame {
// Try to transfer the elements without adjusting the position.
// Also try to reuse the elements if the Arc isn't shared.
let range = layer .. layer;
let range = layer..layer;
if pos.is_zero() {
let sink = Arc::make_mut(&mut self.elements);
match Arc::try_unwrap(frame.elements) {
@ -407,7 +407,7 @@ impl Lang {
/// Return the language code as an all lowercase string slice.
pub fn as_str(&self) -> &str {
std::str::from_utf8(&self.0[.. usize::from(self.1)]).unwrap_or_default()
std::str::from_utf8(&self.0[..usize::from(self.1)]).unwrap_or_default()
}
/// The default direction for the language.
@ -426,9 +426,9 @@ impl FromStr for Lang {
/// Construct a language from a two- or three-byte ISO 639-1/2/3 code.
fn from_str(iso: &str) -> Result<Self, Self::Err> {
let len = iso.len();
if matches!(len, 2 ..= 3) && iso.is_ascii() {
if matches!(len, 2..=3) && iso.is_ascii() {
let mut bytes = [b' '; 3];
bytes[.. len].copy_from_slice(iso.as_bytes());
bytes[..len].copy_from_slice(iso.as_bytes());
bytes.make_ascii_lowercase();
Ok(Self(bytes, len as u8))
} else {
@ -538,9 +538,6 @@ impl Role {
pub fn is_weak(self) -> bool {
// In Typst, all text is in a paragraph, so paragraph isn't very
// descriptive.
matches!(
self,
Self::Paragraph | Self::GenericBlock | Self::GenericInline
)
matches!(self, Self::Paragraph | Self::GenericBlock | Self::GenericInline)
}
}

View File

@ -54,10 +54,7 @@ impl<T> Axes<T> {
/// Zip two instances into an instance over a tuple.
pub fn zip<U>(self, other: Axes<U>) -> Axes<(T, U)> {
Axes {
x: (self.x, other.x),
y: (self.y, other.y),
}
Axes { x: (self.x, other.x), y: (self.y, other.y) }
}
/// Whether a condition is true for at least one of fields.
@ -100,18 +97,12 @@ impl<T: Default> Axes<T> {
impl<T: Ord> Axes<T> {
/// The component-wise minimum of this and another instance.
pub fn min(self, other: Self) -> Self {
Self {
x: self.x.min(other.x),
y: self.y.min(other.y),
}
Self { x: self.x.min(other.x), y: self.y.min(other.y) }
}
/// The component-wise minimum of this and another instance.
pub fn max(self, other: Self) -> Self {
Self {
x: self.x.max(other.x),
y: self.y.max(other.y),
}
Self { x: self.x.max(other.x), y: self.y.max(other.y) }
}
}

View File

@ -16,12 +16,7 @@ pub struct Corners<T> {
impl<T> Corners<T> {
/// Create a new instance from the four components.
pub const fn new(top_left: T, top_right: T, bottom_right: T, bottom_left: T) -> Self {
Self {
top_left,
top_right,
bottom_right,
bottom_left,
}
Self { top_left, top_right, bottom_right, bottom_left }
}
/// Create an instance with four equal components.
@ -66,12 +61,7 @@ impl<T> Corners<T> {
/// An iterator over the corners, starting with the top left corner,
/// clockwise.
pub fn iter(&self) -> impl Iterator<Item = &T> {
[
&self.top_left,
&self.top_right,
&self.bottom_right,
&self.bottom_left,
]
[&self.top_left, &self.top_right, &self.bottom_right, &self.bottom_left]
.into_iter()
}

View File

@ -18,9 +18,5 @@ pub fn ellipse(size: Size, fill: Option<Paint>, stroke: Option<Stroke>) -> Shape
path.cubic_to(point(rx, my), point(mx, ry), point(z, ry));
path.cubic_to(point(-mx, ry), point(-rx, my), point(-rx, z));
Shape {
geometry: Geometry::Path(path),
stroke,
fill,
}
Shape { geometry: Geometry::Path(path), stroke, fill }
}

View File

@ -45,7 +45,11 @@ impl Em {
/// Convert to an absolute length at the given font size.
pub fn at(self, font_size: Abs) -> Abs {
let resolved = font_size * self.get();
if resolved.is_finite() { resolved } else { Abs::zero() }
if resolved.is_finite() {
resolved
} else {
Abs::zero()
}
}
}

View File

@ -92,10 +92,7 @@ impl Add for Length {
type Output = Self;
fn add(self, rhs: Self) -> Self::Output {
Self {
abs: self.abs + rhs.abs,
em: self.em + rhs.em,
}
Self { abs: self.abs + rhs.abs, em: self.em + rhs.em }
}
}

View File

@ -24,27 +24,27 @@ mod size;
mod stroke;
mod transform;
pub use abs::*;
pub use align::*;
pub use angle::*;
pub use axes::*;
pub use corners::*;
pub use dir::*;
pub use ellipse::*;
pub use em::*;
pub use fr::*;
pub use length::*;
pub use paint::*;
pub use path::*;
pub use point::*;
pub use ratio::*;
pub use rel::*;
pub use rounded::*;
pub use scalar::*;
pub use sides::*;
pub use size::*;
pub use stroke::*;
pub use transform::*;
pub use self::abs::*;
pub use self::align::*;
pub use self::angle::*;
pub use self::axes::*;
pub use self::corners::*;
pub use self::dir::*;
pub use self::ellipse::*;
pub use self::em::*;
pub use self::fr::*;
pub use self::length::*;
pub use self::paint::*;
pub use self::path::*;
pub use self::point::*;
pub use self::ratio::*;
pub use self::rel::*;
pub use self::rounded::*;
pub use self::scalar::*;
pub use self::sides::*;
pub use self::size::*;
pub use self::stroke::*;
pub use self::transform::*;
use std::cmp::Ordering;
use std::f64::consts::PI;
@ -95,20 +95,12 @@ pub enum Geometry {
impl Geometry {
/// Fill the geometry without a stroke.
pub fn filled(self, fill: Paint) -> Shape {
Shape {
geometry: self,
fill: Some(fill),
stroke: None,
}
Shape { geometry: self, fill: Some(fill), stroke: None }
}
/// Stroke the geometry without a fill.
pub fn stroked(self, stroke: Stroke) -> Shape {
Shape {
geometry: self,
fill: None,
stroke: Some(stroke),
}
Shape { geometry: self, fill: None, stroke: Some(stroke) }
}
}

View File

@ -244,11 +244,11 @@ impl FromStr for RgbaColor {
}
let mut values: [u8; 4] = [u8::MAX; 4];
for elem in if alpha { 0 .. 4 } else { 0 .. 3 } {
for elem in if alpha { 0..4 } else { 0..3 } {
let item_len = if long { 2 } else { 1 };
let pos = elem * item_len;
let item = &hex_str[pos .. (pos + item_len)];
let item = &hex_str[pos..(pos + item_len)];
values[elem] = u8::from_str_radix(item, 16).unwrap();
if short {
@ -324,12 +324,7 @@ impl CmykColor {
round_u8(255.0 * (1.0 - c) * (1.0 - k))
};
RgbaColor {
r: f(self.c),
g: f(self.m),
b: f(self.y),
a: 255,
}
RgbaColor { r: f(self.c), g: f(self.m), b: f(self.y), a: 255 }
}
/// Lighten this color by a factor.

View File

@ -46,7 +46,11 @@ impl Ratio {
/// Return the ratio of the given `whole`.
pub fn of<T: Numeric>(self, whole: T) -> T {
let resolved = whole * self.get();
if resolved.is_finite() { resolved } else { T::zero() }
if resolved.is_finite() {
resolved
} else {
T::zero()
}
}
}

View File

@ -128,10 +128,7 @@ impl<T: Numeric> Mul<f64> for Rel<T> {
type Output = Self;
fn mul(self, other: f64) -> Self::Output {
Self {
rel: self.rel * other,
abs: self.abs * other,
}
Self { rel: self.rel * other, abs: self.abs * other }
}
}
@ -147,10 +144,7 @@ impl<T: Numeric> Div<f64> for Rel<T> {
type Output = Self;
fn div(self, other: f64) -> Self::Output {
Self {
rel: self.rel / other,
abs: self.abs / other,
}
Self { rel: self.rel / other, abs: self.abs / other }
}
}

View File

@ -21,11 +21,7 @@ pub fn rounded_rect(
if !stroke.is_uniform() {
for (path, stroke) in stroke_segments(size, radius, stroke) {
if stroke.is_some() {
res.push(Shape {
geometry: Geometry::Path(path),
fill: None,
stroke,
});
res.push(Shape { geometry: Geometry::Path(path), fill: None, stroke });
}
}
}

View File

@ -29,11 +29,7 @@ impl Args {
pub fn new(span: Span, values: impl IntoIterator<Item = Value>) -> Self {
let items = values
.into_iter()
.map(|value| Arg {
span,
name: None,
value: Spanned::new(value, span),
})
.map(|value| Arg { span, name: None, value: Spanned::new(value, span) })
.collect();
Self { span, items }
}

View File

@ -119,7 +119,7 @@ impl Array {
.ok_or_else(|| out_of_bounds(end, len))?
.max(start);
Ok(Self::from_vec(self.0[start .. end].to_vec()))
Ok(Self::from_vec(self.0[start..end].to_vec()))
}
/// Whether the array contains a specific value.
@ -287,11 +287,7 @@ impl Array {
/// Resolve an index.
fn locate(&self, index: i64) -> Option<usize> {
usize::try_from(if index >= 0 {
index
} else {
self.len().checked_add(index)?
})
usize::try_from(if index >= 0 { index } else { self.len().checked_add(index)? })
.ok()
}
}

View File

@ -161,10 +161,7 @@ impl Add for Content {
return lhs;
}
let seq = match (
lhs.downcast::<SequenceNode>(),
rhs.downcast::<SequenceNode>(),
) {
let seq = match (lhs.downcast::<SequenceNode>(), rhs.downcast::<SequenceNode>()) {
(Some(lhs), Some(rhs)) => lhs.0.iter().chain(&rhs.0).cloned().collect(),
(Some(lhs), None) => lhs.0.iter().cloned().chain(iter::once(rhs)).collect(),
(None, Some(rhs)) => iter::once(lhs).chain(rhs.0.iter().cloned()).collect(),

View File

@ -140,7 +140,6 @@ fn eval_markup(
vm.scopes.top.define(wrap.binding().take(), tail);
wrap.body().eval(vm)?.display(vm.world)
}
_ => node.eval(vm)?,
});
@ -162,7 +161,7 @@ impl Eval for ast::MarkupNode {
fn eval(&self, vm: &mut Vm) -> SourceResult<Self::Output> {
match self {
Self::Space(v) => Ok(match v.newlines() {
0 ..= 1 => (vm.items.space)(),
0..=1 => (vm.items.space)(),
_ => (vm.items.parbreak)(),
}),
Self::Linebreak(v) => v.eval(vm),
@ -369,10 +368,7 @@ impl Eval for ast::Frac {
type Output = Content;
fn eval(&self, vm: &mut Vm) -> SourceResult<Self::Output> {
Ok((vm.items.math_frac)(
self.num().eval(vm)?,
self.denom().eval(vm)?,
))
Ok((vm.items.math_frac)(self.num().eval(vm)?, self.denom().eval(vm)?))
}
}
@ -501,7 +497,6 @@ fn eval_code(
vm.scopes.top.define(wrap.binding().take(), tail);
wrap.body().eval(vm)?
}
_ => expr.eval(vm)?,
};
@ -676,18 +671,12 @@ impl Eval for ast::FieldAccess {
Ok(match object {
Value::Dict(dict) => dict.get(&field).at(span)?.clone(),
Value::Content(node) => node
.to::<dyn Show>()
.and_then(|node| node.field(&field))
.ok_or_else(|| format!("unknown field {field:?}"))
.at(span)?,
v => bail!(
self.target().span(),
"cannot access field on {}",
v.type_name()
),
v => bail!(self.target().span(), "cannot access field on {}", v.type_name()),
})
}
}
@ -706,7 +695,6 @@ impl Eval for ast::FuncCall {
let point = || Tracepoint::Call(func.name().map(Into::into));
func.call(vm, args).trace(vm.world, point, self.span())?
}
v => bail!(
self.callee().span(),
"expected callable or collection, found {}",

View File

@ -32,12 +32,7 @@ impl Func {
name: &'static str,
func: fn(&mut Vm, &mut Args) -> SourceResult<Value>,
) -> Self {
Self(Arc::new(Repr::Native(Native {
name,
func,
set: None,
node: None,
})))
Self(Arc::new(Repr::Native(Native { name, func, set: None, node: None })))
}
/// Create a new function from a native rust node.
@ -92,7 +87,7 @@ impl Func {
Repr::Native(native) => (native.func)(vm, &mut args)?,
Repr::Closure(closure) => closure.call(vm, &mut args)?,
Repr::With(wrapped, applied) => {
args.items.splice(.. 0, applied.items.iter().cloned());
args.items.splice(..0, applied.items.iter().cloned());
return wrapped.call(vm, args);
}
};
@ -194,12 +189,15 @@ impl Closure {
// Parse the arguments according to the parameter list.
for (param, default) in &self.params {
scopes.top.define(param.clone(), match default {
None => args.expect::<Value>(param)?,
scopes.top.define(
param.clone(),
match default {
Some(default) => {
args.named::<Value>(param)?.unwrap_or_else(|| default.clone())
}
});
None => args.expect::<Value>(param)?,
},
);
}
// Put the remaining arguments into the sink.

View File

@ -1,4 +1,4 @@
//! Layout and computation model.
//! Document and computation model.
#[macro_use]
mod items;
@ -23,18 +23,18 @@ mod ops;
mod scope;
mod vm;
pub use self::str::*;
pub use args::*;
pub use array::*;
pub use cast::*;
pub use content::*;
pub use dict::*;
pub use eval::*;
pub use func::*;
pub use items::*;
pub use scope::*;
pub use styles::*;
pub use value::*;
pub use vm::*;
pub use typst_macros::{capability, node};
pub use self::args::*;
pub use self::array::*;
pub use self::cast::*;
pub use self::content::*;
pub use self::dict::*;
pub use self::eval::*;
pub use self::func::*;
pub use self::items::*;
pub use self::scope::*;
pub use self::str::*;
pub use self::styles::*;
pub use self::value::*;
pub use self::vm::*;

View File

@ -103,18 +103,18 @@ pub fn add(lhs: Value, rhs: Value) -> StrResult<Value> {
if let (Some(&a), Some(&b)) =
(a.downcast::<GenAlign>(), b.downcast::<GenAlign>())
{
if a.axis() != b.axis() {
Value::dynamic(match a.axis() {
Axis::X => Axes { x: a, y: b },
Axis::Y => Axes { x: b, y: a },
})
} else {
if a.axis() == b.axis() {
return Err(format!("cannot add two {:?} alignments", a.axis()));
}
} else {
return Ok(Value::dynamic(match a.axis() {
Axis::X => Axes { x: a, y: b },
Axis::Y => Axes { x: b, y: a },
}));
};
mismatch!("cannot add {} and {}", a, b);
}
}
(a, b) => mismatch!("cannot add {} and {}", a, b),
})
@ -370,17 +370,11 @@ pub fn not_in(lhs: Value, rhs: Value) -> StrResult<Value> {
/// Test for containment.
pub fn contains(lhs: &Value, rhs: &Value) -> Option<bool> {
Some(match (lhs, rhs) {
(Str(a), Str(b)) => b.as_str().contains(a.as_str()),
(Dyn(a), Str(b)) => {
if let Some(regex) = a.downcast::<Regex>() {
regex.is_match(b)
} else {
return Option::None;
match (lhs, rhs) {
(Str(a), Str(b)) => Some(b.as_str().contains(a.as_str())),
(Dyn(a), Str(b)) => a.downcast::<Regex>().map(|regex| regex.is_match(b)),
(Str(a), Dict(b)) => Some(b.contains(a)),
(a, Array(b)) => Some(b.contains(a)),
_ => Option::None,
}
}
(Str(a), Dict(b)) => b.contains(a),
(a, Array(b)) => b.contains(a),
_ => return Option::None,
})
}

View File

@ -67,16 +67,12 @@ impl Str {
.ok_or_else(|| out_of_bounds(end, len))?
.max(start);
Ok(self.0[start .. end].into())
Ok(self.0[start..end].into())
}
/// Resolve an index.
fn locate(&self, index: i64) -> Option<usize> {
usize::try_from(if index >= 0 {
index
} else {
self.len().checked_add(index)?
})
usize::try_from(if index >= 0 { index } else { self.len().checked_add(index)? })
.ok()
}
@ -207,7 +203,7 @@ impl Str {
Some(StrPattern::Regex(re)) => {
let s = self.as_str();
let mut last = 0;
let mut range = 0 .. s.len();
let mut range = 0..s.len();
for m in re.find_iter(s) {
// Does this match follow directly after the last one?
@ -235,7 +231,7 @@ impl Str {
range.end = s.len();
}
&s[range.start .. range.start.max(range.end)]
&s[range.start..range.start.max(range.end)]
}
};
@ -271,10 +267,7 @@ impl Str {
/// The out of bounds access error message.
#[cold]
fn out_of_bounds(index: i64, len: i64) -> String {
format!(
"string index out of bounds (index: {}, len: {})",
index, len
)
format!("string index out of bounds (index: {}, len: {})", index, len)
}
/// Convert an item of std's `match_indices` to a dictionary.

View File

@ -94,7 +94,7 @@ impl StyleMap {
/// This is useful over `chain` when you want to combine two maps, but you
/// still need an owned map without a lifetime.
pub fn apply_map(&mut self, tail: &Self) {
self.0.splice(0 .. 0, tail.0.iter().cloned());
self.0.splice(0..0, tail.0.iter().cloned());
}
/// Mark all contained properties as _scoped_. This means that they only
@ -159,10 +159,7 @@ impl StyleEntry {
}
}
StyleChain {
head: std::slice::from_ref(self),
tail: Some(tail),
}
StyleChain { head: std::slice::from_ref(self), tail: Some(tail) }
}
/// If this is a property, return it.
@ -328,7 +325,7 @@ impl<'a> StyleChain<'a> {
let mut suffix = StyleMap::new();
let take = self.links().count().saturating_sub(len);
for link in self.links().take(take) {
suffix.0.splice(0 .. 0, link.iter().cloned());
suffix.0.splice(0..0, link.iter().cloned());
}
suffix
}
@ -344,10 +341,7 @@ impl<'a> StyleChain<'a> {
/// Iterate over the entries of the chain.
fn entries(self) -> Entries<'a> {
Entries {
inner: [].as_slice().iter(),
links: self.links(),
}
Entries { inner: [].as_slice().iter(), links: self.links() }
}
/// Iterate over the links of the chain.
@ -582,12 +576,12 @@ impl<'a, T> StyleVecBuilder<'a, T> {
for &(mut chain, _) in iter {
let len = chain.links().count();
if len < shared {
for _ in 0 .. shared - len {
for _ in 0..shared - len {
trunk.pop();
}
shared = len;
} else if len > shared {
for _ in 0 .. len - shared {
for _ in 0..len - shared {
chain.pop();
}
}
@ -1017,7 +1011,7 @@ impl Recipe {
for mat in regex.find_iter(text) {
let start = mat.start();
if cursor < start {
result.push(make(text[cursor .. start].into()));
result.push(make(text[cursor..start].into()));
}
result.push(self.call(world, || Value::Str(mat.as_str().into()))?);
@ -1029,7 +1023,7 @@ impl Recipe {
}
if cursor < text.len() {
result.push(make(text[cursor ..].into()));
result.push(make(text[cursor..].into()));
}
Content::sequence(result)
@ -1066,11 +1060,7 @@ impl Recipe {
impl Debug for Recipe {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(
f,
"Recipe matching {:?} from {:?}",
self.pattern, self.func.span
)
write!(f, "Recipe matching {:?} from {:?}", self.pattern, self.func.span)
}
}

View File

@ -294,11 +294,8 @@ where
}
fn dyn_eq(&self, other: &Dynamic) -> bool {
if let Some(other) = other.downcast::<Self>() {
let Some(other) = other.downcast::<Self>() else { return false };
self == other
} else {
false
}
}
fn dyn_type_name(&self) -> &'static str {
@ -411,15 +408,9 @@ mod tests {
test(Abs::pt(5.5), "5.5pt");
test(Angle::deg(90.0), "90deg");
test(Ratio::one() / 2.0, "50%");
test(
Ratio::new(0.3) + Length::from(Abs::cm(2.0)),
"30% + 56.69pt",
);
test(Ratio::new(0.3) + Length::from(Abs::cm(2.0)), "30% + 56.69pt");
test(Fr::one() * 7.55, "7.55fr");
test(
Color::Rgba(RgbaColor::new(1, 1, 1, 0xff)),
"rgb(\"#010101\")",
);
test(Color::Rgba(RgbaColor::new(1, 1, 1, 0xff)), "rgb(\"#010101\")");
// Collections.
test("hello", r#""hello""#);

View File

@ -1471,7 +1471,11 @@ impl ForPattern {
pub fn key(&self) -> Option<Ident> {
let mut children = self.0.children().filter_map(SyntaxNode::cast);
let key = children.next();
if children.next().is_some() { key } else { None }
if children.next().is_some() {
key
} else {
None
}
}
/// The value part of the pattern.

View File

@ -81,7 +81,7 @@ where
F: FnMut(Range<usize>, Style),
{
if node.children().len() == 0 {
let range = offset .. offset + node.len();
let range = offset..offset + node.len();
let style = highlighter.style_for_stack(&scopes);
f(range, style);
return;
@ -112,7 +112,7 @@ where
F: FnMut(Range<usize>, Category),
{
for (i, child) in node.children().enumerate() {
let span = offset .. offset + child.len();
let span = offset..offset + child.len();
if range.start <= span.end && range.end >= span.start {
if let Some(category) = Category::determine(child, node, i) {
f(span, category);
@ -412,29 +412,35 @@ mod tests {
fn test(text: &str, goal: &[(Range<usize>, Category)]) {
let mut vec = vec![];
let source = Source::detached(text);
let full = 0 .. text.len();
let full = 0..text.len();
highlight_categories(source.root(), full, &mut |range, category| {
vec.push((range, category));
});
assert_eq!(vec, goal);
}
test("= *AB*", &[(0 .. 6, Heading), (2 .. 6, Strong)]);
test("= *AB*", &[(0..6, Heading), (2..6, Strong)]);
test("#f(x + 1)", &[
(0 .. 2, Function),
(2 .. 3, Bracket),
(5 .. 6, Operator),
(7 .. 8, Number),
(8 .. 9, Bracket),
]);
test(
"#f(x + 1)",
&[
(0..2, Function),
(2..3, Bracket),
(5..6, Operator),
(7..8, Number),
(8..9, Bracket),
],
);
test("#let f(x) = x", &[
(0 .. 4, Keyword),
(5 .. 6, Function),
(6 .. 7, Bracket),
(8 .. 9, Bracket),
(10 .. 11, Operator),
]);
test(
"#let f(x) = x",
&[
(0..4, Keyword),
(5..6, Function),
(6..7, Bracket),
(8..9, Bracket),
(10..11, Operator),
],
);
}
}

View File

@ -28,7 +28,7 @@ pub fn reparse(
let id = root.span().source();
*root = parse(text);
root.numberize(id, Span::FULL).unwrap();
0 .. text.len()
0..text.len()
}
/// Try to reparse inside the given node.
@ -55,7 +55,7 @@ fn try_reparse(
// Find the first child in the range of children to reparse.
for (i, child) in node.children().enumerate() {
let pos = NodePos { idx: i, offset };
let child_span = offset .. offset + child.len();
let child_span = offset..offset + child.len();
child_outermost = outermost && i + 1 == original_count;
match search {
@ -81,7 +81,7 @@ fn try_reparse(
} else {
// Update compulsory state of `ahead_nontrivia`.
if let Some(ahead_nontrivia) = ahead.as_mut() {
if let NodeKind::Space { newlines: (1 ..) } = child.kind() {
if let NodeKind::Space { newlines: (1..) } = child.kind() {
ahead_nontrivia.newline();
}
}
@ -126,10 +126,13 @@ fn try_reparse(
// If we were looking for a non-whitespace element and hit the end of
// the file here, we instead use EOF as the end of the span.
if let SearchState::RequireNonTrivia(start) = search {
search = SearchState::SpanFound(start, NodePos {
search = SearchState::SpanFound(
start,
NodePos {
idx: node.children().len() - 1,
offset: offset - node.children().last().unwrap().len(),
})
},
)
}
if let SearchState::Contained(pos) = search {
@ -156,7 +159,7 @@ fn try_reparse(
return Some(range);
}
let superseded_span = pos.offset .. pos.offset + prev_len;
let superseded_span = pos.offset..pos.offset + prev_len;
let func: Option<ReparseMode> = match child.kind() {
NodeKind::CodeBlock => Some(ReparseMode::Code),
NodeKind::ContentBlock => Some(ReparseMode::Content),
@ -170,7 +173,7 @@ fn try_reparse(
change,
node,
func,
pos.idx .. pos.idx + 1,
pos.idx..pos.idx + 1,
superseded_span,
outermost,
) {
@ -197,13 +200,13 @@ fn try_reparse(
}
let superseded_span =
start.offset .. end.offset + node.children().as_slice()[end.idx].len();
start.offset..end.offset + node.children().as_slice()[end.idx].len();
replace(
change,
node,
ReparseMode::MarkupElements { at_start, min_indent },
start.idx .. end.idx + 1,
start.idx..end.idx + 1,
superseded_span,
outermost,
)
@ -223,33 +226,33 @@ fn replace(
let differential: isize =
change.replacement_len as isize - change.replaced.len() as isize;
let newborn_end = (superseded_span.end as isize + differential) as usize;
let newborn_span = superseded_span.start .. newborn_end;
let newborn_span = superseded_span.start..newborn_end;
let mut prefix = "";
for (i, c) in change.text[.. newborn_span.start].char_indices().rev() {
for (i, c) in change.text[..newborn_span.start].char_indices().rev() {
if is_newline(c) {
break;
}
prefix = &change.text[i .. newborn_span.start];
prefix = &change.text[i..newborn_span.start];
}
let (newborns, terminated, amount) = match mode {
ReparseMode::Code => reparse_code_block(
prefix,
&change.text[newborn_span.start ..],
&change.text[newborn_span.start..],
newborn_span.len(),
),
ReparseMode::Content => reparse_content_block(
prefix,
&change.text[newborn_span.start ..],
&change.text[newborn_span.start..],
newborn_span.len(),
),
ReparseMode::MarkupElements { at_start, min_indent } => reparse_markup_elements(
prefix,
&change.text[newborn_span.start ..],
&change.text[newborn_span.start..],
newborn_span.len(),
differential,
&node.children().as_slice()[superseded_start ..],
&node.children().as_slice()[superseded_start..],
at_start,
min_indent,
),
@ -261,7 +264,7 @@ fn replace(
return None;
}
node.replace_children(superseded_start .. superseded_start + amount, newborns)
node.replace_children(superseded_start..superseded_start + amount, newborns)
.ok()?;
Some(newborn_span)
@ -351,11 +354,7 @@ impl Ahead {
Self {
pos,
at_start,
kind: if bounded {
AheadKind::Normal
} else {
AheadKind::Unbounded(true)
},
kind: if bounded { AheadKind::Normal } else { AheadKind::Unbounded(true) },
}
}
@ -402,7 +401,7 @@ fn is_bounded(kind: &NodeKind) -> bool {
/// previous value of the property.
fn next_at_start(kind: &NodeKind, prev: bool) -> bool {
match kind {
NodeKind::Space { newlines: (1 ..) } => true,
NodeKind::Space { newlines: (1..) } => true,
NodeKind::Space { .. } | NodeKind::LineComment | NodeKind::BlockComment => prev,
_ => false,
}

View File

@ -2,6 +2,7 @@
pub mod ast;
pub mod highlight;
mod incremental;
mod kind;
mod node;
@ -12,12 +13,12 @@ mod source;
mod span;
mod tokens;
pub use kind::*;
pub use node::*;
pub use parsing::*;
pub use source::*;
pub use span::*;
pub use tokens::*;
pub use self::kind::*;
pub use self::node::*;
pub use self::parsing::*;
pub use self::source::*;
pub use self::span::*;
pub use self::tokens::*;
use incremental::reparse;
use parser::*;

View File

@ -291,16 +291,16 @@ impl InnerNode {
let mut start = within.start;
if range.is_none() {
let end = start + stride;
self.data.numberize(id, start .. end)?;
self.data.numberize(id, start..end)?;
self.upper = within.end;
start = end;
}
// Number the children.
let len = self.children.len();
for child in &mut self.children[range.unwrap_or(0 .. len)] {
for child in &mut self.children[range.unwrap_or(0..len)] {
let end = start + child.descendants() as u64 * stride;
child.numberize(id, start .. end)?;
child.numberize(id, start..end)?;
start = end;
}
@ -377,8 +377,8 @@ impl InnerNode {
// - or if we were erroneous before due to a non-superseded node.
self.erroneous = replacement.iter().any(SyntaxNode::erroneous)
|| (self.erroneous
&& (self.children[.. range.start].iter().any(SyntaxNode::erroneous))
|| self.children[range.end ..].iter().any(SyntaxNode::erroneous));
&& (self.children[..range.start].iter().any(SyntaxNode::erroneous))
|| self.children[range.end..].iter().any(SyntaxNode::erroneous));
// Perform the replacement.
let replacement_count = replacement.len();
@ -392,7 +392,7 @@ impl InnerNode {
let max_left = range.start;
let max_right = self.children.len() - range.end;
loop {
let renumber = range.start - left .. range.end + right;
let renumber = range.start - left..range.end + right;
// The minimum assignable number is either
// - the upper bound of the node right before the to-be-renumbered
@ -416,7 +416,7 @@ impl InnerNode {
.map_or(self.upper(), |next| next.span().number());
// Try to renumber.
let within = start_number .. end_number;
let within = start_number..end_number;
let id = self.span().source();
if self.numberize(id, Some(renumber), within).is_ok() {
return Ok(());
@ -529,7 +529,7 @@ impl NodeData {
/// If the span points into this node, convert it to a byte range.
fn range(&self, span: Span, offset: usize) -> Option<Range<usize>> {
(self.span == span).then(|| offset .. offset + self.len())
(self.span == span).then(|| offset..offset + self.len())
}
}

View File

@ -97,7 +97,7 @@ impl<'s> Parser<'s> {
// Trailing trivia should not be wrapped into the new node.
let idx = self.children.len();
self.children.push(SyntaxNode::default());
self.children.extend(children.drain(until.0 ..));
self.children.extend(children.drain(until.0..));
self.children[idx] = InnerNode::with_children(kind, children).into();
}
@ -177,7 +177,11 @@ impl<'s> Parser<'s> {
/// Peek at the current token without consuming it.
pub fn peek(&self) -> Option<&NodeKind> {
if self.eof { None } else { self.current.as_ref() }
if self.eof {
None
} else {
self.current.as_ref()
}
}
/// Peek at the current token, but only if it follows immediately after the
@ -192,7 +196,7 @@ impl<'s> Parser<'s> {
/// Peek at the source of the current token.
pub fn peek_src(&self) -> &'s str {
self.get(self.current_start() .. self.current_end())
self.get(self.current_start()..self.current_end())
}
/// Obtain a range of the source code.
@ -303,7 +307,7 @@ impl<'s> Parser<'s> {
if group_mode != TokenMode::Markup {
let start = self.trivia_start().0;
target = self.current_start
- self.children[start ..].iter().map(SyntaxNode::len).sum::<usize>();
- self.children[start..].iter().map(SyntaxNode::len).sum::<usize>();
self.children.truncate(start);
}
@ -466,7 +470,7 @@ impl Marker {
/// with the given `kind`.
pub fn end(self, p: &mut Parser, kind: NodeKind) {
let until = p.trivia_start().0.max(self.0);
let children = p.children.drain(self.0 .. until).collect();
let children = p.children.drain(self.0..until).collect();
p.children
.insert(self.0, InnerNode::with_children(kind, children).into());
}
@ -476,7 +480,7 @@ impl Marker {
where
F: FnMut(&SyntaxNode) -> Result<(), &'static str>,
{
for child in &mut p.children[self.0 ..] {
for child in &mut p.children[self.0..] {
// Don't expose errors.
if child.kind().is_error() {
continue;

View File

@ -90,7 +90,7 @@ pub(crate) fn reparse_markup_elements(
let mut stopped = false;
'outer: while !p.eof() {
if let Some(NodeKind::Space { newlines: (1 ..) }) = p.peek() {
if let Some(NodeKind::Space { newlines: (1..) }) = p.peek() {
if p.column(p.current_end()) < min_indent {
return None;
}
@ -167,7 +167,7 @@ fn markup_indented(p: &mut Parser, min_indent: usize) {
while !p.eof() {
match p.peek() {
Some(NodeKind::Space { newlines: (1 ..) })
Some(NodeKind::Space { newlines: (1..) })
if p.column(p.current_end()) < min_indent =>
{
break;
@ -195,7 +195,7 @@ where
p.perform(NodeKind::Markup { min_indent: usize::MAX }, |p| {
let mut at_start = false;
while let Some(kind) = p.peek() {
if let NodeKind::Space { newlines: (1 ..) } = kind {
if let NodeKind::Space { newlines: (1..) } = kind {
break;
}
@ -210,11 +210,7 @@ where
/// Parse a markup node.
fn markup_node(p: &mut Parser, at_start: &mut bool) {
let token = match p.peek() {
Some(t) => t,
None => return,
};
let Some(token) = p.peek() else { return };
match token {
// Whitespace.
NodeKind::Space { newlines } => {
@ -316,7 +312,7 @@ fn heading(p: &mut Parser, at_start: bool) {
markup_line(p, |kind| matches!(kind, NodeKind::Label(_)));
marker.end(p, NodeKind::Heading);
} else {
let text = p.get(current_start .. p.prev_end()).into();
let text = p.get(current_start..p.prev_end()).into();
marker.convert(p, NodeKind::Text(text));
}
}
@ -420,12 +416,9 @@ fn math_node_prec(p: &mut Parser, min_prec: usize, stop: Option<NodeKind>) {
Some(NodeKind::Underscore) => {
(NodeKind::Script, 2, Assoc::Right, Some(NodeKind::Hat))
}
Some(NodeKind::Hat) => (
NodeKind::Script,
2,
Assoc::Right,
Some(NodeKind::Underscore),
),
Some(NodeKind::Hat) => {
(NodeKind::Script, 2, Assoc::Right, Some(NodeKind::Underscore))
}
Some(NodeKind::Slash) => (NodeKind::Frac, 1, Assoc::Left, None),
_ => break,
};
@ -454,11 +447,7 @@ fn math_node_prec(p: &mut Parser, min_prec: usize, stop: Option<NodeKind>) {
/// Parse a primary math node.
fn math_primary(p: &mut Parser) {
let token = match p.peek() {
Some(t) => t,
None => return,
};
let Some(token) = p.peek() else { return };
match token {
// Spaces, atoms and expressions.
NodeKind::Space { .. }
@ -652,7 +641,6 @@ fn literal(p: &mut Parser) -> bool {
p.eat();
true
}
_ => false,
}
}
@ -724,21 +712,26 @@ enum CollectionKind {
/// Returns the length of the collection and whether the literal contained any
/// commas.
fn collection(p: &mut Parser, keyed: bool) -> (CollectionKind, usize) {
let mut kind = None;
let mut collection_kind = None;
let mut items = 0;
let mut can_group = true;
let mut missing_coma: Option<Marker> = None;
while !p.eof() {
if let Ok(item_kind) = item(p, keyed) {
let Ok(item_kind) = item(p, keyed) else {
p.eat_if(NodeKind::Comma);
collection_kind = Some(CollectionKind::Group);
continue;
};
match item_kind {
NodeKind::Spread => can_group = false,
NodeKind::Named if kind.is_none() => {
kind = Some(CollectionKind::Named);
NodeKind::Named if collection_kind.is_none() => {
collection_kind = Some(CollectionKind::Named);
can_group = false;
}
_ if kind.is_none() => {
kind = Some(CollectionKind::Positional);
_ if collection_kind.is_none() => {
collection_kind = Some(CollectionKind::Positional);
}
_ => {}
}
@ -758,16 +751,12 @@ fn collection(p: &mut Parser, keyed: bool) -> (CollectionKind, usize) {
} else {
missing_coma = Some(p.trivia_start());
}
} else {
p.eat_if(NodeKind::Comma);
kind = Some(CollectionKind::Group);
}
}
let kind = if can_group && items == 1 {
CollectionKind::Group
} else {
kind.unwrap_or(CollectionKind::Positional)
collection_kind.unwrap_or(CollectionKind::Positional)
};
(kind, items)
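
The restructured loop above relies on Rust's `let`-`else` syntax (stable since 1.65) to replace `match`-and-return or `if let`/`else` chains; the same rewrite appears in several other hunks of this commit. A minimal sketch with hypothetical names:

    // `let .. else` binds the success case and forces the `else` block to
    // diverge (return, break, continue, or panic), keeping the happy path flat.
    fn first_char(s: &str) -> char {
        let Some(c) = s.chars().next() else {
            return '\u{FFFD}'; // replacement character for empty input
        };
        c.to_ascii_uppercase()
    }
    // e.g. first_char("typst") == 'T', first_char("") == '\u{FFFD}'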

View File

@ -30,7 +30,6 @@ pub fn resolve_string(string: &str) -> EcoString {
None => out.push_str(s.from(start)),
}
}
_ => out.push_str(s.from(start)),
}
}
@ -66,10 +65,7 @@ pub fn resolve_raw(column: usize, backticks: usize, text: &str) -> RawKind {
/// Parse the lang tag and return it alongside the remaining inner raw text.
fn split_at_lang_tag(raw: &str) -> (&str, &str) {
let mut s = Scanner::new(raw);
(
s.eat_until(|c: char| c == '`' || c.is_whitespace() || is_newline(c)),
s.after(),
)
(s.eat_until(|c: char| c == '`' || c.is_whitespace() || is_newline(c)), s.after())
}
/// Trims raw text and splits it into lines.
@ -94,7 +90,7 @@ fn trim_and_split_raw(column: usize, mut raw: &str) -> (String, bool) {
.take_while(|c| c.is_whitespace())
.map(char::len_utf8)
.sum();
*line = &line[offset ..];
*line = &line[offset..];
}
let had_newline = lines.len() > 1;
@ -127,13 +123,13 @@ fn split_lines(text: &str) -> Vec<&str> {
s.eat_if('\n');
}
lines.push(&text[start .. end]);
lines.push(&text[start..end]);
start = s.cursor();
}
end = s.cursor();
}
lines.push(&text[start ..]);
lines.push(&text[start..]);
lines
}
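
For context, a custom `split_lines` is needed because std's `str::lines` only splits on `\n` (trimming a preceding `\r`), whereas the lexer's `is_newline` also treats a bare `\r` (and a few other Unicode line breaks) as a newline. A quick illustration of the std behaviour (hypothetical snippet, not from the diff):

    fn main() {
        // A bare `\r` does not start a new line for `lines()`.
        let text = "a\r\nb\rc\nd";
        let split: Vec<&str> = text.lines().collect();
        assert_eq!(split, ["a", "b\rc", "d"]);
    }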

View File

@ -123,13 +123,13 @@ impl Source {
self.lines.truncate(line + 1);
// Handle adjoining of \r and \n.
if self.text[.. start_byte].ends_with('\r') && with.starts_with('\n') {
if self.text[..start_byte].ends_with('\r') && with.starts_with('\n') {
self.lines.pop();
}
// Recalculate the line starts after the edit.
self.lines
.extend(lines(start_byte, start_utf16, &self.text[start_byte ..]));
.extend(lines(start_byte, start_utf16, &self.text[start_byte..]));
// Incrementally reparse the replaced range.
let mut root = std::mem::take(&mut self.root).into_inner();
@ -146,7 +146,7 @@ impl Source {
/// Get the length of the file in UTF-16 code units.
pub fn len_utf16(&self) -> usize {
let last = self.lines.last().unwrap();
last.utf16_idx + self.text[last.byte_idx ..].len_utf16()
last.utf16_idx + self.text[last.byte_idx..].len_utf16()
}
/// Get the length of the file in lines.
@ -167,7 +167,7 @@ impl Source {
pub fn byte_to_utf16(&self, byte_idx: usize) -> Option<usize> {
let line_idx = self.byte_to_line(byte_idx)?;
let line = self.lines.get(line_idx)?;
let head = self.text.get(line.byte_idx .. byte_idx)?;
let head = self.text.get(line.byte_idx..byte_idx)?;
Some(line.utf16_idx + head.len_utf16())
}
@ -188,7 +188,7 @@ impl Source {
pub fn byte_to_column(&self, byte_idx: usize) -> Option<usize> {
let line = self.byte_to_line(byte_idx)?;
let start = self.line_to_byte(line)?;
let head = self.get(start .. byte_idx)?;
let head = self.get(start..byte_idx)?;
Some(head.chars().count())
}
@ -202,7 +202,7 @@ impl Source {
)?;
let mut k = line.utf16_idx;
for (i, c) in self.text[line.byte_idx ..].char_indices() {
for (i, c) in self.text[line.byte_idx..].char_indices() {
if k >= utf16_idx {
return Some(line.byte_idx + i);
}
@ -212,7 +212,6 @@ impl Source {
(k == utf16_idx).then(|| self.text.len())
}
/// Return the byte position at which the given line starts.
pub fn line_to_byte(&self, line_idx: usize) -> Option<usize> {
self.lines.get(line_idx).map(|line| line.byte_idx)
@ -222,7 +221,7 @@ impl Source {
pub fn line_to_range(&self, line_idx: usize) -> Option<Range<usize>> {
let start = self.line_to_byte(line_idx)?;
let end = self.line_to_byte(line_idx + 1).unwrap_or(self.text.len());
Some(start .. end)
Some(start..end)
}
/// Return the byte index of the given (line, column) pair.
@ -237,7 +236,7 @@ impl Source {
let range = self.line_to_range(line_idx)?;
let line = self.get(range.clone())?;
let mut chars = line.chars();
for _ in 0 .. column_idx {
for _ in 0..column_idx {
chars.next();
}
Some(range.start + (line.len() - chars.as_str().len()))
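
The `len_utf16` calls in this file appear to come from a small helper in the codebase rather than from std (an assumption here); a rough equivalent, purely for illustration:

    // Count UTF-16 code units in a string slice (sketch, not the actual helper).
    fn len_utf16(text: &str) -> usize {
        text.chars().map(char::len_utf16).sum()
    }

    fn main() {
        // 'a' is one code unit; '😀' (U+1F600) needs a surrogate pair.
        assert_eq!(len_utf16("a😀"), 3);
    }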
@ -312,10 +311,7 @@ fn lines(
utf16_idx += 1;
}
Some(Line {
byte_idx: byte_offset + s.cursor(),
utf16_idx,
})
Some(Line { byte_idx: byte_offset + s.cursor(), utf16_idx })
})
}
@ -328,12 +324,15 @@ mod tests {
#[test]
fn test_source_file_new() {
let source = Source::detached(TEST);
assert_eq!(source.lines, [
assert_eq!(
source.lines,
[
Line { byte_idx: 0, utf16_idx: 0 },
Line { byte_idx: 7, utf16_idx: 6 },
Line { byte_idx: 15, utf16_idx: 12 },
Line { byte_idx: 18, utf16_idx: 15 },
]);
]
);
}
#[test]
@ -411,20 +410,20 @@ mod tests {
}
// Test inserting at the beginning.
test("abc\n", 0 .. 0, "hi\n", "hi\nabc\n");
test("\nabc", 0 .. 0, "hi\r", "hi\r\nabc");
test("abc\n", 0..0, "hi\n", "hi\nabc\n");
test("\nabc", 0..0, "hi\r", "hi\r\nabc");
// Test editing in the middle.
test(TEST, 4 .. 16, "", "ä\tc❌i\rjkl");
test(TEST, 4..16, "", "ä\tc❌i\rjkl");
// Test appending.
test("abc\ndef", 7 .. 7, "hi", "abc\ndefhi");
test("abc\ndef\n", 8 .. 8, "hi", "abc\ndef\nhi");
test("abc\ndef", 7..7, "hi", "abc\ndefhi");
test("abc\ndef\n", 8..8, "hi", "abc\ndef\nhi");
// Test appending with adjoining \r and \n.
test("abc\ndef\r", 8 .. 8, "\nghi", "abc\ndef\r\nghi");
test("abc\ndef\r", 8..8, "\nghi", "abc\ndef\r\nghi");
// Test removing everything.
test(TEST, 0 .. 21, "", "");
test(TEST, 0..21, "", "");
}
}

View File

@ -70,7 +70,7 @@ impl Span {
const DETACHED: u64 = 1;
/// The full range of numbers available to spans.
pub const FULL: Range<u64> = 2 .. (1 << Self::BITS);
pub const FULL: Range<u64> = 2..(1 << Self::BITS);
/// Create a new span from a source id and a unique number.
///

View File

@ -373,7 +373,7 @@ impl<'s> Tokens<'s> {
NodeKind::Raw(Arc::new(resolve_raw(
column,
backticks,
self.s.get(start .. end),
self.s.get(start..end),
)))
} else {
self.terminated = false;
@ -548,7 +548,7 @@ impl<'s> Tokens<'s> {
self.s.eat_while(char::is_ascii_alphanumeric);
}
let number = self.s.get(start .. suffix_start);
let number = self.s.get(start..suffix_start);
let suffix = self.s.from(suffix_start);
// Find out whether it is a simple number.
@ -558,9 +558,8 @@ impl<'s> Tokens<'s> {
}
}
let v = match number.parse::<f64>() {
Ok(v) => v,
Err(_) => return NodeKind::Error(ErrorPos::Full, "invalid number".into()),
let Ok(v) = number.parse::<f64>() else {
return NodeKind::Error(ErrorPos::Full, "invalid number".into());
};
match suffix {
@ -636,7 +635,7 @@ fn keyword(ident: &str) -> Option<NodeKind> {
#[inline]
fn column(string: &str, index: usize, offset: usize) -> usize {
let mut apply_offset = false;
let res = string[.. index]
let res = string[..index]
.char_indices()
.rev()
.take_while(|&(_, c)| !is_newline(c))
@ -653,7 +652,11 @@ fn column(string: &str, index: usize, offset: usize) -> usize {
apply_offset = true;
}
if apply_offset { res + offset } else { res }
if apply_offset {
res + offset
} else {
res
}
}
/// Whether this character denotes a newline.
@ -767,8 +770,8 @@ mod tests {
// - mode in which the suffix is applicable
// - the suffix string
// - the resulting suffix NodeKind
fn suffixes()
-> impl Iterator<Item = (char, Option<TokenMode>, &'static str, NodeKind)> {
fn suffixes(
) -> impl Iterator<Item = (char, Option<TokenMode>, &'static str, NodeKind)> {
[
// Whitespace suffixes.
(' ', None, " ", Space(0)),

View File

@ -68,7 +68,7 @@ impl EcoString {
let len = slice.len();
Self(if len <= LIMIT {
let mut buf = [0; LIMIT];
buf[.. len].copy_from_slice(slice.as_bytes());
buf[..len].copy_from_slice(slice.as_bytes());
Repr::Small { buf, len: len as u8 }
} else {
Repr::Large(Arc::new(s.into()))
@ -116,7 +116,7 @@ impl EcoString {
let prev = usize::from(*len);
let new = prev + string.len();
if new <= LIMIT {
buf[prev .. new].copy_from_slice(string.as_bytes());
buf[prev..new].copy_from_slice(string.as_bytes());
*len = new as u8;
} else {
let mut spilled = String::with_capacity(new);
@ -161,7 +161,7 @@ impl EcoString {
pub fn to_lowercase(&self) -> Self {
if let Repr::Small { mut buf, len } = self.0 {
if self.is_ascii() {
buf[.. usize::from(len)].make_ascii_lowercase();
buf[..usize::from(len)].make_ascii_lowercase();
return Self(Repr::Small { buf, len });
}
}
@ -173,7 +173,7 @@ impl EcoString {
pub fn to_uppercase(&self) -> Self {
if let Repr::Small { mut buf, len } = self.0 {
if self.is_ascii() {
buf[.. usize::from(len)].make_ascii_uppercase();
buf[..usize::from(len)].make_ascii_uppercase();
return Self(Repr::Small { buf, len });
}
}
@ -191,10 +191,10 @@ impl EcoString {
let prev = usize::from(len);
let new = prev.saturating_mul(n);
if new <= LIMIT {
let src = &buf[.. prev];
let src = &buf[..prev];
let mut buf = [0; LIMIT];
for i in 0 .. n {
buf[prev * i .. prev * (i + 1)].copy_from_slice(src);
for i in 0..n {
buf[prev * i..prev * (i + 1)].copy_from_slice(src);
}
return Self(Repr::Small { buf, len: new as u8 });
}
@ -217,7 +217,7 @@ impl Deref for EcoString {
// Furthermore, we still do the bounds-check on the len in case
// it gets corrupted somehow.
Repr::Small { buf, len } => unsafe {
std::str::from_utf8_unchecked(&buf[.. usize::from(*len)])
std::str::from_utf8_unchecked(&buf[..usize::from(*len)])
},
Repr::Large(string) => string.as_str(),
}
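
These hunks all touch the inline half of the small-string optimization: strings of at most `LIMIT` bytes live in a fixed stack buffer, longer ones spill to the heap. A stripped-down sketch of that idea (illustrative only; the inline capacity of 14 is assumed, and the real `EcoString` carries more machinery):

    const LIMIT: usize = 14; // assumed inline capacity for this sketch

    enum Repr {
        Small { buf: [u8; LIMIT], len: u8 },
        Large(String),
    }

    fn new_repr(s: &str) -> Repr {
        let len = s.len();
        if len <= LIMIT {
            let mut buf = [0; LIMIT];
            // Copy the bytes into the front of the fixed-size buffer.
            buf[..len].copy_from_slice(s.as_bytes());
            Repr::Small { buf, len: len as u8 }
        } else {
            Repr::Large(s.to_string())
        }
    }

    fn as_str(repr: &Repr) -> &str {
        match repr {
            // The buffer is only ever filled from valid UTF-8 in this sketch,
            // so the checked conversion cannot fail.
            Repr::Small { buf, len } => {
                std::str::from_utf8(&buf[..usize::from(*len)]).unwrap()
            }
            Repr::Large(string) => string.as_str(),
        }
    }

    fn main() {
        assert_eq!(as_str(&new_repr("typst")), "typst");
        assert_eq!(as_str(&new_repr("a longer string that spills")), "a longer string that spills");
    }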
@ -398,9 +398,9 @@ mod tests {
assert_eq!(EcoString::from("abc"), "abc");
// Test around the inline limit.
assert_eq!(EcoString::from(&ALPH[.. LIMIT - 1]), ALPH[.. LIMIT - 1]);
assert_eq!(EcoString::from(&ALPH[.. LIMIT]), ALPH[.. LIMIT]);
assert_eq!(EcoString::from(&ALPH[.. LIMIT + 1]), ALPH[.. LIMIT + 1]);
assert_eq!(EcoString::from(&ALPH[..LIMIT - 1]), ALPH[..LIMIT - 1]);
assert_eq!(EcoString::from(&ALPH[..LIMIT]), ALPH[..LIMIT]);
assert_eq!(EcoString::from(&ALPH[..LIMIT + 1]), ALPH[..LIMIT + 1]);
// Test heap string.
assert_eq!(EcoString::from(ALPH), ALPH);
@ -443,7 +443,7 @@ mod tests {
assert_eq!(v, "Hello World");
// Remove one-by-one.
for _ in 0 .. 10 {
for _ in 0..10 {
v.pop();
}
@ -462,7 +462,7 @@ mod tests {
fn test_str_index() {
// Test that we can use the index syntax.
let v = EcoString::from("abc");
assert_eq!(&v[.. 2], "ab");
assert_eq!(&v[..2], "ab");
}
#[test]

View File

@ -5,7 +5,7 @@
//! pointer metadata APIs are stable, we should definitely move to them:
//! <https://github.com/rust-lang/rust/issues/81513>
use std::alloc;
use std::alloc::Layout;
use std::mem;
/// Create a fat pointer from a data address and a vtable address.
@ -15,12 +15,8 @@ use std::mem;
/// to a value whose type implements the trait of `T` and the `vtable` must have
/// been extracted with [`vtable`].
pub unsafe fn from_raw_parts<T: ?Sized>(data: *const (), vtable: *const ()) -> *const T {
debug_assert_eq!(
alloc::Layout::new::<*const T>(),
alloc::Layout::new::<FatPointer>(),
);
let fat = FatPointer { data, vtable };
debug_assert_eq!(Layout::new::<*const T>(), Layout::new::<FatPointer>());
mem::transmute_copy::<FatPointer, *const T>(&fat)
}
@ -31,12 +27,8 @@ pub unsafe fn from_raw_parts<T: ?Sized>(data: *const (), vtable: *const ()) -> *
/// to a value whose type implements the trait of `T` and the `vtable` must have
/// been extracted with [`vtable`].
pub unsafe fn from_raw_parts_mut<T: ?Sized>(data: *mut (), vtable: *const ()) -> *mut T {
debug_assert_eq!(
alloc::Layout::new::<*mut T>(),
alloc::Layout::new::<FatPointer>(),
);
let fat = FatPointer { data, vtable };
debug_assert_eq!(Layout::new::<*mut T>(), Layout::new::<FatPointer>());
mem::transmute_copy::<FatPointer, *mut T>(&fat)
}
@ -45,11 +37,7 @@ pub unsafe fn from_raw_parts_mut<T: ?Sized>(data: *mut (), vtable: *const ()) ->
/// # Safety
/// Must only be called when `T` is a `dyn Trait`.
pub unsafe fn vtable<T: ?Sized>(ptr: *const T) -> *const () {
debug_assert_eq!(
alloc::Layout::new::<*const T>(),
alloc::Layout::new::<FatPointer>(),
);
debug_assert_eq!(Layout::new::<*const T>(), Layout::new::<FatPointer>());
mem::transmute_copy::<*const T, FatPointer>(&ptr).vtable
}
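
The collapsed `debug_assert_eq!` lines compare type layouts: a raw trait-object pointer is expected to have the same size and alignment as the two-word `FatPointer` struct. A standalone sketch of that check (assuming the struct uses the same `#[repr(C)]` data/vtable pair as in this module):

    use std::alloc::Layout;
    use std::fmt::Debug;

    #[allow(dead_code)]
    #[repr(C)]
    struct FatPointer {
        data: *const (),
        vtable: *const (),
    }

    fn main() {
        // A `*const dyn Trait` is a (data, vtable) pair, so its layout
        // should match the explicit two-pointer struct above.
        assert_eq!(
            Layout::new::<*const dyn Debug>(),
            Layout::new::<FatPointer>(),
        );
    }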

View File

@ -2,13 +2,13 @@
pub mod fat;
pub use buffer::Buffer;
pub use eco::{format_eco, EcoString};
#[macro_use]
mod eco;
mod buffer;
pub use buffer::Buffer;
pub use eco::{format_eco, EcoString};
use std::any::TypeId;
use std::fmt::{self, Debug, Formatter};
use std::hash::Hash;

View File

@ -58,7 +58,7 @@ fn bench_parse(iai: &mut Iai) {
fn bench_edit(iai: &mut Iai) {
let mut source = Source::detached(TEXT);
iai.run(|| black_box(source.edit(1168 .. 1171, "_Uhr_")));
iai.run(|| black_box(source.edit(1168..1171, "_Uhr_")));
}
fn bench_highlight(iai: &mut Iai) {
@ -66,7 +66,7 @@ fn bench_highlight(iai: &mut Iai) {
iai.run(|| {
typst::syntax::highlight::highlight_categories(
source.root(),
0 .. source.len_bytes(),
0..source.len_bytes(),
&mut |_, _| {},
)
});

View File

@ -75,13 +75,8 @@ fn main() {
let pdf_path =
args.pdf.then(|| Path::new(PDF_DIR).join(path).with_extension("pdf"));
ok += test(
&mut world,
&src_path,
&png_path,
&ref_path,
pdf_path.as_deref(),
) as usize;
ok += test(&mut world, &src_path, &png_path, &ref_path, pdf_path.as_deref())
as usize;
}
if len > 1 {
@ -153,10 +148,8 @@ fn config() -> Config {
let mut styles = typst_library::styles();
styles.set(PageNode::WIDTH, Smart::Custom(Abs::pt(120.0).into()));
styles.set(PageNode::HEIGHT, Smart::Auto);
styles.set(
PageNode::MARGINS,
Sides::splat(Some(Smart::Custom(Abs::pt(10.0).into()))),
);
styles
.set(PageNode::MARGINS, Sides::splat(Some(Smart::Custom(Abs::pt(10.0).into()))));
styles.set(TextNode::SIZE, TextSize(Abs::pt(10.0).into()));
// Hook up helpers into the global scope.
@ -217,7 +210,7 @@ impl TestWorld {
.filter(|entry| entry.file_type().is_file())
{
let buffer: Buffer = fs::read(entry.path()).unwrap().into();
for index in 0 .. ttf_parser::fonts_in_collection(&buffer).unwrap_or(1) {
for index in 0..ttf_parser::fonts_in_collection(&buffer).unwrap_or(1) {
fonts.push(Font::new(buffer.clone(), index).unwrap())
}
}
@ -480,18 +473,12 @@ fn parse_metadata(source: &Source) -> (Option<bool>, Vec<(Range<usize>, String)>
compare_ref = Some(true);
}
let rest = if let Some(rest) = line.strip_prefix("// Error: ") {
rest
} else {
continue;
};
fn num(s: &mut Scanner) -> usize {
s.eat_while(char::is_numeric).parse().unwrap()
}
let comments =
lines[i ..].iter().take_while(|line| line.starts_with("//")).count();
lines[i..].iter().take_while(|line| line.starts_with("//")).count();
let pos = |s: &mut Scanner| -> usize {
let first = num(s) - 1;
@ -501,10 +488,11 @@ fn parse_metadata(source: &Source) -> (Option<bool>, Vec<(Range<usize>, String)>
source.line_column_to_byte(line, column).unwrap()
};
let Some(rest) = line.strip_prefix("// Error: ") else { continue };
let mut s = Scanner::new(rest);
let start = pos(&mut s);
let end = if s.eat_if('-') { pos(&mut s) } else { start };
let range = start .. end;
let range = start..end;
errors.push((range, s.after().trim().to_string()));
}
@ -582,10 +570,7 @@ fn test_reparse(text: &str, i: usize, rng: &mut LinearShift) -> bool {
);
println!(" Expected reference tree:\n{ref_root:#?}\n");
println!(" Found incremental tree:\n{incr_root:#?}");
println!(
" Full source ({}):\n\"{edited_src:?}\"",
edited_src.len()
);
println!(" Full source ({}):\n\"{edited_src:?}\"", edited_src.len());
}
ok &= test_spans(ref_root);
@ -599,23 +584,23 @@ fn test_reparse(text: &str, i: usize, rng: &mut LinearShift) -> bool {
};
let insertions = (text.len() as f64 / 400.0).ceil() as usize;
for _ in 0 .. insertions {
let supplement = supplements[pick(0 .. supplements.len())];
let start = pick(0 .. text.len());
let end = pick(start .. text.len());
for _ in 0..insertions {
let supplement = supplements[pick(0..supplements.len())];
let start = pick(0..text.len());
let end = pick(start..text.len());
if !text.is_char_boundary(start) || !text.is_char_boundary(end) {
continue;
}
ok &= apply(start .. end, supplement);
ok &= apply(start..end, supplement);
}
let source = Source::detached(text);
let leafs = source.root().leafs();
let start = source.range(leafs[pick(0 .. leafs.len())].span()).start;
let supplement = supplements[pick(0 .. supplements.len())];
ok &= apply(start .. start, supplement);
let start = source.range(leafs[pick(0..leafs.len())].span()).start;
let supplement = supplements[pick(0..supplements.len())];
ok &= apply(start..start, supplement);
ok
}
@ -623,24 +608,21 @@ fn test_reparse(text: &str, i: usize, rng: &mut LinearShift) -> bool {
/// Ensure that all spans are properly ordered (and therefore unique).
#[track_caller]
fn test_spans(root: &SyntaxNode) -> bool {
test_spans_impl(root, 0 .. u64::MAX)
test_spans_impl(root, 0..u64::MAX)
}
#[track_caller]
fn test_spans_impl(node: &SyntaxNode, within: Range<u64>) -> bool {
if !within.contains(&node.span().number()) {
eprintln!(" Node: {node:#?}");
eprintln!(
" Wrong span order: {} not in {within:?} ❌",
node.span().number(),
);
eprintln!(" Wrong span order: {} not in {within:?}", node.span().number(),);
}
let start = node.span().number() + 1;
let mut children = node.children().peekable();
while let Some(child) = children.next() {
let end = children.peek().map_or(within.end, |next| next.span().number());
if !test_spans_impl(child, start .. end) {
if !test_spans_impl(child, start..end) {
return false;
}
}