mirror of https://github.com/typst/typst
synced 2025-08-08 04:07:56 +08:00
Use rustc-hash for hash maps and sets (#6678)
This commit is contained in:
parent
59243dadbb
commit
88dfe4d276
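The change below is mechanical throughout: std's HashMap/HashSet (and indexmap's IndexMap) keep their APIs but swap in rustc-hash's faster, non-cryptographic FxHasher. A minimal sketch of the pattern, not part of the diff itself, assuming rustc-hash 2.x (which exports FxBuildHasher) and the indexmap crate:

    use indexmap::IndexMap;
    use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};

    fn main() {
        // FxHashMap<K, V> is an alias for HashMap<K, V, FxBuildHasher>. The
        // hasher is a non-default type parameter, so these are constructed
        // with default() rather than new() -- hence the diff's systematic
        // HashMap::new() -> FxHashMap::default() rewrites.
        let mut map: FxHashMap<&str, u32> = FxHashMap::default();
        map.insert("typst", 1);

        let mut set: FxHashSet<&str> = FxHashSet::default();
        set.insert("rustc-hash");

        // indexmap takes the same hasher via its third type parameter.
        let mut ordered: IndexMap<&str, u32, FxBuildHasher> = IndexMap::default();
        ordered.insert("key", 2);

        assert_eq!(map["typst"], 1);
        assert!(set.contains("rustc-hash"));
        assert_eq!(ordered["key"], 2);
    }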
Cargo.lock (generated, 13 changed lines)
@@ -2906,6 +2906,7 @@ version = "0.13.1"
 dependencies = [
  "comemo",
  "ecow",
+ "rustc-hash",
  "typst-eval",
  "typst-html",
  "typst-layout",
@@ -2941,6 +2942,7 @@ dependencies = [
  "parking_lot",
  "pathdiff",
  "rayon",
+ "rustc-hash",
  "same-file",
  "self-replace",
  "semver",
@@ -2981,6 +2983,7 @@ dependencies = [
  "ecow",
  "heck",
  "pulldown-cmark",
+ "rustc-hash",
  "serde",
  "serde_json",
  "serde_yaml 0.9.34+deprecated",
@@ -3003,6 +3006,7 @@ dependencies = [
  "comemo",
  "ecow",
  "indexmap 2.7.1",
+ "rustc-hash",
  "stacker",
  "toml",
  "typst-library",
@@ -3033,6 +3037,7 @@ dependencies = [
  "comemo",
  "ecow",
  "palette",
+ "rustc-hash",
  "time",
  "typst-assets",
  "typst-library",
@@ -3051,6 +3056,7 @@ dependencies = [
  "ecow",
  "once_cell",
  "pathdiff",
+ "rustc-hash",
  "serde",
  "typst",
  "typst-assets",
@@ -3100,6 +3106,7 @@ dependencies = [
  "icu_segmenter",
  "kurbo",
  "memchr",
+ "rustc-hash",
  "rustybuzz",
  "smallvec",
  "ttf-parser",
@@ -3151,6 +3158,7 @@ dependencies = [
  "regex-syntax",
  "roxmltree",
  "rust_decimal",
+ "rustc-hash",
  "rustybuzz",
  "serde",
  "serde_json",
@@ -3200,6 +3208,7 @@ dependencies = [
  "infer",
  "krilla",
  "krilla-svg",
+ "rustc-hash",
  "serde",
  "typst-assets",
  "typst-library",
@@ -3253,6 +3262,7 @@ dependencies = [
  "flate2",
  "hayro",
  "image",
+ "rustc-hash",
  "ttf-parser",
  "typst-assets",
  "typst-library",
@@ -3268,6 +3278,7 @@ name = "typst-syntax"
 version = "0.13.1"
 dependencies = [
  "ecow",
+ "rustc-hash",
  "serde",
  "toml",
  "typst-timing",
@@ -3290,6 +3301,7 @@ dependencies = [
  "parking_lot",
  "rayon",
  "regex",
+ "rustc-hash",
  "tiny-skia",
  "typst",
  "typst-assets",
@@ -3321,6 +3333,7 @@ dependencies = [
  "once_cell",
  "portable-atomic",
  "rayon",
+ "rustc-hash",
  "siphasher",
  "thin-vec",
  "unicode-math-class",

@@ -59,6 +59,7 @@ fastrand = "2.3"
 flate2 = "1"
 fontdb = { version = "0.23", default-features = false }
 fs_extra = "1.3"
+rustc-hash = "2.1"
 glidesort = "0.1.2"
 hayagriva = "0.8.1"
 hayro-syntax = { git = "https://github.com/LaurenzV/hayro", rev = "e701f95" }

@@ -41,6 +41,7 @@ open = { workspace = true }
 parking_lot = { workspace = true }
 pathdiff = { workspace = true }
 rayon = { workspace = true }
+rustc-hash = { workspace = true }
 same-file = { workspace = true }
 self-replace = { workspace = true, optional = true }
 semver = { workspace = true }

@@ -1,4 +1,3 @@
-use std::collections::{HashMap, HashSet};
 use std::io::{self, Write};
 use std::iter;
 use std::path::PathBuf;
@@ -9,6 +8,7 @@ use codespan_reporting::term::termcolor::WriteColor;
 use codespan_reporting::term::{self, termcolor};
 use ecow::eco_format;
 use notify::{Event, RecommendedWatcher, RecursiveMode, Watcher as _};
+use rustc_hash::{FxHashMap, FxHashSet};
 use same_file::is_same_file;
 use typst::diag::{StrResult, bail, warning};
 use typst::syntax::Span;
@@ -91,10 +91,10 @@ struct Watcher {
     /// Keeps track of which paths are watched via `watcher`. The boolean is
     /// used during updating for mark-and-sweep garbage collection of paths we
     /// should unwatch.
-    watched: HashMap<PathBuf, bool>,
+    watched: FxHashMap<PathBuf, bool>,
     /// A set of files that should be watched, but don't exist. We manually poll
     /// for those.
-    missing: HashSet<PathBuf>,
+    missing: FxHashSet<PathBuf>,
 }
 
 impl Watcher {
@@ -127,8 +127,8 @@ impl Watcher {
             output,
             rx,
             watcher,
-            watched: HashMap::new(),
-            missing: HashSet::new(),
+            watched: FxHashMap::default(),
+            missing: FxHashSet::default(),
         })
     }
 

@@ -1,4 +1,3 @@
-use std::collections::HashMap;
 use std::io::Read;
 use std::path::{Path, PathBuf};
 use std::sync::{LazyLock, OnceLock};
@@ -7,6 +6,7 @@ use std::{fmt, fs, io, mem};
 use chrono::{DateTime, Datelike, FixedOffset, Local, Utc};
 use ecow::{EcoString, eco_format};
 use parking_lot::Mutex;
+use rustc_hash::FxHashMap;
 use typst::diag::{FileError, FileResult};
 use typst::foundations::{Bytes, Datetime, Dict, IntoValue};
 use typst::syntax::{FileId, Lines, Source, VirtualPath};
@@ -41,7 +41,7 @@ pub struct SystemWorld {
     /// Locations of and storage for lazily loaded fonts.
     fonts: Vec<FontSlot>,
     /// Maps file ids to source files and buffers.
-    slots: Mutex<HashMap<FileId, FileSlot>>,
+    slots: Mutex<FxHashMap<FileId, FileSlot>>,
     /// Holds information about where packages are stored.
     package_storage: PackageStorage,
     /// The current datetime if requested. This is stored here to ensure it is
@@ -139,7 +139,7 @@ impl SystemWorld {
             library: LazyHash::new(library),
             book: LazyHash::new(fonts.book),
             fonts: fonts.fonts,
-            slots: Mutex::new(HashMap::new()),
+            slots: Mutex::new(FxHashMap::default()),
             package_storage: package::storage(&world_args.package),
             now,
         })

@@ -21,6 +21,7 @@ typst-utils = { workspace = true }
 comemo = { workspace = true }
 ecow = { workspace = true }
 indexmap = { workspace = true }
+rustc-hash = { workspace = true }
 toml = { workspace = true }
 unicode-segmentation = { workspace = true }
 

@@ -1,6 +1,5 @@
-use std::collections::HashSet;
-
 use ecow::eco_format;
+use rustc_hash::FxHashSet;
 use typst_library::diag::{At, SourceDiagnostic, SourceResult, bail, error};
 use typst_library::foundations::{Array, Dict, Value};
 use typst_syntax::ast::{self, AstNode};
@@ -137,7 +136,7 @@ where
     F: Fn(&mut Vm, ast::Expr, Value) -> SourceResult<()>,
 {
     let mut sink = None;
-    let mut used = HashSet::new();
+    let mut used = FxHashSet::default();
 
     for p in destruct.items() {
         match p {

@@ -246,7 +246,7 @@ impl Eval for ast::Dict<'_> {
     type Output = Dict;
 
     fn eval(self, vm: &mut Vm) -> SourceResult<Self::Output> {
-        let mut map = indexmap::IndexMap::new();
+        let mut map = indexmap::IndexMap::default();
         let mut invalid_keys = eco_vec![];
 
         for item in self.items() {

@@ -24,6 +24,7 @@ bumpalo = { workspace = true }
 comemo = { workspace = true }
 ecow = { workspace = true }
 palette = { workspace = true }
+rustc-hash = { workspace = true }
 time = { workspace = true }
 
 [lints]

@@ -1,7 +1,7 @@
-use std::collections::HashSet;
 use std::num::NonZeroUsize;
 
 use comemo::{Tracked, TrackedMut};
+use rustc_hash::FxHashSet;
 use typst_library::World;
 use typst_library::diag::{SourceResult, bail};
 use typst_library::engine::{Engine, Route, Sink, Traced};
@@ -87,7 +87,7 @@ fn html_document_impl(
         children.iter().copied(),
     )?;
 
-    let mut link_targets = HashSet::new();
+    let mut link_targets = FxHashSet::default();
     let mut introspector = introspect_html(&output, &mut link_targets);
     let mut root = root_element(output, &info)?;
     crate::link::identify_link_targets(&mut root, &mut introspector, link_targets);
@@ -99,12 +99,12 @@ fn html_document_impl(
 #[typst_macros::time(name = "introspect html")]
 fn introspect_html(
     output: &[HtmlNode],
-    link_targets: &mut HashSet<Location>,
+    link_targets: &mut FxHashSet<Location>,
 ) -> Introspector {
     fn discover(
         builder: &mut IntrospectorBuilder,
         sink: &mut Vec<(Content, Position)>,
-        link_targets: &mut HashSet<Location>,
+        link_targets: &mut FxHashSet<Location>,
         nodes: &[HtmlNode],
     ) {
         for node in nodes {

@@ -1,7 +1,8 @@
-use std::collections::{HashMap, HashSet, VecDeque};
+use std::collections::VecDeque;
 
 use comemo::Track;
 use ecow::{EcoString, eco_format};
+use rustc_hash::{FxHashMap, FxHashSet};
 use typst_library::foundations::{Label, NativeElement};
 use typst_library::introspection::{Introspector, Location, Tag};
 use typst_library::layout::{Frame, FrameItem, Point};
@@ -16,7 +17,7 @@ use crate::{HtmlElement, HtmlNode, attr, tag};
 /// in favor of the query in `identify_link_targets`. For the time being, some
 /// links are created without existence of a `LinkElem`, so this is
 /// unfortunately necessary.
-pub fn introspect_frame_links(frame: &Frame, targets: &mut HashSet<Location>) {
+pub fn introspect_frame_links(frame: &Frame, targets: &mut FxHashSet<Location>) {
     for (_, item) in frame.items() {
         match item {
             FrameItem::Link(Destination::Location(loc), _) => {
@@ -35,7 +36,7 @@ pub fn introspect_frame_links(frame: &Frame, targets: &mut HashSet<Location>) {
 pub fn identify_link_targets(
     root: &mut HtmlElement,
     introspector: &mut Introspector,
-    mut targets: HashSet<Location>,
+    mut targets: FxHashSet<Location>,
 ) {
     // Query for all links with an intra-doc (i.e. `Location`) destination to
     // know what needs IDs.
@@ -72,7 +73,7 @@ pub fn identify_link_targets(
 /// Traverses a list of nodes.
 fn traverse(
     work: &mut Work,
-    targets: &HashSet<Location>,
+    targets: &FxHashSet<Location>,
     identificator: &mut Identificator<'_>,
     nodes: &mut Vec<HtmlNode>,
 ) {
@@ -144,7 +145,7 @@ fn traverse(
 /// Traverses a frame embedded in HTML.
 fn traverse_frame(
     work: &mut Work,
-    targets: &HashSet<Location>,
+    targets: &FxHashSet<Location>,
     identificator: &mut Identificator<'_>,
     frame: &Frame,
     link_points: &mut Vec<(Point, EcoString)>,
@@ -174,13 +175,13 @@ struct Work {
     /// now.
     queue: VecDeque<(Location, Option<Label>)>,
     /// The resulting mapping from element location's to HTML IDs.
-    ids: HashMap<Location, EcoString>,
+    ids: FxHashMap<Location, EcoString>,
 }
 
 impl Work {
     /// Sets up.
     fn new() -> Self {
-        Self { queue: VecDeque::new(), ids: HashMap::new() }
+        Self { queue: VecDeque::new(), ids: FxHashMap::default() }
     }
 
     /// Marks the element with the given location and label as in need of an
@@ -215,7 +216,7 @@ impl Work {
 struct Identificator<'a> {
     introspector: &'a Introspector,
     loc_counter: usize,
-    label_counter: HashMap<Label, usize>,
+    label_counter: FxHashMap<Label, usize>,
 }
 
 impl<'a> Identificator<'a> {
@@ -224,7 +225,7 @@ impl<'a> Identificator<'a> {
         Self {
             introspector,
             loc_counter: 0,
-            label_counter: HashMap::new(),
+            label_counter: FxHashMap::default(),
         }
     }
 

@@ -18,6 +18,7 @@ typst-eval = { workspace = true }
 comemo = { workspace = true }
 ecow = { workspace = true }
 pathdiff = { workspace = true }
+rustc-hash = { workspace = true }
 serde = { workspace = true }
 unscanny = { workspace = true }
 

@@ -1,7 +1,6 @@
-use std::collections::HashSet;
-
 use comemo::Track;
 use ecow::{EcoString, EcoVec, eco_vec};
+use rustc_hash::FxHashSet;
 use typst::foundations::{Label, Styles, Value};
 use typst::layout::PagedDocument;
 use typst::model::{BibliographyElem, FigureElem};
@@ -76,7 +75,7 @@ pub fn analyze_labels(
     document: &PagedDocument,
 ) -> (Vec<(Label, Option<EcoString>)>, usize) {
     let mut output = vec![];
-    let mut seen_labels = HashSet::new();
+    let mut seen_labels = FxHashSet::default();
 
     // Labels in the document.
     for elem in document.introspector.all() {

@@ -1,8 +1,9 @@
 use std::cmp::Reverse;
-use std::collections::{BTreeMap, HashSet};
+use std::collections::BTreeMap;
 use std::ffi::OsStr;
 
 use ecow::{EcoString, eco_format};
+use rustc_hash::FxHashSet;
 use serde::{Deserialize, Serialize};
 use typst::foundations::{
     AutoValue, CastInfo, Func, Label, NoneValue, ParamInfo, Repr, StyleChain, Styles,
@@ -739,7 +740,7 @@ fn param_completions<'a>(
 
     // Determine which arguments are already present.
     let mut existing_positional = 0;
-    let mut existing_named = HashSet::new();
+    let mut existing_named = FxHashSet::default();
     for arg in args.items() {
         match arg {
             ast::Arg::Pos(_) => {
@@ -1116,7 +1117,7 @@ struct CompletionContext<'a> {
     explicit: bool,
     from: usize,
     completions: Vec<Completion>,
-    seen_casts: HashSet<u128>,
+    seen_casts: FxHashSet<u128>,
 }
 
 impl<'a> CompletionContext<'a> {
@@ -1141,7 +1142,7 @@ impl<'a> CompletionContext<'a> {
             explicit,
             from: cursor,
             completions: vec![],
-            seen_casts: HashSet::new(),
+            seen_casts: FxHashSet::default(),
         })
     }
 

@@ -1,8 +1,8 @@
 use std::borrow::Borrow;
-use std::collections::HashMap;
 use std::sync::Arc;
 
 use ecow::EcoString;
+use rustc_hash::FxHashMap;
 use typst::diag::{FileError, FileResult};
 use typst::foundations::{Bytes, Datetime, Smart};
 use typst::layout::{Abs, Margin, PageElem};
@@ -137,8 +137,8 @@ impl IdeWorld for TestWorld {
 /// Test-specific files.
 #[derive(Default, Clone)]
 struct TestFiles {
-    assets: HashMap<FileId, Bytes>,
-    sources: HashMap<FileId, Source>,
+    assets: FxHashMap<FileId, Bytes>,
+    sources: FxHashMap<FileId, Source>,
 }
 
 /// Shared foundation of all test worlds.

@@ -32,6 +32,7 @@ icu_provider_blob = { workspace = true }
 icu_segmenter = { workspace = true }
 kurbo = { workspace = true }
 memchr = { workspace = true }
+rustc-hash = { workspace = true }
 rustybuzz = { workspace = true }
 smallvec = { workspace = true }
 ttf-parser = { workspace = true }

@@ -7,13 +7,13 @@ mod distribute;
 
 pub(crate) use self::block::unbreakable_pod;
 
-use std::collections::HashSet;
 use std::num::NonZeroUsize;
 use std::rc::Rc;
 
 use bumpalo::Bump;
 use comemo::{Track, Tracked, TrackedMut};
 use ecow::EcoVec;
+use rustc_hash::FxHashSet;
 use typst_library::World;
 use typst_library::diag::{At, SourceDiagnostic, SourceResult, bail};
 use typst_library::engine::{Engine, Route, Sink, Traced};
@@ -303,7 +303,7 @@ struct Work<'a, 'b> {
     /// Identifies floats and footnotes that can be skipped if visited because
     /// they were already handled and incorporated as column or page level
     /// insertions.
-    skips: Rc<HashSet<Location>>,
+    skips: Rc<FxHashSet<Location>>,
 }
 
 impl<'a, 'b> Work<'a, 'b> {
@@ -316,7 +316,7 @@ impl<'a, 'b> Work<'a, 'b> {
             footnotes: EcoVec::new(),
             footnote_spill: None,
             tags: EcoVec::new(),
-            skips: Rc::new(HashSet::new()),
+            skips: Rc::new(FxHashSet::default()),
         }
     }
 

@@ -1,5 +1,4 @@
-use std::collections::HashSet;
-
+use rustc_hash::FxHashSet;
 use typst_library::foundations::StyleChain;
 use typst_library::introspection::{Locator, SplitLocator, Tag, TagElem};
 use typst_library::layout::{PagebreakElem, Parity};
@@ -134,7 +133,7 @@ fn migrate_unterminated_tags(children: &mut [Pair], mid: usize) -> usize {
 
     // Determine the set of tag locations which we won't migrate (because they
     // are terminated).
-    let excluded: HashSet<_> = children[start..mid]
+    let excluded: FxHashSet<_> = children[start..mid]
         .iter()
         .filter_map(|(c, _)| match c.to_packed::<TagElem>()?.tag {
             Tag::Start(_) => None,

@@ -50,6 +50,7 @@ regex = { workspace = true }
 regex-syntax = { workspace = true }
 roxmltree = { workspace = true }
 rust_decimal = { workspace = true }
+rustc-hash = { workspace = true }
 rustybuzz = { workspace = true }
 serde = { workspace = true }
 serde_json = { workspace = true }

@@ -1,11 +1,11 @@
 //! Definition of the central compilation context.
 
-use std::collections::HashSet;
 use std::sync::atomic::{AtomicUsize, Ordering};
 
 use comemo::{Track, Tracked, TrackedMut, Validate};
 use ecow::EcoVec;
 use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator};
+use rustc_hash::FxHashSet;
 use typst_syntax::{FileId, Span};
 
 use crate::World;
@@ -135,7 +135,7 @@ pub struct Sink {
     /// Warnings emitted during iteration.
     warnings: EcoVec<SourceDiagnostic>,
     /// Hashes of all warning's spans and messages for warning deduplication.
-    warnings_set: HashSet<u128>,
+    warnings_set: FxHashSet<u128>,
     /// A sequence of traced values for a span.
     values: EcoVec<(Value, Option<Styles>)>,
 }

@@ -5,6 +5,7 @@ use std::sync::Arc;
 
 use ecow::{EcoString, eco_format};
 use indexmap::IndexMap;
+use rustc_hash::FxBuildHasher;
 use serde::{Deserialize, Deserializer, Serialize, Serializer};
 use typst_syntax::is_ident;
 use typst_utils::ArcExt;
@@ -20,7 +21,7 @@ use crate::foundations::{
 macro_rules! __dict {
     ($($key:expr => $value:expr),* $(,)?) => {{
         #[allow(unused_mut)]
-        let mut map = $crate::foundations::IndexMap::new();
+        let mut map = $crate::foundations::IndexMap::default();
         $(map.insert($key.into(), $crate::foundations::IntoValue::into_value($value));)*
         $crate::foundations::Dict::from(map)
     }};
@@ -66,7 +67,7 @@ pub use crate::__dict as dict;
 /// ```
 #[ty(scope, cast, name = "dictionary")]
 #[derive(Default, Clone, PartialEq)]
-pub struct Dict(Arc<IndexMap<Str, Value>>);
+pub struct Dict(Arc<IndexMap<Str, Value, FxBuildHasher>>);
 
 impl Dict {
     /// Create a new, empty dictionary.
@@ -343,7 +344,7 @@ impl<'de> Deserialize<'de> for Dict {
     where
         D: Deserializer<'de>,
     {
-        Ok(IndexMap::<Str, Value>::deserialize(deserializer)?.into())
+        Ok(IndexMap::<Str, Value, FxBuildHasher>::deserialize(deserializer)?.into())
     }
 }
 
@@ -377,8 +378,8 @@ impl<'a> IntoIterator for &'a Dict {
     }
 }
 
-impl From<IndexMap<Str, Value>> for Dict {
-    fn from(map: IndexMap<Str, Value>) -> Self {
+impl From<IndexMap<Str, Value, FxBuildHasher>> for Dict {
+    fn from(map: IndexMap<Str, Value, FxBuildHasher>) -> Self {
         Self(Arc::new(map))
     }
 }

@@ -4,6 +4,7 @@ use std::hash::{Hash, Hasher};
 use ecow::{EcoString, eco_format};
 use indexmap::IndexMap;
 use indexmap::map::Entry;
+use rustc_hash::FxBuildHasher;
 use typst_syntax::Span;
 
 use crate::diag::{DeprecationSink, HintedStrResult, HintedString, StrResult, bail};
@@ -102,7 +103,7 @@ impl<'a> Scopes<'a> {
 /// A map from binding names to values.
 #[derive(Default, Clone)]
 pub struct Scope {
-    map: IndexMap<EcoString, Binding>,
+    map: IndexMap<EcoString, Binding, FxBuildHasher>,
     deduplicate: bool,
     category: Option<Category>,
 }

@@ -1,11 +1,11 @@
 use std::any::{Any, TypeId};
-use std::collections::HashMap;
 use std::fmt::{self, Debug, Formatter};
 use std::hash::{Hash, Hasher};
 use std::{mem, ptr};
 
 use comemo::Tracked;
 use ecow::{EcoString, EcoVec, eco_vec};
+use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
 use typst_syntax::Span;
 use typst_utils::LazyHash;
@@ -938,7 +938,7 @@ fn block_wrong_type(func: Element, id: u8, value: &Block) -> ! {
 
 /// Holds native show rules.
 pub struct NativeRuleMap {
-    rules: HashMap<(Element, Target), NativeShowRule>,
+    rules: FxHashMap<(Element, Target), NativeShowRule>,
 }
 
 /// The signature of a native show rule.
@@ -956,7 +956,7 @@ impl NativeRuleMap {
     ///
     /// Contains built-in rules for a few special elements.
     pub fn new() -> Self {
-        let mut rules = Self { rules: HashMap::new() };
+        let mut rules = Self { rules: FxHashMap::default() };
 
         // ContextElem is as special as SequenceElem and StyledElem and could,
         // in theory, also be special cased in realization.

@@ -1,9 +1,10 @@
-use std::collections::{BTreeSet, HashMap};
+use std::collections::BTreeSet;
 use std::fmt::{self, Debug, Display, Formatter, Write};
 use std::sync::Arc;
 
 use codex::ModifierSet;
 use ecow::{EcoString, eco_format};
+use rustc_hash::FxHashMap;
 use serde::{Serialize, Serializer};
 use typst_syntax::{Span, Spanned, is_ident};
 use typst_utils::hash128;
@@ -221,7 +222,7 @@ impl Symbol {
 
         // Maps from canonicalized 128-bit hashes to indices of variants we've
        // seen before.
-        let mut seen = HashMap::<u128, usize>::new();
+        let mut seen = FxHashMap::<u128, usize>::default();
 
         // A list of modifiers, cleared & reused in each iteration.
         let mut modifiers = Vec::new();

@@ -1,10 +1,11 @@
-use std::collections::{BTreeSet, HashMap, HashSet};
+use std::collections::BTreeSet;
 use std::fmt::{self, Debug, Formatter};
 use std::hash::Hash;
 use std::num::NonZeroUsize;
 use std::sync::RwLock;
 
 use ecow::{EcoString, EcoVec};
+use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::SmallVec;
 use typst_utils::NonZeroExt;
 
@@ -31,14 +32,14 @@ pub struct Introspector {
     keys: MultiMap<u128, Location>,
 
     /// Accelerates lookup of elements by location.
-    locations: HashMap<Location, usize>,
+    locations: FxHashMap<Location, usize>,
     /// Accelerates lookup of elements by label.
     labels: MultiMap<Label, usize>,
 
     /// Maps from element locations to assigned HTML IDs. This used to support
     /// intra-doc links in HTML export. In paged export, is is simply left
     /// empty and [`Self::html_id`] is not used.
-    html_ids: HashMap<Location, EcoString>,
+    html_ids: FxHashMap<Location, EcoString>,
 
     /// Caches queries done on the introspector. This is important because
     /// even if all top-level queries are distinct, they often have shared
@@ -63,7 +64,7 @@ impl Introspector {
 
     /// Enriches an existing introspector with HTML IDs, which were assigned
     /// to the DOM in a post-processing step.
-    pub fn set_html_ids(&mut self, html_ids: HashMap<Location, EcoString>) {
+    pub fn set_html_ids(&mut self, html_ids: FxHashMap<Location, EcoString>) {
         self.html_ids = html_ids;
     }
 
@@ -313,7 +314,7 @@ impl Debug for Introspector {
 
 /// A map from one keys to multiple elements.
 #[derive(Clone)]
-struct MultiMap<K, V>(HashMap<K, SmallVec<[V; 1]>>);
+struct MultiMap<K, V>(FxHashMap<K, SmallVec<[V; 1]>>);
 
 impl<K, V> MultiMap<K, V>
 where
@@ -334,13 +335,13 @@ where
 
 impl<K, V> Default for MultiMap<K, V> {
     fn default() -> Self {
-        Self(HashMap::new())
+        Self(FxHashMap::default())
     }
 }
 
 /// Caches queries.
 #[derive(Default)]
-struct QueryCache(RwLock<HashMap<u128, EcoVec<Content>>>);
+struct QueryCache(RwLock<FxHashMap<u128, EcoVec<Content>>>);
 
 impl QueryCache {
     fn get(&self, hash: u128) -> Option<EcoVec<Content>> {
@@ -364,11 +365,11 @@ pub struct IntrospectorBuilder {
     pub pages: usize,
     pub page_numberings: Vec<Option<Numbering>>,
     pub page_supplements: Vec<Content>,
-    pub html_ids: HashMap<Location, EcoString>,
-    seen: HashSet<Location>,
+    pub html_ids: FxHashMap<Location, EcoString>,
+    seen: FxHashSet<Location>,
     insertions: MultiMap<Location, Vec<Pair>>,
     keys: MultiMap<u128, Location>,
-    locations: HashMap<Location, usize>,
+    locations: FxHashMap<Location, usize>,
     labels: MultiMap<Label, usize>,
 }
 

@@ -1,9 +1,9 @@
-use std::collections::HashMap;
 use std::fmt::{self, Debug, Formatter};
 use std::hash::Hash;
 use std::sync::OnceLock;
 
 use comemo::{Tracked, Validate};
+use rustc_hash::FxHashMap;
 
 use crate::introspection::{Introspector, Location};
 
@@ -188,7 +188,7 @@ impl<'a> Locator<'a> {
         SplitLocator {
             local: self.local,
             outer: self.outer,
-            disambiguators: HashMap::new(),
+            disambiguators: FxHashMap::default(),
         }
     }
 
@@ -244,7 +244,7 @@ pub struct SplitLocator<'a> {
     /// for all the layers beyond the memoization boundary on-demand.
     outer: Option<&'a LocatorLink<'a>>,
     /// Simply counts up the number of times we've seen each local hash.
-    disambiguators: HashMap<u128, usize>,
+    disambiguators: FxHashMap<u128, usize>,
 }
 
 impl<'a> SplitLocator<'a> {

@@ -1,5 +1,4 @@
 use std::any::TypeId;
-use std::collections::HashMap;
 use std::ffi::OsStr;
 use std::fmt::{self, Debug, Formatter};
 use std::path::Path;
@@ -14,6 +13,7 @@ use hayagriva::{
     SpecificLocator, citationberg,
 };
 use indexmap::IndexMap;
+use rustc_hash::{FxBuildHasher, FxHashMap};
 use smallvec::{SmallVec, smallvec};
 use typst_syntax::{Span, Spanned, SyntaxMode};
 use typst_utils::{ManuallyHash, PicoStr};
@@ -217,7 +217,9 @@ impl LocalName for Packed<BibliographyElem> {
 
 /// A loaded bibliography.
 #[derive(Clone, PartialEq, Hash)]
-pub struct Bibliography(Arc<ManuallyHash<IndexMap<Label, hayagriva::Entry>>>);
+pub struct Bibliography(
+    Arc<ManuallyHash<IndexMap<Label, hayagriva::Entry, FxBuildHasher>>>,
+);
 
 impl Bibliography {
     /// Load a bibliography from data sources.
@@ -234,7 +236,7 @@ impl Bibliography {
     #[comemo::memoize]
     #[typst_macros::time(name = "load bibliography")]
     fn decode(data: &[Loaded]) -> SourceResult<Bibliography> {
-        let mut map = IndexMap::new();
+        let mut map = IndexMap::default();
         let mut duplicates = Vec::<EcoString>::new();
 
         // We might have multiple bib/yaml files
@@ -486,7 +488,7 @@ impl IntoValue for CslSource {
 /// citations to do it.
 pub struct Works {
     /// Maps from the location of a citation group to its rendered content.
-    pub citations: HashMap<Location, SourceResult<Content>>,
+    pub citations: FxHashMap<Location, SourceResult<Content>>,
     /// Lists all references in the bibliography, with optional prefix, or
     /// `None` if the citation style can't be used for bibliographies.
     pub references: Option<Vec<(Option<Content>, Content)>>,
@@ -528,7 +530,7 @@ struct Generator<'a> {
     /// bibliography driver and needed when processing hayagriva's output.
     infos: Vec<GroupInfo>,
     /// Citations with unresolved keys.
-    failures: HashMap<Location, SourceResult<Content>>,
+    failures: FxHashMap<Location, SourceResult<Content>>,
 }
 
 /// Details about a group of merged citations. All citations are put into groups
@@ -571,7 +573,7 @@ impl<'a> Generator<'a> {
             bibliography,
             groups,
             infos,
-            failures: HashMap::new(),
+            failures: FxHashMap::default(),
         })
     }
 
@@ -702,10 +704,10 @@ impl<'a> Generator<'a> {
     fn display_citations(
         &mut self,
         rendered: &hayagriva::Rendered,
-    ) -> StrResult<HashMap<Location, SourceResult<Content>>> {
+    ) -> StrResult<FxHashMap<Location, SourceResult<Content>>> {
         // Determine for each citation key where in the bibliography it is,
         // so that we can link there.
-        let mut links = HashMap::new();
+        let mut links = FxHashMap::default();
         if let Some(bibliography) = &rendered.bibliography {
             let location = self.bibliography.location().unwrap();
             for (k, item) in bibliography.items.iter().enumerate() {
@@ -760,7 +762,7 @@ impl<'a> Generator<'a> {
 
         // Determine for each citation key where it first occurred, so that we
        // can link there.
-        let mut first_occurrences = HashMap::new();
+        let mut first_occurrences = FxHashMap::default();
         for info in &self.infos {
             for subinfo in &info.subinfos {
                 let key = subinfo.key.resolve();

@@ -1,7 +1,7 @@
-use std::collections::HashMap;
 use std::str::FromStr;
 
 use ecow::{EcoString, eco_format};
+use rustc_hash::FxHashMap;
 
 use crate::diag::Hint;
 use crate::foundations::{StyleChain, cast};
@@ -278,13 +278,13 @@ pub fn localized_str(lang: Lang, region: Option<Region>, key: &str) -> &'static
 fn parse_language_bundle(
     lang: Lang,
     region: Option<Region>,
-) -> Result<HashMap<&'static str, &'static str>, &'static str> {
+) -> Result<FxHashMap<&'static str, &'static str>, &'static str> {
     let language_tuple = TRANSLATIONS.iter().find(|it| it.0 == lang_str(lang, region));
     let Some((_lang_name, language_file)) = language_tuple else {
-        return Ok(HashMap::new());
+        return Ok(FxHashMap::default());
     };
 
-    let mut bundle = HashMap::new();
+    let mut bundle = FxHashMap::default();
     let lines = language_file.trim().lines();
     for line in lines {
         if line.trim().starts_with('#') {
@@ -313,9 +313,9 @@ fn lang_str(lang: Lang, region: Option<Region>) -> EcoString {
 
 #[cfg(test)]
 mod tests {
-    use std::collections::HashSet;
     use std::path::PathBuf;
 
+    use rustc_hash::FxHashSet;
     use typst_utils::option_eq;
 
     use super::*;
@@ -337,7 +337,7 @@ mod tests {
     #[test]
     fn test_all_translations_included() {
         let defined_keys =
-            HashSet::<&str>::from_iter(TRANSLATIONS.iter().map(|(lang, _)| *lang));
+            FxHashSet::<&str>::from_iter(TRANSLATIONS.iter().map(|(lang, _)| *lang));
         let mut checked = 0;
         for file in translation_files_iter() {
             assert!(

@@ -1,8 +1,8 @@
-use std::collections::HashMap;
 use std::hash::{Hash, Hasher};
 use std::sync::{Arc, Mutex};
 
 use comemo::Tracked;
+use rustc_hash::FxHashMap;
 use siphasher::sip128::{Hasher128, SipHasher13};
 
 use crate::World;
@@ -144,9 +144,9 @@ struct FontResolver<'a> {
     /// The active list of font families at the location of the SVG.
     families: &'a [&'a str],
     /// A mapping from Typst font indices to fontdb IDs.
-    to_id: HashMap<usize, Option<fontdb::ID>>,
+    to_id: FxHashMap<usize, Option<fontdb::ID>>,
     /// The reverse mapping.
-    from_id: HashMap<fontdb::ID, Font>,
+    from_id: FxHashMap<fontdb::ID, Font>,
     /// Accumulates a hash of all used fonts.
     hasher: SipHasher13,
 }
@@ -162,8 +162,8 @@ impl<'a> FontResolver<'a> {
             book,
             world,
             families,
-            to_id: HashMap::new(),
-            from_id: HashMap::new(),
+            to_id: FxHashMap::default(),
+            from_id: FxHashMap::default(),
             hasher: SipHasher13::new(),
         }
     }

@@ -26,6 +26,7 @@ image = { workspace = true }
 infer = { workspace = true }
 krilla = { workspace = true }
 krilla-svg = { workspace = true }
+rustc-hash = { workspace = true }
 serde = { workspace = true }
 
 [lints]

@@ -1,4 +1,4 @@
-use std::collections::{BTreeMap, HashMap, HashSet};
+use std::collections::BTreeMap;
 use std::num::NonZeroU64;
 
 use ecow::{EcoVec, eco_format};
@@ -13,6 +13,7 @@ use krilla::pdf::PdfError;
 use krilla::surface::Surface;
 use krilla::{Document, SerializeSettings};
 use krilla_svg::render_svg_glyph;
+use rustc_hash::{FxHashMap, FxHashSet};
 use typst_library::diag::{SourceDiagnostic, SourceResult, bail, error};
 use typst_library::foundations::{NativeElement, Repr};
 use typst_library::introspection::Location;
@@ -207,22 +208,22 @@ impl FrameContext {
 /// Globally needed context for converting a typst document.
 pub(crate) struct GlobalContext<'a> {
     /// Cache the conversion between krilla and Typst fonts (forward and backward).
-    pub(crate) fonts_forward: HashMap<Font, krilla::text::Font>,
-    pub(crate) fonts_backward: HashMap<krilla::text::Font, Font>,
+    pub(crate) fonts_forward: FxHashMap<Font, krilla::text::Font>,
+    pub(crate) fonts_backward: FxHashMap<krilla::text::Font, Font>,
     /// Mapping between images and their span.
     // Note: In theory, the same image can have multiple spans
     // if it appears in the document multiple times. We just store the
     // first appearance, though.
-    pub(crate) image_to_spans: HashMap<krilla::image::Image, Span>,
+    pub(crate) image_to_spans: FxHashMap<krilla::image::Image, Span>,
     /// The spans of all images that appear in the document. We use this so
     /// we can give more accurate error messages.
-    pub(crate) image_spans: HashSet<Span>,
+    pub(crate) image_spans: FxHashSet<Span>,
     /// The document to convert.
     pub(crate) document: &'a PagedDocument,
     /// Options for PDF export.
     pub(crate) options: &'a PdfOptions<'a>,
     /// Mapping between locations in the document and named destinations.
-    pub(crate) loc_to_names: HashMap<Location, NamedDestination>,
+    pub(crate) loc_to_names: FxHashMap<Location, NamedDestination>,
     /// The languages used throughout the document.
     pub(crate) languages: BTreeMap<Lang, usize>,
     pub(crate) page_index_converter: PageIndexConverter,
@@ -232,17 +233,17 @@ impl<'a> GlobalContext<'a> {
     pub(crate) fn new(
         document: &'a PagedDocument,
         options: &'a PdfOptions,
-        loc_to_names: HashMap<Location, NamedDestination>,
+        loc_to_names: FxHashMap<Location, NamedDestination>,
         page_index_converter: PageIndexConverter,
     ) -> GlobalContext<'a> {
         Self {
-            fonts_forward: HashMap::new(),
-            fonts_backward: HashMap::new(),
+            fonts_forward: FxHashMap::default(),
+            fonts_backward: FxHashMap::default(),
             document,
             options,
             loc_to_names,
-            image_to_spans: HashMap::new(),
-            image_spans: HashSet::new(),
+            image_to_spans: FxHashMap::default(),
+            image_spans: FxHashSet::default(),
             languages: BTreeMap::new(),
             page_index_converter,
         }
@@ -632,13 +633,13 @@ fn to_span(loc: Option<krilla::surface::Location>) -> Span {
 fn collect_named_destinations(
     document: &PagedDocument,
     pic: &PageIndexConverter,
-) -> HashMap<Location, NamedDestination> {
-    let mut locs_to_names = HashMap::new();
+) -> FxHashMap<Location, NamedDestination> {
+    let mut locs_to_names = FxHashMap::default();
 
     // Find all headings that have a label and are the first among other
     // headings with the same label.
     let matches: Vec<_> = {
-        let mut seen = HashSet::new();
+        let mut seen = FxHashSet::default();
         document
             .introspector
             .query(&HeadingElem::ELEM.select())
@@ -673,13 +674,13 @@ fn collect_named_destinations(
 }
 
 pub(crate) struct PageIndexConverter {
-    page_indices: HashMap<usize, usize>,
+    page_indices: FxHashMap<usize, usize>,
     skipped_pages: usize,
 }
 
 impl PageIndexConverter {
     pub fn new(document: &PagedDocument, options: &PdfOptions) -> Self {
-        let mut page_indices = HashMap::new();
+        let mut page_indices = FxHashMap::default();
         let mut skipped_pages = 0;
 
         for i in 0..document.pages.len() {

@@ -24,6 +24,7 @@ ecow = { workspace = true }
 flate2 = { workspace = true }
 hayro = { workspace = true }
 image = { workspace = true }
+rustc-hash = { workspace = true }
 ttf-parser = { workspace = true }
 xmlparser = { workspace = true }
 xmlwriter = { workspace = true }

@@ -6,10 +6,10 @@ mod shape;
 mod text;
 
 pub use image::{convert_image_scaling, convert_image_to_base64_url};
+use rustc_hash::FxHashMap;
 use typst_library::introspection::Introspector;
 use typst_library::model::Destination;
 
-use std::collections::HashMap;
 use std::fmt::{self, Display, Formatter, Write};
 
 use ecow::EcoString;
@@ -421,12 +421,16 @@ impl<'a> SVGRenderer<'a> {
 struct Deduplicator<T> {
     kind: char,
     vec: Vec<(u128, T)>,
-    present: HashMap<u128, Id>,
+    present: FxHashMap<u128, Id>,
 }
 
 impl<T> Deduplicator<T> {
     fn new(kind: char) -> Self {
-        Self { kind, vec: Vec::new(), present: HashMap::new() }
+        Self {
+            kind,
+            vec: Vec::new(),
+            present: FxHashMap::default(),
+        }
     }
 
     /// Inserts a value into the vector. If the hash is already present, returns

@@ -16,6 +16,7 @@ readme = { workspace = true }
 typst-timing = { workspace = true }
 typst-utils = { workspace = true }
 ecow = { workspace = true }
+rustc-hash = { workspace = true }
 serde = { workspace = true }
 toml = { workspace = true }
 unicode-ident = { workspace = true }

@@ -1,21 +1,22 @@
 //! File and package management.
 
-use std::collections::HashMap;
 use std::fmt::{self, Debug, Formatter};
 use std::num::NonZeroU16;
 use std::sync::{LazyLock, RwLock};
 
+use rustc_hash::FxHashMap;
+
 use crate::VirtualPath;
 use crate::package::PackageSpec;
 
 /// The global package-path interner.
 static INTERNER: LazyLock<RwLock<Interner>> = LazyLock::new(|| {
-    RwLock::new(Interner { to_id: HashMap::new(), from_id: Vec::new() })
+    RwLock::new(Interner { to_id: FxHashMap::default(), from_id: Vec::new() })
 });
 
 /// A package-path interner.
 struct Interner {
-    to_id: HashMap<Pair, FileId>,
+    to_id: FxHashMap<Pair, FileId>,
     from_id: Vec<Pair>,
 }

@@ -1,8 +1,8 @@
-use std::collections::{HashMap, HashSet};
 use std::mem;
 use std::ops::{Index, IndexMut, Range};
 
 use ecow::{EcoString, eco_format};
+use rustc_hash::{FxHashMap, FxHashSet};
 use typst_utils::default_math_class;
 use unicode_math_class::MathClass;
 
@@ -481,7 +481,7 @@ fn math_args(p: &mut Parser) {
     let mut has_arrays = false;
 
     let mut maybe_array_start = p.marker();
-    let mut seen = HashSet::new();
+    let mut seen = FxHashSet::default();
     while !p.at_set(syntax_set!(End, Dollar, RightParen)) {
         positional = math_arg(p, &mut seen);
 
@@ -522,7 +522,7 @@ fn math_args(p: &mut Parser) {
 /// Parses a single argument in a math argument list.
 ///
 /// Returns whether the parsed argument was positional or not.
-fn math_arg<'s>(p: &mut Parser<'s>, seen: &mut HashSet<&'s str>) -> bool {
+fn math_arg<'s>(p: &mut Parser<'s>, seen: &mut FxHashSet<&'s str>) -> bool {
     let m = p.marker();
     let start = p.current_start();
 
@@ -831,7 +831,7 @@ fn let_binding(p: &mut Parser) {
             closure = true;
         }
     } else {
-        pattern(p, false, &mut HashSet::new(), None);
+        pattern(p, false, &mut FxHashSet::default(), None);
         other = true;
     }
 
@@ -923,7 +923,7 @@ fn for_loop(p: &mut Parser) {
     let m = p.marker();
     p.assert(SyntaxKind::For);
 
-    let mut seen = HashSet::new();
+    let mut seen = FxHashSet::default();
     pattern(p, false, &mut seen, None);
 
     if p.at(SyntaxKind::Comma) {
@@ -1084,7 +1084,7 @@ fn expr_with_paren(p: &mut Parser, atomic: bool) {
     } else if p.at(SyntaxKind::Eq) && kind != SyntaxKind::Parenthesized {
         p.restore(checkpoint);
         let m = p.marker();
-        destructuring_or_parenthesized(p, true, &mut HashSet::new());
+        destructuring_or_parenthesized(p, true, &mut FxHashSet::default());
         if !p.expect(SyntaxKind::Eq) {
             return;
         }
@@ -1107,7 +1107,7 @@ fn parenthesized_or_array_or_dict(p: &mut Parser) -> SyntaxKind {
         count: 0,
         maybe_just_parens: true,
         kind: None,
-        seen: HashSet::new(),
+        seen: FxHashSet::default(),
     };
 
     // An edge case with parens is whether we can interpret a leading spread
@@ -1169,7 +1169,7 @@ struct GroupState {
     /// The `SyntaxKind` to wrap as (if we've figured it out yet).
     kind: Option<SyntaxKind>,
     /// Store named arguments so we can give an error if they're repeated.
-    seen: HashSet<EcoString>,
+    seen: FxHashSet<EcoString>,
 }
 
 /// Parses a single item in an array or dictionary.
@@ -1238,7 +1238,7 @@ fn args(p: &mut Parser) {
     p.with_nl_mode(AtNewline::Continue, |p| {
         p.assert(SyntaxKind::LeftParen);
 
-        let mut seen = HashSet::new();
+        let mut seen = FxHashSet::default();
         while !p.current().is_terminator() {
             if !p.at_set(set::ARG) {
                 p.unexpected();
@@ -1264,7 +1264,7 @@ fn args(p: &mut Parser) {
 }
 
 /// Parses a single argument in an argument list.
-fn arg<'s>(p: &mut Parser<'s>, seen: &mut HashSet<&'s str>) {
+fn arg<'s>(p: &mut Parser<'s>, seen: &mut FxHashSet<&'s str>) {
     let m = p.marker();
 
     // Parses a spread argument: `..args`.
@@ -1301,7 +1301,7 @@ fn params(p: &mut Parser) {
     p.with_nl_mode(AtNewline::Continue, |p| {
         p.assert(SyntaxKind::LeftParen);
 
-        let mut seen = HashSet::new();
+        let mut seen = FxHashSet::default();
         let mut sink = false;
 
         while !p.current().is_terminator() {
@@ -1323,7 +1323,7 @@ fn params(p: &mut Parser) {
 }
 
 /// Parses a single parameter in a parameter list.
-fn param<'s>(p: &mut Parser<'s>, seen: &mut HashSet<&'s str>, sink: &mut bool) {
+fn param<'s>(p: &mut Parser<'s>, seen: &mut FxHashSet<&'s str>, sink: &mut bool) {
     let m = p.marker();
 
     // Parses argument sink: `..sink`.
@@ -1358,7 +1358,7 @@ fn param<'s>(p: &mut Parser<'s>, seen: &mut HashSet<&'s str>, sink: &mut bool) {
 fn pattern<'s>(
     p: &mut Parser<'s>,
     reassignment: bool,
-    seen: &mut HashSet<&'s str>,
+    seen: &mut FxHashSet<&'s str>,
     dupe: Option<&'s str>,
 ) {
     match p.current() {
@@ -1372,7 +1372,7 @@ fn pattern<'s>(
 fn destructuring_or_parenthesized<'s>(
     p: &mut Parser<'s>,
     reassignment: bool,
-    seen: &mut HashSet<&'s str>,
+    seen: &mut FxHashSet<&'s str>,
 ) {
     let mut sink = false;
     let mut count = 0;
@@ -1410,7 +1410,7 @@ fn destructuring_or_parenthesized<'s>(
 fn destructuring_item<'s>(
     p: &mut Parser<'s>,
     reassignment: bool,
-    seen: &mut HashSet<&'s str>,
+    seen: &mut FxHashSet<&'s str>,
     maybe_just_parens: &mut bool,
     sink: &mut bool,
 ) {
@@ -1457,7 +1457,7 @@ fn destructuring_item<'s>(
 fn pattern_leaf<'s>(
     p: &mut Parser<'s>,
     reassignment: bool,
-    seen: &mut HashSet<&'s str>,
+    seen: &mut FxHashSet<&'s str>,
     dupe: Option<&'s str>,
 ) {
     if p.current().is_keyword() {
@@ -1920,7 +1920,7 @@ struct MemoArena {
     /// A map from the parser's current position to a range of previously parsed
     /// nodes in the arena and a checkpoint of the parser's state. These allow
     /// us to reset the parser to avoid parsing the same location again.
-    memo_map: HashMap<MemoKey, (Range<usize>, PartialState)>,
+    memo_map: FxHashMap<MemoKey, (Range<usize>, PartialState)>,
 }
 
 /// A type alias for the memo key so it doesn't get confused with other usizes.

@@ -16,6 +16,7 @@ readme = { workspace = true }
 once_cell = { workspace = true }
 portable-atomic = { workspace = true }
 rayon = { workspace = true }
+rustc-hash = { workspace = true }
 siphasher = { workspace = true }
 thin-vec = { workspace = true }
 unicode-math-class = { workspace = true }

@@ -1,22 +1,24 @@
 use std::borrow::Borrow;
 use std::cmp::Ordering;
-use std::collections::HashMap;
 use std::fmt::{self, Debug, Display, Formatter};
 use std::hash::{Hash, Hasher};
 use std::num::NonZeroU64;
 use std::ops::Deref;
 use std::sync::{LazyLock, RwLock};
 
+use rustc_hash::FxHashMap;
+
 /// Marks a number as a bitcode encoded `PicoStr``.
 const MARKER: u64 = 1 << 63;
 
 /// The global runtime string interner.
-static INTERNER: LazyLock<RwLock<Interner>> =
-    LazyLock::new(|| RwLock::new(Interner { seen: HashMap::new(), strings: Vec::new() }));
+static INTERNER: LazyLock<RwLock<Interner>> = LazyLock::new(|| {
+    RwLock::new(Interner { seen: FxHashMap::default(), strings: Vec::new() })
+});
 
 /// A string interner.
 struct Interner {
-    seen: HashMap<&'static str, PicoStr>,
+    seen: FxHashMap<&'static str, PicoStr>,
     strings: Vec<&'static str>,
 }

@@ -24,6 +24,7 @@ typst-timing = { workspace = true }
 typst-utils = { workspace = true }
 comemo = { workspace = true }
 ecow = { workspace = true }
+rustc-hash = { workspace = true }
 
 [lints]
 workspace = true

@@ -38,11 +38,11 @@ pub use typst_syntax as syntax;
 #[doc(inline)]
 pub use typst_utils as utils;
 
-use std::collections::HashSet;
 use std::sync::LazyLock;
 
 use comemo::{Track, Tracked, Validate};
 use ecow::{EcoString, EcoVec, eco_format, eco_vec};
+use rustc_hash::FxHashSet;
 use typst_html::HtmlDocument;
 use typst_library::diag::{
     FileError, SourceDiagnostic, SourceResult, Warned, bail, warning,
@@ -176,7 +176,7 @@ fn compile_impl<D: Document>(
 
 /// Deduplicate diagnostics.
 fn deduplicate(mut diags: EcoVec<SourceDiagnostic>) -> EcoVec<SourceDiagnostic> {
-    let mut unique = HashSet::new();
+    let mut unique = FxHashSet::default();
     diags.retain(|diag| {
         let hash = typst_utils::hash128(&(&diag.span, &diag.message));
         unique.insert(hash)

@@ -26,6 +26,7 @@ codex = { workspace = true }
 ecow = { workspace = true }
 heck = { workspace = true }
 pulldown-cmark = { workspace = true }
+rustc-hash = { workspace = true }
 serde = { workspace = true }
 serde_json = { workspace = true, optional = true }
 serde_yaml = { workspace = true }

@@ -1,7 +1,7 @@
 use std::cmp::Reverse;
-use std::collections::HashMap;
 use std::fmt::Write;
 
+use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};
 
 use crate::{Html, Resolver};
@@ -12,7 +12,7 @@ pub fn contributors(resolver: &dyn Resolver, from: &str, to: &str) -> Option<Htm
     let bots = ["dependabot[bot]"];
 
     // Determine number of contributions per person.
-    let mut contributors = HashMap::<String, Contributor>::new();
+    let mut contributors = FxHashMap::<String, Contributor>::default();
     for commit in resolver.commits(from, to) {
         contributors
             .entry(commit.author.login.clone())

@@ -9,10 +9,9 @@ pub use self::contribs::*;
 pub use self::html::*;
 pub use self::model::*;
 
-use std::collections::HashSet;
-
 use ecow::{EcoString, eco_format};
 use heck::ToTitleCase;
+use rustc_hash::FxHashSet;
 use serde::Deserialize;
 use serde_yaml as yaml;
 use std::sync::LazyLock;
@@ -260,7 +259,7 @@ fn category_page(resolver: &dyn Resolver, category: Category) -> PageModel {
         shorthands = Some(ShorthandsModel { markup, math });
     }
 
-    let mut skip = HashSet::new();
+    let mut skip = FxHashSet::default();
     if category == Category::Math {
         skip = GROUPS
             .iter()

@@ -46,6 +46,7 @@ oxipng = { workspace = true }
 parking_lot = { workspace = true }
 rayon = { workspace = true }
 regex = { workspace = true }
+rustc-hash = { workspace = true }
 tiny-skia = { workspace = true }
 unscanny = { workspace = true }
 walkdir = { workspace = true }

@@ -1,4 +1,3 @@
-use std::collections::{HashMap, HashSet};
 use std::fmt::{self, Display, Formatter};
 use std::ops::Range;
 use std::path::{Path, PathBuf};
@@ -6,6 +5,7 @@ use std::str::FromStr;
 use std::sync::LazyLock;
 
 use ecow::{EcoString, eco_format};
+use rustc_hash::{FxHashMap, FxHashSet};
 use typst_syntax::package::PackageVersion;
 use typst_syntax::{
     FileId, Lines, Source, VirtualPath, is_id_continue, is_ident, is_newline,
@@ -122,7 +122,7 @@ impl Display for NoteKind {
 struct Collector {
     tests: Vec<Test>,
     errors: Vec<TestParseError>,
-    seen: HashMap<EcoString, (FilePos, Vec<Attr>)>,
+    seen: FxHashMap<EcoString, (FilePos, Vec<Attr>)>,
     skipped: usize,
 }
 
@@ -132,7 +132,7 @@ impl Collector {
         Self {
            tests: vec![],
            errors: vec![],
-            seen: HashMap::new(),
+            seen: FxHashMap::default(),
            skipped: 0,
        }
    }
@@ -507,7 +507,7 @@ impl<'a> Parser<'a> {
 
     /// Whether a test is within the selected set to run.
     fn selected(name: &str, abs: PathBuf) -> bool {
-        static SKIPPED: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
+        static SKIPPED: LazyLock<FxHashSet<&'static str>> = LazyLock::new(|| {
            String::leak(std::fs::read_to_string(crate::SKIP_PATH).unwrap())
                .lines()
                .map(|line| line.trim())

@@ -1,5 +1,4 @@
 use std::borrow::Cow;
-use std::collections::HashMap;
 use std::fs;
 use std::io::Write;
 use std::path::{Path, PathBuf};
@@ -8,6 +7,7 @@ use std::sync::OnceLock;
 
 use comemo::Tracked;
 use parking_lot::Mutex;
+use rustc_hash::FxHashMap;
 use typst::diag::{At, FileError, FileResult, SourceResult, StrResult, bail};
 use typst::engine::Engine;
 use typst::foundations::{
@@ -108,7 +108,7 @@ struct TestBase {
     library: LazyHash<Library>,
     book: LazyHash<FontBook>,
     fonts: Vec<Font>,
-    slots: Mutex<HashMap<FileId, FileSlot>>,
+    slots: Mutex<FxHashMap<FileId, FileSlot>>,
 }
 
 impl Default for TestBase {
@@ -122,7 +122,7 @@ impl Default for TestBase {
             library: LazyHash::new(library()),
             book: LazyHash::new(FontBook::from_fonts(&fonts)),
             fonts,
-            slots: Mutex::new(HashMap::new()),
+            slots: Mutex::new(FxHashMap::default()),
         }
     }
 }