mirror of https://github.com/typst/typst
synced 2025-08-11 13:47:55 +08:00

Compare commits: 024bbb2b46 ... c247dbc42d

3 commits:
  c247dbc42d
  c259545c6e
  e470ccff19
Cargo.lock (generated)

@@ -2901,6 +2901,8 @@ dependencies = [
  "native-tls",
  "once_cell",
  "openssl",
+ "serde",
+ "serde_json",
  "tar",
  "typst-assets",
  "typst-library",
@@ -23,6 +23,8 @@ flate2 = { workspace = true, optional = true }
 fontdb = { workspace = true, optional = true }
 native-tls = { workspace = true, optional = true }
 once_cell = { workspace = true }
+serde = { workspace = true }
+serde_json = { workspace = true }
 tar = { workspace = true, optional = true }
 ureq = { workspace = true, optional = true }

@@ -5,10 +5,9 @@ use std::path::{Path, PathBuf};

 use ecow::eco_format;
 use once_cell::sync::OnceCell;
+use serde::Deserialize;
 use typst_library::diag::{bail, PackageError, PackageResult, StrResult};
-use typst_syntax::package::{
-    PackageInfo, PackageSpec, PackageVersion, VersionlessPackageSpec,
-};
+use typst_syntax::package::{PackageSpec, PackageVersion, VersionlessPackageSpec};

 use crate::download::{Downloader, Progress};

@@ -32,7 +31,7 @@ pub struct PackageStorage {
     /// The downloader used for fetching the index and packages.
     downloader: Downloader,
     /// The cached index of the default namespace.
-    index: OnceCell<Vec<PackageInfo>>,
+    index: OnceCell<Vec<serde_json::Value>>,
 }

 impl PackageStorage {
@@ -42,6 +41,18 @@ impl PackageStorage {
         package_cache_path: Option<PathBuf>,
         package_path: Option<PathBuf>,
         downloader: Downloader,
+    ) -> Self {
+        Self::with_index(package_cache_path, package_path, downloader, OnceCell::new())
+    }
+
+    /// Creates a new package storage with a pre-defined index.
+    ///
+    /// Useful for testing.
+    fn with_index(
+        package_cache_path: Option<PathBuf>,
+        package_path: Option<PathBuf>,
+        downloader: Downloader,
+        index: OnceCell<Vec<serde_json::Value>>,
     ) -> Self {
         Self {
             package_cache_path: package_cache_path.or_else(|| {
@@ -51,7 +62,7 @@ impl PackageStorage {
                 dirs::data_dir().map(|data_dir| data_dir.join(DEFAULT_PACKAGES_SUBDIR))
             }),
             downloader,
-            index: OnceCell::new(),
+            index,
         }
     }

@@ -109,6 +120,7 @@ impl PackageStorage {
             // version.
             self.download_index()?
                 .iter()
+                .filter_map(|value| MinimalPackageInfo::deserialize(value).ok())
                 .filter(|package| package.name == spec.name)
                 .map(|package| package.version)
                 .max()
@@ -131,7 +143,7 @@ impl PackageStorage {
     }

     /// Download the package index. The result of this is cached for efficiency.
-    pub fn download_index(&self) -> StrResult<&[PackageInfo]> {
+    pub fn download_index(&self) -> StrResult<&[serde_json::Value]> {
         self.index
             .get_or_try_init(|| {
                 let url = format!("{DEFAULT_REGISTRY}/{DEFAULT_NAMESPACE}/index.json");
@@ -186,3 +198,54 @@ impl PackageStorage {
         })
     }
 }
+
+/// Minimal information required about a package to determine its latest
+/// version.
+#[derive(Deserialize)]
+struct MinimalPackageInfo {
+    name: String,
+    version: PackageVersion,
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn lazy_deser_index() {
+        let storage = PackageStorage::with_index(
+            None,
+            None,
+            Downloader::new("typst/test"),
+            OnceCell::with_value(vec![
+                serde_json::json!({
+                    "name": "charged-ieee",
+                    "version": "0.1.0",
+                    "entrypoint": "lib.typ",
+                }),
+                serde_json::json!({
+                    "name": "unequivocal-ams",
+                    // This version number is currently not valid, so this package
+                    // can't be parsed.
+                    "version": "0.2.0-dev",
+                    "entrypoint": "lib.typ",
+                }),
+            ]),
+        );
+
+        let ieee_version = storage.determine_latest_version(&VersionlessPackageSpec {
+            namespace: "preview".into(),
+            name: "charged-ieee".into(),
+        });
+        assert_eq!(ieee_version, Ok(PackageVersion { major: 0, minor: 1, patch: 0 }));
+
+        let ams_version = storage.determine_latest_version(&VersionlessPackageSpec {
+            namespace: "preview".into(),
+            name: "unequivocal-ams".into(),
+        });
+        assert_eq!(
+            ams_version,
+            Err("failed to find package @preview/unequivocal-ams".into())
+        )
+    }
+}
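The hunks above replace the fully-typed cached index with raw serde_json::Value entries and only deserialize the two fields needed for version resolution (name, version), so a single malformed index entry no longer breaks the whole lookup. As a rough standalone sketch of that pattern (assuming serde with the derive feature and serde_json as dependencies, matching the Cargo.toml additions above; the Minimal struct and latest function are illustrative stand-ins, not typst-kit API):

use serde::Deserialize;

/// Illustrative stand-in for the crate's `MinimalPackageInfo`: only the
/// fields needed to pick a latest version.
#[derive(Deserialize)]
struct Minimal {
    name: String,
    // The real code deserializes into `PackageVersion`; a plain string keeps
    // this sketch self-contained (and makes `max()` a lexicographic pick).
    version: String,
}

/// Hypothetical helper mirroring the `determine_latest_version` chain.
fn latest(index: &[serde_json::Value], name: &str) -> Option<String> {
    index
        .iter()
        // Entries that fail to deserialize are skipped instead of failing
        // the whole lookup.
        .filter_map(|value| Minimal::deserialize(value).ok())
        .filter(|package| package.name == name)
        .map(|package| package.version)
        .max()
}

fn main() {
    let index = vec![
        serde_json::json!({ "name": "charged-ieee", "version": "0.1.0" }),
        // Malformed entry (no version); it is ignored rather than fatal.
        serde_json::json!({ "name": "broken" }),
    ];
    assert_eq!(latest(&index, "charged-ieee"), Some("0.1.0".into()));
    assert_eq!(latest(&index, "broken"), None);
}

This mirrors the behaviour the new lazy_deser_index test checks: a package with an unparsable version number is simply not found, rather than poisoning the entire index.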
@@ -110,7 +110,7 @@ impl f64 {
         f64::signum(self)
     }

-    /// Converts bytes to a float.
+    /// Interprets bytes as a float.
     ///
     /// ```example
     /// #float.from-bytes(bytes((0, 0, 0, 0, 0, 0, 240, 63))) \
@@ -120,8 +120,10 @@ impl f64 {
     pub fn from_bytes(
         /// The bytes that should be converted to a float.
         ///
-        /// Must be of length exactly 8 so that the result fits into a 64-bit
-        /// float.
+        /// Must have a length of either 4 or 8. The bytes are then
+        /// interpreted in [IEEE 754](https://en.wikipedia.org/wiki/IEEE_754)'s
+        /// binary32 (single-precision) or binary64 (double-precision) format
+        /// depending on the length of the bytes.
         bytes: Bytes,
         /// The endianness of the conversion.
         #[named]
@@ -158,6 +160,13 @@ impl f64 {
         #[named]
         #[default(Endianness::Little)]
         endian: Endianness,
+        /// The size of the resulting bytes.
+        ///
+        /// This must be either 4 or 8. The call will return the
+        /// representation of this float in either
+        /// [IEEE 754](https://en.wikipedia.org/wiki/IEEE_754)'s binary32
+        /// (single-precision) or binary64 (double-precision) format
+        /// depending on the provided size.
         #[named]
         #[default(8)]
         size: u32,
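The doc changes above let float.from-bytes accept 4-byte (binary32) as well as 8-byte (binary64) buffers, and give float.to-bytes a matching size parameter. A minimal sketch of what that length dispatch amounts to in plain Rust (little-endian only; float_from_le_bytes is a hypothetical helper for illustration, not the typst-library implementation):

/// Decodes 4 or 8 little-endian bytes as an IEEE 754 float. The real
/// `float.from-bytes` additionally takes an `endian` argument and works on
/// Typst `bytes` values.
fn float_from_le_bytes(bytes: &[u8]) -> Result<f64, String> {
    match bytes.len() {
        // binary32: decode as f32, then widen losslessly to f64.
        4 => Ok(f32::from_le_bytes(bytes.try_into().unwrap()) as f64),
        // binary64: decode directly.
        8 => Ok(f64::from_le_bytes(bytes.try_into().unwrap())),
        n => Err(format!("bytes must have a length of 4 or 8, not {n}")),
    }
}

fn main() {
    // 1.0 encoded as binary64, little-endian: the same bytes as the doc example.
    assert_eq!(float_from_le_bytes(&[0, 0, 0, 0, 0, 0, 240, 63]), Ok(1.0));
    // 1.0 encoded as binary32, little-endian.
    assert_eq!(float_from_le_bytes(&1.0f32.to_le_bytes()), Ok(1.0));
}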
@@ -582,12 +582,11 @@ impl Gradient {
         let mut stops = stops
             .iter()
             .map(move |&(color, offset)| {
-                let t = i as f64 / n as f64;
                 let r = offset.get();
                 if i % 2 == 1 && mirror {
-                    (color, Ratio::new(t + (1.0 - r) / n as f64))
+                    (color, Ratio::new((i as f64 + 1.0 - r) / n as f64))
                 } else {
-                    (color, Ratio::new(t + r / n as f64))
+                    (color, Ratio::new((i as f64 + r) / n as f64))
                 }
             })
             .collect::<Vec<_>>();
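Algebraically, i/n + r/n and (i + r)/n are the same, so the change above is about floating-point rounding: with the two-step form, the last stop of repetition i (i/n + 1/n) and the first stop of repetition i + 1 ((i + 1)/n + 0/n) are rounded differently and can end up out of order, whereas the single division makes the two seam stops bit-identical, which the unchanged stop.get() < last_stop check in process_stops accepts. A small self-contained illustration of that seam behaviour (plain f64 arithmetic over small repeat counts; this is a sketch, not the typst-library code):

fn main() {
    // Old formula: each stop is `i/n + r/n`, computed in two rounded steps.
    // New formula: each stop is `(i + r)/n`, a single division.
    // At the seam between two repetitions, the old formula compares
    // `i/n + 1/n` (last stop of repetition i) with `(i + 1)/n + 0/n`
    // (first stop of repetition i + 1); the two are only algebraically equal.
    let mut mismatched_seams = 0;
    for n in 2u32..=64 {
        for i in 0..n - 1 {
            let (i, nf) = (f64::from(i), f64::from(n));
            let old_end = i / nf + 1.0 / nf;
            let old_next = (i + 1.0) / nf + 0.0 / nf;
            let new_end = (i + 1.0) / nf;        // new form, r = 1
            let new_next = (i + 1.0 + 0.0) / nf; // new form at the next seam, r = 0
            // The single-division form makes the seam stops bit-identical.
            assert_eq!(new_end, new_next);
            if old_end != old_next {
                mismatched_seams += 1;
            }
        }
    }
    // Any seam where `old_end > old_next` is a decreasing stop pair, which
    // `process_stops` rejects when the stops are fed back into
    // `gradient.linear`, as the new `issue-5819-gradient-repeat` test checks.
    println!("seams where the two-step formula disagrees: {mismatched_seams}");
}

Equal seam offsets also explain the message change below: the check already allowed equal offsets (it only rejects decreasing ones), so dropping "strictly" makes the wording match the behaviour.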
@@ -1230,7 +1229,7 @@ fn process_stops(stops: &[Spanned<GradientStop>]) -> SourceResult<Vec<(Color, Ra
         };

         if stop.get() < last_stop {
-            bail!(*span, "offsets must be in strictly monotonic order");
+            bail!(*span, "offsets must be in monotonic order");
         }

         last_stop = stop.get();
@@ -658,3 +658,11 @@ $ A = mat(
   height: 10pt,
   fill: gradient.linear(violet, blue, space: cmyk)
 )
+
+--- issue-5819-gradient-repeat ---
+// Ensure the gradient constructor generates monotonic stops which can be fed
+// back into the gradient constructor itself.
+#let my-gradient = gradient.linear(red, blue).repeat(5)
+#let _ = gradient.linear(..my-gradient.stops())
+#let my-gradient2 = gradient.linear(red, blue).repeat(5, mirror: true)
+#let _ = gradient.linear(..my-gradient2.stops())