Forward third-party errors

Better to know something even if it isn't always formatted in the prettiest way
Laurenz 2023-09-11 12:04:37 +02:00
parent 6483d3035b
commit 921b40cf9c
19 changed files with 124 additions and 106 deletions
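
The pattern applied throughout the commit, shown as a minimal standalone sketch (not part of the diff; the `write_output` helper and its message are illustrative only): instead of discarding the underlying error with `|_|` and a fixed string, the closure captures it and forwards the third-party message in parentheses via `eco_format!`.

use std::path::Path;

use ecow::{eco_format, EcoString};

// Illustrative only: forward the underlying I/O error instead of replacing it
// with a fixed message.
fn write_output(path: &Path, buffer: &[u8]) -> Result<(), EcoString> {
    // Before: .map_err(|_| "failed to write output file".into())
    // After: the third-party error text is included in parentheses.
    std::fs::write(path, buffer)
        .map_err(|err| eco_format!("failed to write output file ({err})"))
}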

Cargo.lock (generated)

@@ -2925,6 +2925,7 @@ dependencies = [
  "codespan-reporting",
  "comemo",
  "dirs",
+ "ecow",
  "env_proxy",
  "flate2",
  "inferno",


@@ -26,6 +26,7 @@ chrono = { version = "0.4.24", default-features = false, features = ["clock", "s
 clap = { version = "4.2.4", features = ["derive", "env"] }
 codespan-reporting = "0.11"
 comemo = "0.3"
+ecow = "0.1.1"
 dirs = "5"
 flate2 = "1"
 inferno = "0.11.15"


@@ -93,7 +93,7 @@ pub fn compile_once(
     }
     print_diagnostics(world, &[], &warnings, command.common.diagnostic_format)
-        .map_err(|_| "failed to print diagnostics")?;
+        .map_err(|err| eco_format!("failed to print diagnostics ({err})"))?;
     if let Some(open) = command.open.take() {
         open_file(open.as_deref(), &command.output())?;
@@ -115,7 +115,7 @@ pub fn compile_once(
             &warnings,
             command.common.diagnostic_format,
         )
-        .map_err(|_| "failed to print diagnostics")?;
+        .map_err(|err| eco_format!("failed to print diagnostics ({err})"))?;
     }
 }
@@ -135,7 +135,8 @@ fn export(document: &Document, command: &CompileCommand) -> StrResult<()> {
 fn export_pdf(document: &Document, command: &CompileCommand) -> StrResult<()> {
     let output = command.output();
     let buffer = typst::export::pdf(document);
-    fs::write(output, buffer).map_err(|_| "failed to write PDF file")?;
+    fs::write(output, buffer)
+        .map_err(|err| eco_format!("failed to write PDF file ({err})"))?;
     Ok(())
 }
@@ -176,11 +177,14 @@ fn export_image(
         ImageExportFormat::Png => {
             let pixmap =
                 typst::export::render(frame, command.ppi / 72.0, Color::WHITE);
-            pixmap.save_png(path).map_err(|_| "failed to write PNG file")?;
+            pixmap
+                .save_png(path)
+                .map_err(|err| eco_format!("failed to write PNG file ({err})"))?;
         }
         ImageExportFormat::Svg => {
             let svg = typst::export::svg(frame);
-            fs::write(path, svg).map_err(|_| "failed to write SVG file")?;
+            fs::write(path, svg)
+                .map_err(|err| eco_format!("failed to write SVG file ({err})"))?;
         }
     }
 }


@@ -35,7 +35,7 @@ fn main() -> ExitCode {
     let _guard = match crate::tracing::setup_tracing(&ARGS) {
         Ok(guard) => guard,
         Err(err) => {
-            eprintln!("failed to initialize tracing {}", err);
+            eprintln!("failed to initialize tracing ({err})");
             None
         }
     };


@@ -3,6 +3,7 @@ use std::io::{self, Write};
 use std::path::{Path, PathBuf};
 use codespan_reporting::term::{self, termcolor};
+use ecow::eco_format;
 use termcolor::WriteColor;
 use typst::diag::{PackageError, PackageResult};
 use typst::syntax::PackageSpec;
@@ -50,18 +51,19 @@ fn download_package(spec: &PackageSpec, package_dir: &Path) -> PackageResult<()>
     );
     print_downloading(spec).unwrap();
     let data = match download_with_progress(&url) {
         Ok(data) => data,
         Err(ureq::Error::Status(404, _)) => {
             return Err(PackageError::NotFound(spec.clone()))
         }
-        Err(_) => return Err(PackageError::NetworkFailed),
+        Err(err) => return Err(PackageError::NetworkFailed(Some(eco_format!("{err}")))),
     };
     let decompressed = flate2::read::GzDecoder::new(data.as_slice());
-    tar::Archive::new(decompressed).unpack(package_dir).map_err(|_| {
+    tar::Archive::new(decompressed).unpack(package_dir).map_err(|err| {
         fs::remove_dir_all(package_dir).ok();
-        PackageError::MalformedArchive
+        PackageError::MalformedArchive(Some(eco_format!("{err}")))
     })
 }


@@ -31,7 +31,7 @@ pub fn query(command: &QueryCommand) -> StrResult<()> {
             let serialized = format(data, command)?;
             println!("{serialized}");
             print_diagnostics(&world, &[], &warnings, command.common.diagnostic_format)
-                .map_err(|_| "failed to print diagnostics")?;
+                .map_err(|err| eco_format!("failed to print diagnostics ({err})"))?;
         }
         // Print diagnostics.
@@ -43,7 +43,7 @@ pub fn query(command: &QueryCommand) -> StrResult<()> {
                 &warnings,
                 command.common.diagnostic_format,
             )
-            .map_err(|_| "failed to print diagnostics")?;
+            .map_err(|err| eco_format!("failed to print diagnostics ({err})"))?;
         }
     }


@@ -128,10 +128,10 @@ impl TracingGuard {
 impl Drop for TracingGuard {
     fn drop(&mut self) {
         if !std::thread::panicking() {
-            if let Err(e) = self.finish() {
+            if let Err(err) = self.finish() {
                 // Since we are finished, we cannot rely on tracing to log the
                 // error.
-                eprintln!("failed to flush tracing flamegraph: {e}");
+                eprintln!("failed to flush tracing flamegraph ({err})");
             }
         }
     }


@@ -51,15 +51,15 @@ pub fn update(command: &UpdateCommand) -> StrResult<()> {
         return self_replace::self_replace(&backup_path)
             .and_then(|_| fs::remove_file(&backup_path))
-            .map_err(|err| eco_format!("failed to revert to backup: {err}"));
+            .map_err(|err| eco_format!("failed to revert to backup ({err})"));
     }
     let current_exe = env::current_exe().map_err(|err| {
-        eco_format!("failed to locate path of the running executable: {err}")
+        eco_format!("failed to locate path of the running executable ({err})")
     })?;
     fs::copy(current_exe, &backup_path)
-        .map_err(|err| eco_format!("failed to create backup: {err}"))?;
+        .map_err(|err| eco_format!("failed to create backup ({err})"))?;
     let release = Release::from_tag(command.version.as_ref())?;
     if !update_needed(&release)? && !command.force {
@@ -69,14 +69,14 @@ pub fn update(command: &UpdateCommand) -> StrResult<()> {
     let binary_data = release.download_binary(needed_asset()?)?;
     let mut temp_exe = NamedTempFile::new()
-        .map_err(|err| eco_format!("failed to create temporary file: {err}"))?;
+        .map_err(|err| eco_format!("failed to create temporary file ({err})"))?;
     temp_exe
         .write_all(&binary_data)
-        .map_err(|err| eco_format!("failed to write binary data: {err}"))?;
+        .map_err(|err| eco_format!("failed to write binary data ({err})"))?;
     self_replace::self_replace(&temp_exe).map_err(|err| {
         fs::remove_file(&temp_exe).ok();
-        eco_format!("failed to self-replace running executable: {err}")
+        eco_format!("failed to self-replace running executable ({err})")
     })
 }
@@ -118,7 +118,7 @@ impl Release {
             Err(ureq::Error::Status(404, _)) => {
                 bail!("release not found (searched at {url})")
             }
-            Err(_) => bail!("failed to download release (network failed)"),
+            Err(err) => bail!("failed to download release ({err})"),
         }
     }
@@ -138,7 +138,7 @@ impl Release {
             Err(ureq::Error::Status(404, _)) => {
                 bail!("asset not found (searched for {})", asset.name);
             }
-            Err(_) => bail!("failed to load asset (network failed)"),
+            Err(err) => bail!("failed to download asset ({err})"),
         };
         if asset_name.contains("windows") {
@@ -152,7 +152,7 @@
 /// Extract the Typst binary from a ZIP archive.
 fn extract_binary_from_zip(data: &[u8], asset_name: &str) -> StrResult<Vec<u8>> {
     let mut archive = ZipArchive::new(Cursor::new(data))
-        .map_err(|err| eco_format!("failed to extract ZIP archive: {err}"))?;
+        .map_err(|err| eco_format!("failed to extract ZIP archive ({err})"))?;
     let mut file = archive
         .by_name(&format!("{asset_name}/typst.exe"))
@@ -160,7 +160,7 @@ fn extract_binary_from_zip(data: &[u8], asset_name: &str) -> StrResult<Vec<u8>>
     let mut buffer = vec![];
     file.read_to_end(&mut buffer).map_err(|err| {
-        eco_format!("failed to read binary data from ZIP archive: {err}")
+        eco_format!("failed to read binary data from ZIP archive ({err})")
     })?;
     Ok(buffer)
@@ -172,14 +172,14 @@ fn extract_binary_from_tar_xz(data: &[u8]) -> StrResult<Vec<u8>> {
     let mut file = archive
         .entries()
-        .map_err(|err| eco_format!("failed to extract tar.xz archive: {err}"))?
+        .map_err(|err| eco_format!("failed to extract tar.xz archive ({err})"))?
         .filter_map(Result::ok)
         .find(|e| e.path().unwrap_or_default().ends_with("typst"))
         .ok_or("tar.xz archive did not contain Typst binary")?;
     let mut buffer = vec![];
     file.read_to_end(&mut buffer).map_err(|err| {
-        eco_format!("failed to read binary data from tar.xz archive: {err}")
+        eco_format!("failed to read binary data from tar.xz archive ({err})")
     })?;
     Ok(buffer)
@@ -235,7 +235,7 @@ fn backup_path() -> StrResult<PathBuf> {
     let backup_dir = root_backup_dir.join("typst");
     fs::create_dir_all(&backup_dir)
-        .map_err(|err| eco_format!("failed to create backup directory: {err}"))?;
+        .map_err(|err| eco_format!("failed to create backup directory ({err})"))?;
     Ok(backup_dir.join("typst_backup.part"))
 }


@@ -25,7 +25,7 @@ pub fn watch(mut command: CompileCommand) -> StrResult<()> {
     // Setup file watching.
     let (tx, rx) = std::sync::mpsc::channel();
     let mut watcher = RecommendedWatcher::new(tx, notify::Config::default())
-        .map_err(|_| "failed to setup file watching")?;
+        .map_err(|err| eco_format!("failed to setup file watching ({err})"))?;
     // Watch all the files that are used by the input file and its dependencies.
     watch_dependencies(&mut world, &mut watcher, HashSet::new())?;
@@ -41,7 +41,8 @@ pub fn watch(mut command: CompileCommand) -> StrResult<()> {
             .into_iter()
             .chain(std::iter::from_fn(|| rx.recv_timeout(timeout).ok()))
         {
-            let event = event.map_err(|_| "failed to watch directory")?;
+            let event =
+                event.map_err(|err| eco_format!("failed to watch directory ({err})"))?;
             // Workaround for notify-rs' implicit unwatch on remove/rename
             // (triggered by some editors when saving files) with the inotify
@@ -94,7 +95,7 @@ fn watch_dependencies(
             tracing::info!("Watching {}", path.display());
             watcher
                 .watch(path, RecursiveMode::NonRecursive)
-                .map_err(|_| eco_format!("failed to watch {path:?}"))?;
+                .map_err(|err| eco_format!("failed to watch {path:?} ({err})"))?;
         }
     }


@@ -1,5 +1,6 @@
 use typst::diag::{format_xml_like_error, FileError};
 use typst::eval::Bytes;
+use typst::syntax::is_newline;
 use crate::prelude::*;
@@ -197,15 +198,16 @@ cast! {
 }
 /// Format the user-facing CSV error message.
-fn format_csv_error(error: csv::Error, line: usize) -> EcoString {
-    match error.kind() {
+fn format_csv_error(err: csv::Error, line: usize) -> EcoString {
+    match err.kind() {
         csv::ErrorKind::Utf8 { .. } => "file is not valid utf-8".into(),
         csv::ErrorKind::UnequalLengths { expected_len, len, .. } => {
             eco_format!(
-                "failed to parse csv file: found {len} instead of {expected_len} fields in line {line}"
+                "failed to parse CSV (found {len} instead of \
+                 {expected_len} fields in line {line})"
             )
         }
-        _ => "failed to parse csv file".into(),
+        _ => eco_format!("failed to parse CSV ({err})"),
     }
 }
@@ -278,7 +280,7 @@ pub fn json_decode(
 ) -> SourceResult<Value> {
     let Spanned { v: data, span } = data;
     serde_json::from_slice(data.as_slice())
-        .map_err(format_json_error)
+        .map_err(|err| eco_format!("failed to parse JSON ({err})"))
         .at(span)
 }
@@ -302,16 +304,10 @@ pub fn json_encode(
         serde_json::to_string(&value)
     }
     .map(|v| v.into())
-    .map_err(|e| eco_format!("failed to encode value as json: {e}"))
+    .map_err(|err| eco_format!("failed to encode value as JSON ({err})"))
     .at(span)
 }
-/// Format the user-facing JSON error message.
-fn format_json_error(error: serde_json::Error) -> EcoString {
-    assert!(error.is_syntax() || error.is_eof());
-    eco_format!("failed to parse json file: syntax error in line {}", error.line())
-}
 /// Reads structured data from a TOML file.
 ///
 /// The file must contain a valid TOML table. TOML tables will be converted into
@@ -366,7 +362,9 @@ pub fn toml_decode(
     let raw = std::str::from_utf8(data.as_slice())
         .map_err(|_| "file is not valid utf-8")
         .at(span)?;
-    toml::from_str(raw).map_err(format_toml_error).at(span)
+    toml::from_str(raw)
+        .map_err(|err| format_toml_error(err, raw))
+        .at(span)
 }
 /// Encodes structured data into a TOML string.
@@ -385,21 +383,21 @@ pub fn toml_encode(
     let Spanned { v: value, span } = value;
     if pretty { toml::to_string_pretty(&value) } else { toml::to_string(&value) }
         .map(|v| v.into())
-        .map_err(|e| eco_format!("failed to encode value as toml: {e}"))
+        .map_err(|err| eco_format!("failed to encode value as TOML ({err})"))
         .at(span)
 }
 /// Format the user-facing TOML error message.
-fn format_toml_error(error: toml::de::Error) -> EcoString {
-    if let Some(range) = error.span() {
+fn format_toml_error(error: toml::de::Error, raw: &str) -> EcoString {
+    if let Some(head) = error.span().and_then(|range| raw.get(..range.start)) {
+        let line = head.lines().count();
+        let column = 1 + head.chars().rev().take_while(|&c| !is_newline(c)).count();
         eco_format!(
-            "failed to parse toml file: {}, index {}-{}",
+            "failed to parse TOML ({} at line {line} column {column})",
             error.message(),
-            range.start,
-            range.end
         )
     } else {
-        eco_format!("failed to parse toml file: {}", error.message())
+        eco_format!("failed to parse TOML ({})", error.message())
     }
 }
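
An aside on the line/column arithmetic introduced in `format_toml_error` above; a rough standalone check (not part of the diff, and simplified to treat only `'\n'` as a newline, whereas Typst's `is_newline` helper accepts more characters):

// Count the 1-based line and column for the prefix of the input that precedes
// the reported error offset, mirroring the arithmetic above.
fn line_and_column(head: &str) -> (usize, usize) {
    let line = head.lines().count();
    let column = 1 + head.chars().rev().take_while(|&c| c != '\n').count();
    (line, column)
}

fn main() {
    let raw = "[table]\nkey value"; // hypothetical malformed TOML input
    let offset = 12; // hypothetical byte offset reported by the parser
    let (line, column) = line_and_column(&raw[..offset]);
    assert_eq!((line, column), (2, 5));
    println!("error at line {line} column {column}");
}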
@@ -464,7 +462,7 @@ pub fn yaml_decode(
 ) -> SourceResult<Value> {
     let Spanned { v: data, span } = data;
     serde_yaml::from_slice(data.as_slice())
-        .map_err(format_yaml_error)
+        .map_err(|err| eco_format!("failed to parse YAML ({err})"))
        .at(span)
 }
@@ -480,15 +478,10 @@ pub fn yaml_encode(
     let Spanned { v: value, span } = value;
     serde_yaml::to_string(&value)
         .map(|v| v.into())
-        .map_err(|e| eco_format!("failed to encode value as yaml: {e}"))
+        .map_err(|err| eco_format!("failed to encode value as YAML ({err})"))
         .at(span)
 }
-/// Format the user-facing YAML error message.
-fn format_yaml_error(error: serde_yaml::Error) -> EcoString {
-    eco_format!("failed to parse yaml file: {}", error.to_string().trim())
-}
 /// Reads structured data from a CBOR file.
 ///
 /// The file must contain a valid cbor serialization. Mappings will be
@@ -529,7 +522,7 @@ pub fn cbor_decode(
 ) -> SourceResult<Value> {
     let Spanned { v: data, span } = data;
     ciborium::from_reader(data.as_slice())
-        .map_err(|e| eco_format!("failed to parse cbor: {e}"))
+        .map_err(|err| eco_format!("failed to parse CBOR ({err})"))
         .at(span)
 }
@@ -546,7 +539,7 @@ pub fn cbor_encode(
     let mut res = Vec::new();
     ciborium::into_writer(&value, &mut res)
         .map(|_| res.into())
-        .map_err(|e| eco_format!("failed to encode value as cbor: {e}"))
+        .map_err(|err| eco_format!("failed to encode value as CBOR ({err})"))
         .at(span)
 }
@@ -661,5 +654,5 @@ fn convert_xml(node: roxmltree::Node) -> Value {
 /// Format the user-facing XML error message.
 fn format_xml_error(error: roxmltree::Error) -> EcoString {
-    format_xml_like_error("xml file", error)
+    format_xml_like_error("XML", error)
 }


@@ -650,8 +650,8 @@ fn parse_bib(path_str: &str, src: &str) -> StrResult<Vec<hayagriva::Entry>> {
 }
 /// Format a Hayagriva loading error.
-fn format_hayagriva_error(error: YamlBibliographyError) -> EcoString {
-    eco_format!("{error}")
+fn format_hayagriva_error(err: YamlBibliographyError) -> EcoString {
+    eco_format!("{err}")
 }
 /// Format a BibLaTeX loading error.


@@ -485,10 +485,9 @@ fn load_syntaxes(paths: &SyntaxPaths, bytes: &[Bytes]) -> StrResult<Arc<SyntaxSe
     // We might have multiple sublime-syntax/yaml files
     for (path, bytes) in paths.0.iter().zip(bytes.iter()) {
         let src = std::str::from_utf8(bytes).map_err(FileError::from)?;
-        out.add(
-            SyntaxDefinition::load_from_str(src, false, None)
-                .map_err(|e| eco_format!("failed to parse syntax file `{path}`: {e}"))?,
-        );
+        out.add(SyntaxDefinition::load_from_str(src, false, None).map_err(|err| {
+            eco_format!("failed to parse syntax file `{path}` ({err})")
+        })?);
     }
     Ok(Arc::new(out.build()))
@@ -528,7 +527,7 @@ fn load_theme(path: EcoString, bytes: Bytes) -> StrResult<Arc<synt::Theme>> {
     synt::ThemeSet::load_from_reader(&mut cursor)
         .map(Arc::new)
-        .map_err(|e| eco_format!("failed to parse theme file `{path}`: {e}"))
+        .map_err(|err| eco_format!("failed to parse theme file `{path}` ({err})"))
 }


@@ -320,21 +320,23 @@ pub enum FileError {
     /// The package the file is part of could not be loaded.
     Package(PackageError),
     /// Another error.
-    Other,
+    ///
+    /// The optional string can give more details, if available.
+    Other(Option<EcoString>),
 }
 impl FileError {
     /// Create a file error from an I/O error.
-    pub fn from_io(error: io::Error, path: &Path) -> Self {
-        match error.kind() {
+    pub fn from_io(err: io::Error, path: &Path) -> Self {
+        match err.kind() {
             io::ErrorKind::NotFound => Self::NotFound(path.into()),
             io::ErrorKind::PermissionDenied => Self::AccessDenied,
             io::ErrorKind::InvalidData
-                if error.to_string().contains("stream did not contain valid UTF-8") =>
+                if err.to_string().contains("stream did not contain valid UTF-8") =>
             {
                 Self::InvalidUtf8
             }
-            _ => Self::Other,
+            _ => Self::Other(Some(eco_format!("{err}"))),
         }
     }
 }
@@ -352,7 +354,8 @@ impl Display for FileError {
             Self::NotSource => f.pad("not a typst source file"),
             Self::InvalidUtf8 => f.pad("file is not valid utf-8"),
             Self::Package(error) => error.fmt(f),
-            Self::Other => f.pad("failed to load file"),
+            Self::Other(Some(err)) => write!(f, "failed to load file ({err})"),
+            Self::Other(None) => f.pad("failed to load file"),
         }
     }
 }
@@ -370,14 +373,14 @@ impl From<FromUtf8Error> for FileError {
 }
 impl From<PackageError> for FileError {
-    fn from(error: PackageError) -> Self {
-        Self::Package(error)
+    fn from(err: PackageError) -> Self {
+        Self::Package(err)
     }
 }
 impl From<FileError> for EcoString {
-    fn from(error: FileError) -> Self {
-        eco_format!("{error}")
+    fn from(err: FileError) -> Self {
+        eco_format!("{err}")
     }
 }
@@ -385,16 +388,18 @@ impl From<FileError> for EcoString {
 pub type PackageResult<T> = Result<T, PackageError>;
 /// An error that occured while trying to load a package.
+///
+/// Some variants have an optional string that can give more details, if available.
 #[derive(Debug, Clone, Eq, PartialEq, Hash)]
 pub enum PackageError {
     /// The specified package does not exist.
     NotFound(PackageSpec),
     /// Failed to retrieve the package through the network.
-    NetworkFailed,
+    NetworkFailed(Option<EcoString>),
     /// The package archive was malformed.
-    MalformedArchive,
+    MalformedArchive(Option<EcoString>),
     /// Another error.
-    Other,
+    Other(Option<EcoString>),
 }
 impl std::error::Error for PackageError {}
@@ -405,16 +410,25 @@ impl Display for PackageError {
             Self::NotFound(spec) => {
                 write!(f, "package not found (searched for {spec})",)
             }
-            Self::NetworkFailed => f.pad("failed to load package (network failed)"),
-            Self::MalformedArchive => f.pad("failed to load package (archive malformed)"),
-            Self::Other => f.pad("failed to load package"),
+            Self::NetworkFailed(Some(err)) => {
+                write!(f, "failed to download package ({err})")
+            }
+            Self::NetworkFailed(None) => f.pad("failed to download package"),
+            Self::MalformedArchive(Some(err)) => {
+                write!(f, "failed to decompress package ({err})")
+            }
+            Self::MalformedArchive(None) => {
+                f.pad("failed to decompress package (archive malformed)")
+            }
+            Self::Other(Some(err)) => write!(f, "failed to load package ({err})"),
+            Self::Other(None) => f.pad("failed to load package"),
         }
     }
 }
 impl From<PackageError> for EcoString {
-    fn from(error: PackageError) -> Self {
-        eco_format!("{error}")
+    fn from(err: PackageError) -> Self {
+        eco_format!("{err}")
     }
 }
@@ -423,26 +437,26 @@ pub fn format_xml_like_error(format: &str, error: roxmltree::Error) -> EcoString
     match error {
         roxmltree::Error::UnexpectedCloseTag { expected, actual, pos } => {
             eco_format!(
-                "failed to parse {format}: found closing tag '{actual}' \
-                 instead of '{expected}' in line {}",
+                "failed to parse {format} (found closing tag '{actual}' \
+                 instead of '{expected}' in line {})",
                 pos.row
             )
         }
         roxmltree::Error::UnknownEntityReference(entity, pos) => {
             eco_format!(
-                "failed to parse {format}: unknown entity '{entity}' in line {}",
+                "failed to parse {format} (unknown entity '{entity}' in line {})",
                 pos.row
             )
         }
         roxmltree::Error::DuplicatedAttribute(attr, pos) => {
             eco_format!(
-                "failed to parse {format}: duplicate attribute '{attr}' in line {}",
+                "failed to parse {format} (duplicate attribute '{attr}' in line {})",
                 pos.row
             )
         }
         roxmltree::Error::NoRootNode => {
-            eco_format!("failed to parse {format}: missing root node")
+            eco_format!("failed to parse {format} (missing root node)")
         }
-        _ => eco_format!("failed to parse {format}"),
+        err => eco_format!("failed to parse {format} ({err})"),
     }
 }


@@ -228,7 +228,10 @@ impl Debug for Datetime {
 fn format_time_format_error(error: Format) -> EcoString {
     match error {
         Format::InvalidComponent(name) => eco_format!("invalid component '{}'", name),
-        _ => "failed to format datetime in the requested format".into(),
+        Format::InsufficientTypeInformation { .. } => {
+            "failed to format datetime (insufficient information)".into()
+        }
+        err => eco_format!("failed to format datetime in the requested format ({err})"),
     }
 }
@@ -263,6 +266,6 @@ fn format_time_invalid_format_description_error(
         InvalidFormatDescription::NotSupported { context, what, index, .. } => {
             eco_format!("{} is not supported in {} at index {}", what, context, index)
         }
-        _ => "failed to parse datetime format".into(),
+        err => eco_format!("failed to parse datetime format ({err})"),
     }
 }


@@ -42,7 +42,7 @@ impl Plugin {
     pub fn new(bytes: Bytes) -> StrResult<Self> {
         let engine = Engine::default();
         let module = Module::new(&engine, bytes.as_slice())
-            .map_err(|err| format!("failed to load WebAssembly module: {err}"))?;
+            .map_err(|err| format!("failed to load WebAssembly module ({err})"))?;
         let mut linker = Linker::new(&engine);
         linker


@@ -8,7 +8,7 @@ use std::rc::Rc;
 use std::sync::Arc;
 use comemo::{Prehashed, Track, Tracked};
-use ecow::{EcoString, EcoVec};
+use ecow::{eco_format, EcoString, EcoVec};
 use image::codecs::gif::GifDecoder;
 use image::codecs::jpeg::JpegDecoder;
 use image::codecs::png::PngDecoder;
@@ -463,7 +463,7 @@ impl SvgFontLoader for PreparedLoader {
 fn format_image_error(error: image::ImageError) -> EcoString {
     match error {
         image::ImageError::Limits(_) => "file is too large".into(),
-        _ => "failed to decode image".into(),
+        err => eco_format!("failed to decode image ({err})"),
     }
 }
@@ -474,8 +474,8 @@ fn format_usvg_error(error: usvg::Error) -> EcoString {
         usvg::Error::MalformedGZip => "file is not compressed correctly".into(),
         usvg::Error::ElementsLimitReached => "file is too large".into(),
         usvg::Error::InvalidSize => {
-            "failed to parse svg: width, height, or viewbox is invalid".into()
+            "failed to parse SVG (width, height, or viewbox is invalid)".into()
         }
-        usvg::Error::ParsingFailed(error) => format_xml_like_error("svg", error),
+        usvg::Error::ParsingFailed(error) => format_xml_like_error("SVG", error),
     }
 }


@@ -221,5 +221,5 @@
 #datetime.today().display(" []")
 ---
-// Error: 26-36 failed to format datetime in the requested format
+// Error: 26-36 failed to format datetime (insufficient information)
 #datetime.today().display("[hour]")


@@ -27,7 +27,7 @@
 #csv("nope.csv")
 ---
-// Error: 6-22 failed to parse csv file: found 3 instead of 2 fields in line 3
+// Error: 6-22 failed to parse CSV (found 3 instead of 2 fields in line 3)
 #csv("/files/bad.csv")
@@ -38,7 +38,7 @@
 #test(data.at(2).weight, 150)
 ---
-// Error: 7-24 failed to parse json file: syntax error in line 3
+// Error: 7-24 failed to parse JSON (expected value at line 3 column 14)
 #json("/files/bad.json")
@@ -80,7 +80,7 @@
 ))
 ---
-// Error: 7-24 failed to parse toml file: expected `.`, `=`, index 15-16
+// Error: 7-24 failed to parse TOML (expected `.`, `=` at line 1 column 16)
 #toml("/files/bad.toml")
@@ -98,7 +98,7 @@
 #test(data.at("1"), "ok")
 ---
-// Error: 7-24 failed to parse yaml file: while parsing a flow sequence, expected ',' or ']' at line 2 column 1
+// Error: 7-24 failed to parse YAML (while parsing a flow sequence, expected ',' or ']' at line 2 column 1)
 #yaml("/files/bad.yaml")
@@ -127,5 +127,5 @@
 ),))
 ---
-// Error: 6-22 failed to parse xml file: found closing tag 'data' instead of 'hello' in line 3
+// Error: 6-22 failed to parse XML (found closing tag 'data' instead of 'hello' in line 3)
 #xml("/files/bad.xml")


@@ -58,7 +58,7 @@ A #box(image("/files/tiger.jpg", height: 1cm, width: 80%)) B
 #image("./image.typ")
 ---
-// Error: 2-25 failed to parse svg: found closing tag 'g' instead of 'style' in line 4
+// Error: 2-25 failed to parse SVG (found closing tag 'g' instead of 'style' in line 4)
 #image("/files/bad.svg")
@@ -66,7 +66,7 @@ A #box(image("/files/tiger.jpg", height: 1cm, width: 80%)) B
 #image.decode(`<svg xmlns="http://www.w3.org/2000/svg" height="140" width="500"><ellipse cx="200" cy="80" rx="100" ry="50" style="fill:yellow;stroke:purple;stroke-width:2" /></svg>`.text, format: "svg")
 ---
-// Error: 2-168 failed to parse svg: missing root node
+// Error: 2-168 failed to parse SVG (missing root node)
 #image.decode(`<svg height="140" width="500"><ellipse cx="200" cy="80" rx="100" ry="50" style="fill:yellow;stroke:purple;stroke-width:2" /></svg>`.text, format: "svg")
@@ -78,5 +78,5 @@ A #box(image("/files/tiger.jpg", height: 1cm, width: 80%)) B
 #image.decode(read("/files/tiger.jpg", encoding: none), format: "jpg", width: 80%)
 ---
-// Error: 2-83 failed to decode image
+// Error: 2-83 failed to decode image (Format error decoding Png: Invalid PNG signature.)
 #image.decode(read("/files/tiger.jpg", encoding: none), format: "png", width: 80%)