Vastly improve documentation 📝

Laurenz 2019-02-19 11:31:03 +01:00
parent c8d3ea89cd
commit b2ea22b25b
4 changed files with 29 additions and 12 deletions

View File

@@ -8,7 +8,7 @@ use crate::parsing::{SyntaxTree, Node};
 /// Abstract representation of a complete typesetted document.
 ///
-/// This abstract thing can then be serialized into a specific format like PDF.
+/// This abstract thing can then be serialized into a specific format like _PDF_.
 #[derive(Debug, Clone, PartialEq)]
 pub struct Document {
     /// The pages of the document.

View File

@@ -1,6 +1,24 @@
 //! Typeset is a library for compiling documents written in the
 //! corresponding typesetting language into a typesetted document in an
 //! output format like _PDF_.
+//!
+//! # Example
+//! This is an example of compiling a _really_ simple document into _PDF_.
+//! ```
+//! use typeset::{parsing::{Tokenize, Parse}, doc::Generate, export::WritePdf};
+//!
+//! let path = "hello-typeset.pdf";
+//! # let path = "../target/hello-typeset.pdf";
+//! let mut file = std::fs::File::create(path).unwrap();
+//!
+//! // Tokenize, parse and then generate the document.
+//! let src = "Hello World from Typeset!";
+//! let doc = src.tokenize()
+//!     .parse().unwrap()
+//!     .generate().unwrap();
+//!
+//! file.write_pdf(&doc).unwrap();
+//! ```
 
 mod pdf;
 mod utility;
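The `# let path = ...` line in the new example uses rustdoc's hidden-line convention: it is compiled and run as part of the doctest but hidden in the rendered documentation, so readers only see the `hello-typeset.pdf` path. As a plain program, the same pipeline reads roughly as follows; this is only a sketch assembled from the calls in the doc comment above, with the module paths (`typeset::parsing`, `typeset::doc`, `typeset::export`) taken from its `use` line.

```rust
use std::fs::File;
use typeset::{parsing::{Tokenize, Parse}, doc::Generate, export::WritePdf};

fn main() {
    // Create the output file for the compiled document.
    let mut file = File::create("hello-typeset.pdf").unwrap();

    // Tokenize, parse and then generate the abstract document.
    let doc = "Hello World from Typeset!"
        .tokenize()
        .parse().unwrap()
        .generate().unwrap();

    // Serialize the abstract document into the PDF file.
    file.write_pdf(&doc).unwrap();
}
```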

View File

@@ -21,24 +21,24 @@ pub enum Token<'s> {
     /// A colon (`:`) indicating the beginning of function arguments.
     ///
     /// If a colon occurs outside of the function header, it will be
-    /// tokenized as a `Word`.
+    /// tokenized as a [Word](Token::Word).
     Colon,
-    /// Same as with `Colon`.
+    /// Same as with [Colon](Token::Colon).
    Equals,
     /// Two underscores, indicating text in _italics_.
     DoubleUnderscore,
     /// Two stars, indicating **bold** text.
     DoubleStar,
-    /// A dollar sign, indicating mathematical content.
+    /// A dollar sign, indicating _mathematical_ content.
     Dollar,
-    /// A hashtag starting a comment.
+    /// A hashtag starting a _comment_.
     Hashtag,
     /// Everything else just is a literal word.
     Word(&'s str),
 }
 
-/// A type that is seperable into logical units (tokens).
+/// A type that is separable into logical units (tokens).
 pub trait Tokenize {
     /// Tokenize self into logical units.
     fn tokenize<'s>(&'s self) -> Tokens<'s>;
@@ -297,7 +297,7 @@ pub struct Function<'s> {
 }
 
-/// A type that is parseable into a syntax tree.
+/// A type that is parsable into a syntax tree.
 pub trait Parse<'s> {
     /// Parse self into a syntax tree.
     fn parse(self) -> ParseResult<SyntaxTree<'s>>;
@@ -559,7 +559,7 @@ mod token_tests {
     }
 
     /// This test has a special look at the double underscore syntax, because
-    /// per Unicode standard they are not seperate words and thus harder to parse
+    /// per Unicode standard they are not separate words and thus harder to parse
     /// than the stars.
     #[test]
     fn tokenize_double_underscore() {
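The doc comments above describe the individual `Token` variants; below is a small sketch of how the token stream might be inspected. It assumes that `Token` is publicly reachable as `typeset::parsing::Token` and that the `Tokens` type returned by `tokenize` implements `Iterator<Item = Token>`, neither of which is shown in this diff; variants declared before the part of the enum visible here are handled by a catch-all arm.

```rust
use typeset::parsing::{Token, Tokenize};

fn main() {
    // Walk the token stream of a tiny source string.
    let src = "__Hello__ **World** $x$ # comment";
    for token in src.tokenize() {
        match token {
            Token::DoubleUnderscore => println!("italics toggle"),
            Token::DoubleStar => println!("bold toggle"),
            Token::Dollar => println!("math toggle"),
            Token::Hashtag => println!("comment start"),
            Token::Colon | Token::Equals => println!("function header syntax"),
            Token::Word(word) => println!("word: {:?}", word),
            // Variants not visible in this hunk are covered here.
            _ => println!("other token"),
        }
    }
}
```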

View File

@@ -7,8 +7,7 @@ use pdf::{PdfWriter, Id, Rect, Version, DocumentCatalog, PageTree,
 /// A type that is a sink for types that can be written conforming
-/// to the _PDF_ format (that may be things like sizes, other objects
-/// or whole documents).
+/// to the _PDF_ format.
 pub trait WritePdf<T> {
     /// Write self into a byte sink, returning how many bytes were written.
     fn write_pdf(&mut self, object: &T) -> io::Result<usize>;
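Since the following hunks show a blanket `impl<W: Write> WritePdf<Document> for W`, any `io::Write` sink should satisfy the "byte sink" mentioned in this doc comment, and the returned `usize` is the number of bytes written. A hedged sketch, reusing the pipeline from the crate-level example:

```rust
use typeset::{parsing::{Tokenize, Parse}, doc::Generate, export::WritePdf};

fn main() {
    // Build the abstract document as in the crate-level example.
    let doc = "Hello World from Typeset!"
        .tokenize()
        .parse().unwrap()
        .generate().unwrap();

    // An in-memory buffer works as well as a file, since the blanket impl
    // covers any `io::Write` sink; the return value is the byte count.
    let mut buffer: Vec<u8> = Vec::new();
    let bytes = buffer.write_pdf(&doc).unwrap();
    println!("wrote {} bytes of PDF", bytes);
}
```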
@@ -44,7 +43,7 @@ impl<W: Write> WritePdf<Document> for W {
             parent: None,
             kids: (pages_start .. pages_end).collect(),
             data: PageData {
-                resources: Some(vec![Resource::Font(1, font_start)]),
+                resources: Some(vec![Resource::Font { nr: 1, id: font_start }]),
                 .. PageData::none()
             },
         })?;
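The changed line suggests that `Resource::Font` went from a tuple variant to a struct variant with `nr` and `id` fields. Its actual definition lives in the private `pdf` module and is not part of this diff; the following is only a hypothetical sketch of the shape the call site implies, with guessed field types and a stand-in `Id` alias.

```rust
// Stand-in for the crate's `Id` type (real definition not shown in this diff).
type Id = u32;

// Hypothetical shape of the variant implied by the new call site: the field
// names come from the code above, the field types are guesses.
#[allow(dead_code)]
enum Resource {
    Font { nr: u32, id: Id },
}

fn main() {
    // Mirrors the changed line: resource number 1 pointing at a font object id.
    let font_start: Id = 4;
    let _resource = Resource::Font { nr: 1, id: font_start };
}
```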
@@ -81,7 +80,7 @@ impl<W: Write> WritePdf<Document> for W {
         writer.write_obj(id, &Text::new()
             .set_font(1, 13.0)
             .move_pos(108.0, 734.0)
-            .write_str(&string)
+            .write_text(&string)
             .to_stream()
         )?;
         id += 1;