From e295aebf5677b5fe5febeb8486bf55a88b97120f Mon Sep 17 00:00:00 2001
From: Jeff
Date: Wed, 7 Aug 2024 12:13:49 -0400
Subject: [PATCH] Add examples

---
 dust-lang/src/analyzer.rs | 21 +++++++++++++++++
 dust-lang/src/lex.rs      | 50 +++++++++++++++++++++++++++++++++++++++
 2 files changed, 71 insertions(+)

diff --git a/dust-lang/src/analyzer.rs b/dust-lang/src/analyzer.rs
index cf05232..a8acf07 100644
--- a/dust-lang/src/analyzer.rs
+++ b/dust-lang/src/analyzer.rs
@@ -1,3 +1,10 @@
+/// Tools for analyzing an abstract syntax tree and catching errors before running the virtual
+/// machine.
+///
+/// This module provides two analysis options, both of which borrow an abstract syntax tree and a
+/// hash map of variables:
+/// - `analyze` convenience function
+/// - `Analyzer` struct
 use std::collections::HashMap;
 
 use crate::{AbstractSyntaxTree, Identifier, Node, Statement, Type, Value};
@@ -24,6 +31,20 @@ pub fn analyze(
     analyzer.analyze()
 }
 
+/// Static analyzer that checks for potential runtime errors.
+///
+/// # Examples
+/// ```
+/// # use std::collections::HashMap;
+/// # use dust_lang::*;
+/// let input = "x = 1 + false";
+/// let abstract_tree = parse(input).unwrap();
+/// let variables = HashMap::new();
+/// let analyzer = Analyzer::new(&abstract_tree, &variables);
+/// let result = analyzer.analyze();
+///
+/// assert!(result.is_err());
+/// ```
 pub struct Analyzer<'a> {
     abstract_tree: &'a AbstractSyntaxTree,
     variables: &'a HashMap<Identifier, Value>,
diff --git a/dust-lang/src/lex.rs b/dust-lang/src/lex.rs
index 0c30285..4ed88fc 100644
--- a/dust-lang/src/lex.rs
+++ b/dust-lang/src/lex.rs
@@ -8,6 +8,25 @@ use std::num::{ParseFloatError, ParseIntError};
 use crate::{Identifier, ReservedIdentifier, Span, Token};
 
 /// Lex the input and return a vector of tokens and their positions.
+///
+/// # Examples
+/// ```
+/// # use dust_lang::*;
+/// let input = "x = 1 + 2";
+/// let tokens = lex(input).unwrap();
+///
+/// assert_eq!(
+///     tokens,
+///     [
+///         (Token::Identifier(Identifier::new("x")), (0, 1)),
+///         (Token::Equal, (2, 3)),
+///         (Token::Integer(1), (4, 5)),
+///         (Token::Plus, (6, 7)),
+///         (Token::Integer(2), (8, 9)),
+///         (Token::Eof, (9, 9)),
+///     ]
+/// );
+/// ```
 pub fn lex(input: &str) -> Result<Vec<(Token, Span)>, LexError> {
     let mut lexer = Lexer::new(input);
     let mut tokens = Vec::new();
@@ -28,6 +47,37 @@ pub fn lex(input: &str) -> Result<Vec<(Token, Span)>, LexError> {
 
 #[derive(Debug, Clone)]
 /// Low-level tool for lexing a single token at a time.
+///
+/// # Examples
+/// ```
+/// # use dust_lang::*;
+/// let input = "x = 1 + 2";
+/// let mut lexer = Lexer::new(input);
+/// let mut tokens = Vec::new();
+///
+/// loop {
+///     let (token, span) = lexer.next_token().unwrap();
+///     let is_eof = matches!(token, Token::Eof);
+///
+///     tokens.push((token, span));
+///
+///     if is_eof {
+///         break;
+///     }
+/// }
+///
+/// assert_eq!(
+///     tokens,
+///     [
+///         (Token::Identifier(Identifier::new("x")), (0, 1)),
+///         (Token::Equal, (2, 3)),
+///         (Token::Integer(1), (4, 5)),
+///         (Token::Plus, (6, 7)),
+///         (Token::Integer(2), (8, 9)),
+///         (Token::Eof, (9, 9)),
+///     ]
+/// )
+/// ```
 pub struct Lexer<'a> {
     source: &'a str,
     position: usize,