From b81c65629bc9a80997a7ec9947fe4be1bba68153 Mon Sep 17 00:00:00 2001
From: Jeff
Date: Mon, 5 Aug 2024 00:54:12 -0400
Subject: [PATCH] Add docs

---
 dust-lang/src/lex.rs | 14 ++++++++++++++
 dust-lang/src/lib.rs | 16 +++++++---------
 2 files changed, 21 insertions(+), 9 deletions(-)

diff --git a/dust-lang/src/lex.rs b/dust-lang/src/lex.rs
index cd19b3f..49be36b 100644
--- a/dust-lang/src/lex.rs
+++ b/dust-lang/src/lex.rs
@@ -1,7 +1,13 @@
+//! Lexing tools.
+//!
+//! This module provides two lexing options:
+//! - [`lex`], which lexes the entire input and returns a vector of tokens and their positions
+//! - [`Lexer`], which lexes the input one token at a time
 use std::num::{ParseFloatError, ParseIntError};
 
 use crate::{Identifier, Span, Token};
 
+/// Lex the input and return a vector of tokens and their positions.
 pub fn lex(input: &str) -> Result<Vec<(Token, Span)>, LexError> {
     let mut lexer = Lexer::new(input);
     let mut tokens = Vec::new();
@@ -21,12 +27,14 @@ pub fn lex(input: &str) -> Result<Vec<(Token, Span)>, LexError> {
 }
 
 #[derive(Debug, Clone)]
+/// Low-level tool for lexing a single token at a time.
 pub struct Lexer<'a> {
     source: &'a str,
     position: usize,
 }
 
 impl<'a> Lexer<'a> {
+    /// Create a new lexer for the given input.
     pub fn new(input: &'a str) -> Self {
         Lexer {
             source: input,
@@ -34,6 +42,7 @@ impl<'a> Lexer<'a> {
         }
     }
 
+    /// Progress to the next character.
     fn next_char(&mut self) -> Option<char> {
         self.source[self.position..].chars().next().map(|c| {
             self.position += c.len_utf8();
@@ -41,6 +50,7 @@ impl<'a> Lexer<'a> {
         })
     }
 
+    /// Produce the next token.
     pub fn next_token(&mut self) -> Result<(Token, Span), LexError> {
         self.skip_whitespace();
 
@@ -89,6 +99,7 @@ impl<'a> Lexer<'a> {
         Ok((token, span))
     }
 
+    /// Skip whitespace characters.
     fn skip_whitespace(&mut self) {
         while let Some(c) = self.peek_char() {
             if c.is_whitespace() {
@@ -99,10 +110,12 @@ impl<'a> Lexer<'a> {
         }
     }
 
+    /// Peek at the next character without consuming it.
     fn peek_char(&self) -> Option<char> {
         self.source[self.position..].chars().next()
     }
 
+    /// Lex an integer or float token.
     fn lex_number(&mut self) -> Result<(Token, Span), LexError> {
         let start_pos = self.position;
         let mut is_float = false;
@@ -140,6 +153,7 @@ impl<'a> Lexer<'a> {
         }
     }
 
+    /// Lex an identifier token.
     fn lex_identifier(&mut self) -> Result<(Token, Span), LexError> {
         let start_pos = self.position;
 
diff --git a/dust-lang/src/lib.rs b/dust-lang/src/lib.rs
index ae9f89a..6d7db60 100644
--- a/dust-lang/src/lib.rs
+++ b/dust-lang/src/lib.rs
@@ -1,12 +1,10 @@
-/**
-The Dust programming language.
-
-Dust is a statically typed, interpreted programming language.
-
-The [interpreter] module contains the `Interpreter` struct, which is used to lex, parse and/or
-interpret Dust code. The `interpret` function is a convenience function that creates a new
-`Interpreter` and runs the given source code.
-*/
+//! The Dust programming language.
+//!
+//! Dust is a statically typed, interpreted programming language.
+//!
+//! The [interpreter] module contains the `Interpreter` struct, which is used to lex, parse and/or
+//! interpret Dust code. The `interpret` function is a convenience function that creates a new
+//! `Interpreter` and runs the given source code.
 pub mod abstract_tree;
 pub mod analyzer;
 pub mod identifier;
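
A minimal usage sketch of the two options the new module docs describe: `lex` for lexing the whole input at once, and `Lexer` for pulling tokens one at a time. It assumes `lex`, `Lexer`, and `Token` are reachable from the crate root, that `Token` and `Span` implement `Debug`, and that a `Token::Eof` variant marks the end of input, as the loop in `lex` suggests; none of those details are spelled out in the patch itself.

    use dust_lang::{lex, Lexer, Token};

    fn main() {
        let source = "answer 42 3.14";

        // Option 1: lex everything up front.
        // Each element pairs a Token with its Span (its position in the input).
        let tokens = lex(source).unwrap();

        for (token, span) in &tokens {
            println!("{token:?} at {span:?}");
        }

        // Option 2: drive the low-level Lexer one token at a time.
        let mut lexer = Lexer::new(source);

        loop {
            let (token, _span) = lexer.next_token().unwrap();

            // Token::Eof is assumed to be the end-of-input marker.
            if matches!(token, Token::Eof) {
                break;
            }

            println!("{token:?}");
        }
    }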