Refactor TokenOwned and add some docs

This commit is contained in:
Jeff 2024-08-08 20:19:07 -04:00
parent bf519ec087
commit fa2ce8a0bf
3 changed files with 41 additions and 35 deletions

View File

@ -48,6 +48,8 @@ pub fn lex<'chars, 'src: 'chars>(input: &'src str) -> Result<Vec<(Token<'chars>,
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
/// Low-level tool for lexing a single token at a time. /// Low-level tool for lexing a single token at a time.
/// ///
/// **Note**: It is a logic error to call `next_token` with a different
/// `source` than was passed to previous calls on the same lexer.
///
/// # Examples /// # Examples
/// ``` /// ```
/// # use dust_lang::*; /// # use dust_lang::*;
@ -89,6 +91,8 @@ impl Lexer {
} }
/// Produce the next token. /// Produce the next token.
///
/// It is a logic error to call this method with a different `source` than
/// was passed to previous calls on the same lexer.
pub fn next_token<'src>(&mut self, source: &'src str) -> Result<(Token<'src>, Span), LexError> { pub fn next_token<'src>(&mut self, source: &'src str) -> Result<(Token<'src>, Span), LexError> {
self.skip_whitespace(source); self.skip_whitespace(source);

View File

@ -230,7 +230,7 @@ impl<'src> Parser<'src> {
Ok(Node::new(node.statement, (left_span.0, right_span.1))) Ok(Node::new(node.statement, (left_span.0, right_span.1)))
} else { } else {
Err(ParseError::ExpectedClosingParenthesis { Err(ParseError::ExpectedClosingParenthesis {
actual: TokenOwned::from(self.current.0), actual: self.current.0.to_owned(),
span: self.current.1, span: self.current.1,
}) })
} }
@ -260,7 +260,7 @@ impl<'src> Parser<'src> {
nodes.push(instruction); nodes.push(instruction);
} else { } else {
return Err(ParseError::ExpectedClosingSquareBrace { return Err(ParseError::ExpectedClosingSquareBrace {
actual: TokenOwned::from(self.current.0), actual: self.current.0.to_owned(),
span: self.current.1, span: self.current.1,
}); });
} }
@ -285,7 +285,7 @@ impl<'src> Parser<'src> {
self.next_token()?; self.next_token()?;
} else { } else {
return Err(ParseError::ExpectedOpeningParenthesis { return Err(ParseError::ExpectedOpeningParenthesis {
actual: TokenOwned::from(self.current.0), actual: self.current.0.to_owned(),
span: self.current.1, span: self.current.1,
}); });
} }
@ -311,7 +311,7 @@ impl<'src> Parser<'src> {
} }
} else { } else {
return Err(ParseError::ExpectedClosingParenthesis { return Err(ParseError::ExpectedClosingParenthesis {
actual: TokenOwned::from(self.current.0), actual: self.current.0.to_owned(),
span: self.current.1, span: self.current.1,
}); });
} }
@ -326,9 +326,7 @@ impl<'src> Parser<'src> {
left_span, left_span,
)) ))
} }
_ => Err(ParseError::UnexpectedToken(TokenOwned::from( _ => Err(ParseError::UnexpectedToken(self.current.0.to_owned())),
self.current.0,
))),
} }
} }

View File

@ -2,7 +2,8 @@ use std::fmt::{self, Display, Formatter};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)] /// Source code token.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub enum Token<'src> { pub enum Token<'src> {
Eof, Eof,
@ -33,6 +34,33 @@ pub enum Token<'src> {
Star, Star,
} }
impl<'src> Token<'src> {
    /// Copies this token into a [`TokenOwned`], allocating owned `String`s
    /// for any string data borrowed from the source text.
    ///
    /// Useful when a token must outlive the source it was lexed from,
    /// e.g. when building error values.
    pub fn to_owned(&self) -> TokenOwned {
        match self {
            Self::Boolean(boolean) => TokenOwned::Boolean(*boolean),
            Self::Comma => TokenOwned::Comma,
            Self::Dot => TokenOwned::Dot,
            Self::Eof => TokenOwned::Eof,
            Self::Equal => TokenOwned::Equal,
            Self::Float(float) => TokenOwned::Float(*float),
            Self::Identifier(text) => TokenOwned::Identifier(text.to_string()),
            Self::Integer(integer) => TokenOwned::Integer(*integer),
            Self::IsEven => TokenOwned::IsEven,
            Self::IsOdd => TokenOwned::IsOdd,
            Self::LeftParenthesis => TokenOwned::LeftParenthesis,
            Self::LeftSquareBrace => TokenOwned::LeftSquareBrace,
            Self::Length => TokenOwned::Length,
            Self::Plus => TokenOwned::Plus,
            Self::ReadLine => TokenOwned::ReadLine,
            Self::RightParenthesis => TokenOwned::RightParenthesis,
            Self::RightSquareBrace => TokenOwned::RightSquareBrace,
            Self::Star => TokenOwned::Star,
            Self::String(text) => TokenOwned::String(text.to_string()),
            Self::WriteLine => TokenOwned::WriteLine,
        }
    }
}
impl<'src> Display for Token<'src> { impl<'src> Display for Token<'src> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result { fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match self { match self {
@ -60,6 +88,9 @@ impl<'src> Display for Token<'src> {
} }
} }
/// Owned version of `Token`, which owns all the strings.
///
/// This is used for errors.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
pub enum TokenOwned { pub enum TokenOwned {
Eof, Eof,
@ -90,30 +121,3 @@ pub enum TokenOwned {
RightSquareBrace, RightSquareBrace,
Star, Star,
} }
impl<'str> From<Token<'str>> for TokenOwned {
fn from(token: Token<'str>) -> Self {
match token {
Token::Eof => TokenOwned::Eof,
Token::Identifier(text) => TokenOwned::Identifier(text.to_string()),
Token::Boolean(boolean) => TokenOwned::Boolean(boolean),
Token::Float(float) => TokenOwned::Float(float),
Token::Integer(integer) => TokenOwned::Integer(integer),
Token::String(text) => TokenOwned::String(text.to_string()),
Token::IsEven => TokenOwned::IsEven,
Token::IsOdd => TokenOwned::IsOdd,
Token::Length => TokenOwned::Length,
Token::ReadLine => TokenOwned::ReadLine,
Token::WriteLine => TokenOwned::WriteLine,
Token::Comma => TokenOwned::Comma,
Token::Dot => TokenOwned::Dot,
Token::Equal => TokenOwned::Equal,
Token::Plus => TokenOwned::Plus,
Token::Star => TokenOwned::Star,
Token::LeftParenthesis => TokenOwned::LeftParenthesis,
Token::RightParenthesis => TokenOwned::RightParenthesis,
Token::LeftSquareBrace => TokenOwned::LeftSquareBrace,
Token::RightSquareBrace => TokenOwned::RightSquareBrace,
}
}
}