diff --git a/dust-lang/src/lex.rs b/dust-lang/src/lex.rs
index 66538b5..530eb36 100644
--- a/dust-lang/src/lex.rs
+++ b/dust-lang/src/lex.rs
@@ -48,6 +48,8 @@ pub fn lex<'chars, 'src: 'chars>(input: &'src str) -> Result<Vec<(Token<'src>, Span)>, LexError> {
 #[derive(Debug, Clone)]
 /// Low-level tool for lexing a single token at a time.
 ///
+/// **Note**: It is a logic error to call `next_token` with different inputs.
+///
 /// # Examples
 /// ```
 /// # use dust_lang::*;
@@ -89,6 +91,8 @@ impl Lexer {
     }
 
     /// Produce the next token.
+    ///
+    /// It is a logic error to call this method with different inputs.
     pub fn next_token<'src>(&mut self, source: &'src str) -> Result<(Token<'src>, Span), LexError> {
         self.skip_whitespace(source);
 
diff --git a/dust-lang/src/parse.rs b/dust-lang/src/parse.rs
index d15e8ca..a8a061b 100644
--- a/dust-lang/src/parse.rs
+++ b/dust-lang/src/parse.rs
@@ -230,7 +230,7 @@ impl<'src> Parser<'src> {
             Ok(Node::new(node.statement, (left_span.0, right_span.1)))
         } else {
             Err(ParseError::ExpectedClosingParenthesis {
-                actual: TokenOwned::from(self.current.0),
+                actual: self.current.0.to_owned(),
                 span: self.current.1,
             })
         }
@@ -260,7 +260,7 @@ impl<'src> Parser<'src> {
             nodes.push(instruction);
         } else {
             return Err(ParseError::ExpectedClosingSquareBrace {
-                actual: TokenOwned::from(self.current.0),
+                actual: self.current.0.to_owned(),
                 span: self.current.1,
             });
         }
@@ -285,7 +285,7 @@ impl<'src> Parser<'src> {
             self.next_token()?;
         } else {
             return Err(ParseError::ExpectedOpeningParenthesis {
-                actual: TokenOwned::from(self.current.0),
+                actual: self.current.0.to_owned(),
                 span: self.current.1,
             });
         }
@@ -311,7 +311,7 @@ impl<'src> Parser<'src> {
             }
         } else {
             return Err(ParseError::ExpectedClosingParenthesis {
-                actual: TokenOwned::from(self.current.0),
+                actual: self.current.0.to_owned(),
                 span: self.current.1,
             });
         }
@@ -326,9 +326,7 @@ impl<'src> Parser<'src> {
                     left_span,
                 ))
             }
-            _ => Err(ParseError::UnexpectedToken(TokenOwned::from(
-                self.current.0,
-            ))),
+            _ => Err(ParseError::UnexpectedToken(self.current.0.to_owned())),
         }
     }
 
diff --git a/dust-lang/src/token.rs b/dust-lang/src/token.rs
index 25ea17a..55849a2 100644
--- a/dust-lang/src/token.rs
+++ b/dust-lang/src/token.rs
@@ -2,7 +2,8 @@ use std::fmt::{self, Display, Formatter};
 
 use serde::{Deserialize, Serialize};
 
-#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
+/// Source code token.
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
 pub enum Token<'src> {
     Eof,
 
@@ -33,6 +34,33 @@ pub enum Token<'src> {
     Star,
 }
 
+impl<'src> Token<'src> {
+    pub fn to_owned(&self) -> TokenOwned {
+        match self {
+            Token::Eof => TokenOwned::Eof,
+            Token::Identifier(text) => TokenOwned::Identifier(text.to_string()),
+            Token::Boolean(boolean) => TokenOwned::Boolean(*boolean),
+            Token::Float(float) => TokenOwned::Float(*float),
+            Token::Integer(integer) => TokenOwned::Integer(*integer),
+            Token::String(text) => TokenOwned::String(text.to_string()),
+            Token::IsEven => TokenOwned::IsEven,
+            Token::IsOdd => TokenOwned::IsOdd,
+            Token::Length => TokenOwned::Length,
+            Token::ReadLine => TokenOwned::ReadLine,
+            Token::WriteLine => TokenOwned::WriteLine,
+            Token::Comma => TokenOwned::Comma,
+            Token::Dot => TokenOwned::Dot,
+            Token::Equal => TokenOwned::Equal,
+            Token::Plus => TokenOwned::Plus,
+            Token::Star => TokenOwned::Star,
+            Token::LeftParenthesis => TokenOwned::LeftParenthesis,
+            Token::RightParenthesis => TokenOwned::RightParenthesis,
+            Token::LeftSquareBrace => TokenOwned::LeftSquareBrace,
+            Token::RightSquareBrace => TokenOwned::RightSquareBrace,
+        }
+    }
+}
+
 impl<'src> Display for Token<'src> {
     fn fmt(&self, f: &mut Formatter) -> fmt::Result {
         match self {
@@ -60,6 +88,9 @@ impl<'src> Display for Token<'src> {
     }
 }
 
+/// Owned version of `Token`, which owns all the strings.
+///
+/// This is used for errors.
 #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
 pub enum TokenOwned {
     Eof,
@@ -90,30 +121,3 @@ pub enum TokenOwned {
     RightSquareBrace,
     Star,
 }
-
-impl<'str> From<Token<'str>> for TokenOwned {
-    fn from(token: Token<'str>) -> Self {
-        match token {
-            Token::Eof => TokenOwned::Eof,
-            Token::Identifier(text) => TokenOwned::Identifier(text.to_string()),
-            Token::Boolean(boolean) => TokenOwned::Boolean(boolean),
-            Token::Float(float) => TokenOwned::Float(float),
-            Token::Integer(integer) => TokenOwned::Integer(integer),
-            Token::String(text) => TokenOwned::String(text.to_string()),
-            Token::IsEven => TokenOwned::IsEven,
-            Token::IsOdd => TokenOwned::IsOdd,
-            Token::Length => TokenOwned::Length,
-            Token::ReadLine => TokenOwned::ReadLine,
-            Token::WriteLine => TokenOwned::WriteLine,
-            Token::Comma => TokenOwned::Comma,
-            Token::Dot => TokenOwned::Dot,
-            Token::Equal => TokenOwned::Equal,
-            Token::Plus => TokenOwned::Plus,
-            Token::Star => TokenOwned::Star,
-            Token::LeftParenthesis => TokenOwned::LeftParenthesis,
-            Token::RightParenthesis => TokenOwned::RightParenthesis,
-            Token::LeftSquareBrace => TokenOwned::LeftSquareBrace,
-            Token::RightSquareBrace => TokenOwned::RightSquareBrace,
-        }
-    }
-}
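For context, a minimal self-contained sketch (not dust-lang itself; the enums are trimmed to two hypothetical variants and the parser is omitted) of the pattern this change adopts: an inherent `to_owned` method on the borrowed `Token`, which error paths call as `self.current.0.to_owned()`, replacing the deleted `From<Token<'_>> for TokenOwned` impl. Because the method takes `&self`, it still works now that `Token` no longer derives `Clone` and `Copy`.

```rust
// Hypothetical, trimmed-down stand-ins for dust-lang's Token / TokenOwned.
#[derive(Debug, PartialEq)]
enum Token<'src> {
    Identifier(&'src str),
    Integer(i64),
}

#[derive(Debug, PartialEq)]
enum TokenOwned {
    Identifier(String),
    Integer(i64),
}

impl<'src> Token<'src> {
    /// Convert a borrowed token into its owned counterpart for error reporting.
    fn to_owned(&self) -> TokenOwned {
        match self {
            Token::Identifier(text) => TokenOwned::Identifier(text.to_string()),
            Token::Integer(integer) => TokenOwned::Integer(*integer),
        }
    }
}

fn main() {
    let source = String::from("answer");
    let current = Token::Identifier(&source);

    // The owned value no longer borrows from `source`, so an error can keep it
    // after the source text goes away.
    let owned: TokenOwned = current.to_owned();
    assert_eq!(owned, TokenOwned::Identifier("answer".to_string()));

    let number = Token::Integer(42);
    assert_eq!(number.to_owned(), TokenOwned::Integer(42));

    println!("{owned:?}");
}
```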