//! Lexing tools and errors
//!
//! This module provides two lexing options:
//!
//! - [`lex`], which lexes the entire input and returns a vector of tokens and their positions
//! - [`Lexer`], which lexes the input a token at a time

use std::fmt::{self, Display, Formatter};

use serde::{Deserialize, Serialize};

use crate::{dust_error::AnnotatedError, CompileError, DustError, Span, Token};

/// Lexes the input and returns a vector of tokens and their positions.
///
/// # Examples
/// ```
/// # use dust_lang::*;
/// let input = "x = 1 + 2";
/// let tokens = lex(input).unwrap();
///
/// assert_eq!(
///     tokens,
///     [
///         (Token::Identifier("x"), Span(0, 1)),
///         (Token::Equal, Span(2, 3)),
///         (Token::Integer("1"), Span(4, 5)),
///         (Token::Plus, Span(6, 7)),
///         (Token::Integer("2"), Span(8, 9)),
///         (Token::Eof, Span(9, 9)),
///     ]
/// );
/// ```
pub fn lex(source: &str) -> Result<Vec<(Token, Span)>, DustError> {
    let mut lexer = Lexer::new(source);
    let mut tokens = Vec::new();

    loop {
        let (token, span) = lexer
            .next_token()
            .map_err(|error| DustError::compile(CompileError::Lex(error), source))?;

        tokens.push((token, span));

        if lexer.is_eof() {
            break;
        }
    }

    Ok(tokens)
}

/// Low-level tool for lexing a single token at a time.
///
/// See the [`lex`] function for an example of how to create and use a Lexer.
#[derive(Debug, Clone, Copy, Eq, PartialEq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct Lexer<'src> {
    source: &'src str,
    position: usize,
    is_eof: bool,
}

impl<'src> Lexer<'src> {
    /// Create a new lexer for the given input.
    pub fn new(source: &'src str) -> Self {
        Lexer {
            source,
            position: 0,
            is_eof: false,
        }
    }

    /// Return the source text being lexed.
    pub fn source(&self) -> &'src str {
        self.source
    }

    /// Return whether the lexer has reached the end of the source.
    pub fn is_eof(&self) -> bool {
        self.is_eof
    }

    /// Move the lexer to the given byte position in the source.
    pub fn skip_to(&mut self, position: usize) {
        self.position = position;
    }

    /// Produce the next token.
    pub fn next_token(&mut self) -> Result<(Token<'src>, Span), LexError> {
        self.skip_whitespace();

        let (token, span) = if let Some(character) = self.peek_char() {
            let lexer = LexRule::from(&character).lexer;

            lexer(self)?
        } else {
            self.is_eof = true;

            (Token::Eof, Span(self.position, self.position))
        };

        Ok((token, span))
    }

    /// Progress to the next character.
    fn next_char(&mut self) -> Option<char> {
        if let Some(c) = self.source[self.position..].chars().next() {
            self.position += c.len_utf8();

            Some(c)
        } else {
            None
        }
    }

    /// Skip whitespace characters.
    fn skip_whitespace(&mut self) {
        while let Some(c) = self.peek_char() {
            if c.is_whitespace() {
                self.next_char();
            } else {
                break;
            }
        }
    }

    /// Peek at the next character without consuming it.
    fn peek_char(&self) -> Option<char> {
        self.source[self.position..].chars().next()
    }

    /// Peek at the character after the next one without consuming it.
    fn peek_second_char(&self) -> Option<char> {
        self.source[self.position..].chars().nth(1)
    }

    /// Peek at the next `n` bytes of the source without consuming them.
    fn peek_chars(&self, n: usize) -> &'src str {
        let remaining_source = &self.source[self.position..];

        if remaining_source.len() < n {
            remaining_source
        } else {
            &remaining_source[..n]
        }
    }

    /// Lex an integer, float or byte token.
    fn lex_numeric(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;
        let mut is_float = false;
        let peek_char = self.peek_char();

        if let Some('-') = peek_char {
            self.next_char();
        }

        if let Some('0') = peek_char {
            self.next_char();

            if let Some('x') = self.peek_char() {
                self.next_char();

                let mut peek_chars = self.peek_chars(2).chars();

                // A byte literal must be followed by exactly two ASCII hex digits
                // (0-9, a-f or A-F).
                match (peek_chars.next(), peek_chars.next()) {
                    (
                        Some('0'..='9' | 'a'..='f' | 'A'..='F'),
                        Some('0'..='9' | 'a'..='f' | 'A'..='F'),
                    ) => {
                        self.next_char();
                        self.next_char();

                        let text = &self.source[start_pos..self.position];

                        return Ok((Token::Byte(text), Span(start_pos, self.position)));
                    }
                    (Some('0'..='9' | 'a'..='f' | 'A'..='F'), erroneous) => {
                        self.next_char();

                        return Err(LexError::ExpectedAsciiHexDigit {
                            actual: erroneous,
                            position: self.position,
                        });
                    }
                    (erroneous, _) => {
                        return Err(LexError::ExpectedAsciiHexDigit {
                            actual: erroneous,
                            position: self.position,
                        });
                    }
                }
            }
        }

        while let Some(c) = self.peek_char() {
            if c == '.' {
                if let Some('0'..='9') = self.peek_second_char() {
                    if !is_float {
                        self.next_char();
                    }

                    is_float = true;

                    self.next_char();

                    while let Some(peek_char) = self.peek_char() {
                        if let '0'..='9' = peek_char {
                            self.next_char();

                            continue;
                        }

                        let peek_second_char = self.peek_second_char();

                        if let ('e' | 'E', Some('0'..='9')) = (peek_char, peek_second_char) {
                            self.next_char();
                            self.next_char();

                            continue;
                        }

                        if let ('e' | 'E', Some('+' | '-')) = (peek_char, peek_second_char) {
                            self.next_char();
                            self.next_char();

                            continue;
                        }

                        break;
                    }
                } else {
                    break;
                }
            }

            if c.is_ascii_digit() || c == '_' {
                self.next_char();
            } else {
                break;
            }
        }

        let text = &self.source[start_pos..self.position];

        if is_float {
            Ok((Token::Float(text), Span(start_pos, self.position)))
        } else {
            Ok((Token::Integer(text), Span(start_pos, self.position)))
        }
    }

    /// Lex a keyword or identifier token.
    fn lex_keyword_or_identifier(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        while let Some(c) = self.peek_char() {
            if c.is_ascii_alphanumeric() || c == '_' {
                self.next_char();
            } else {
                break;
            }
        }

        let string = &self.source[start_pos..self.position];
        let token = match string {
            "Infinity" => Token::Float("Infinity"),
            "NaN" => Token::Float("NaN"),
            "async" => Token::Async,
            "bool" => Token::Bool,
            "break" => Token::Break,
            "else" => Token::Else,
            "false" => Token::Boolean("false"),
            "float" => Token::FloatKeyword,
            "fn" => Token::Fn,
            "if" => Token::If,
            "int" => Token::Int,
            "let" => Token::Let,
            "loop" => Token::Loop,
            "map" => Token::Map,
            "mut" => Token::Mut,
            "return" => Token::Return,
            "str" => Token::Str,
            "struct" => Token::Struct,
            "true" => Token::Boolean("true"),
            "while" => Token::While,
            _ => Token::Identifier(string),
        };

        Ok((token, Span(start_pos, self.position)))
    }

    /// Lex a string token delimited by double quotes.
    fn lex_string(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        while let Some(c) = self.peek_char() {
            if c == '"' {
                self.next_char();

                break;
            } else {
                self.next_char();
            }
        }

        let text = &self.source[start_pos + 1..self.position - 1];

        Ok((Token::String(text), Span(start_pos, self.position)))
    }

    /// Lex a character token delimited by single quotes.
    fn lex_char(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_position = self.position;

        self.next_char();

        let char = self.source[self.position..].chars().next().unwrap();

        self.next_char();

        if self.peek_char() == Some('\'') {
            self.next_char();
        } else {
            return Err(LexError::ExpectedCharacter {
                expected: '\'',
                actual: self.peek_char().unwrap_or('\0'),
                position: self.position,
            });
        }

        let end_position = self.position;

        Ok((Token::Character(char), Span(start_position, end_position)))
    }

    fn lex_plus(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        if let Some('=') = self.peek_char() {
            self.next_char();

            Ok((Token::PlusEqual, Span(start_pos, self.position)))
        } else {
            Ok((Token::Plus, Span(start_pos, self.position)))
        }
    }

    fn lex_minus(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_position = self.position;

        if self
            .peek_second_char()
            .is_some_and(|char| char.is_ascii_digit())
        {
            return self.lex_numeric();
        }

        self.next_char();

        if let Some('=') = self.peek_char() {
            self.next_char();

            return Ok((Token::MinusEqual, Span(start_position, self.position)));
        }

        if let Some('>') = self.peek_char() {
            self.next_char();

            return Ok((Token::ArrowThin, Span(start_position, self.position)));
        }

        if self.peek_chars(8) == "Infinity" {
            self.position += 8;

            return Ok((
                Token::Float("-Infinity"),
                Span(start_position, self.position),
            ));
        }

        Ok((Token::Minus, Span(start_position, self.position)))
    }

    fn lex_star(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        if let Some('=') = self.peek_char() {
            self.next_char();

            Ok((Token::StarEqual, Span(start_pos, self.position)))
        } else {
            Ok((Token::Star, Span(start_pos, self.position)))
        }
    }

    fn lex_slash(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        if let Some('=') = self.peek_char() {
            self.next_char();

            Ok((Token::SlashEqual, Span(start_pos, self.position)))
        } else {
            Ok((Token::Slash, Span(start_pos, self.position)))
        }
    }

    fn lex_percent(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        if let Some('=') = self.peek_char() {
            self.next_char();

            Ok((Token::PercentEqual, Span(start_pos, self.position)))
        } else {
            Ok((Token::Percent, Span(start_pos, self.position)))
        }
    }

    fn lex_unexpected(&mut self) -> Result<(Token<'src>, Span), LexError> {
        Err(LexError::UnexpectedCharacter {
            actual: self.peek_char().unwrap_or('\0'),
            position: self.position,
        })
    }

    fn lex_exclamation_mark(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        if let Some('=') = self.peek_char() {
            self.next_char();

            Ok((Token::BangEqual, Span(start_pos, self.position)))
        } else {
            Ok((Token::Bang, Span(start_pos, self.position)))
        }
    }

    fn lex_equal(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        if let Some('=') = self.peek_char() {
            self.next_char();

            Ok((Token::DoubleEqual, Span(start_pos, self.position)))
        } else {
            Ok((Token::Equal, Span(start_pos, self.position)))
        }
    }

    fn lex_less_than(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        if let Some('=') = self.peek_char() {
            self.next_char();

            Ok((Token::LessEqual, Span(start_pos, self.position)))
        } else {
            Ok((Token::Less, Span(start_pos, self.position)))
        }
    }

    fn lex_greater_than(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        if let Some('=') = self.peek_char() {
            self.next_char();

            Ok((Token::GreaterEqual, Span(start_pos, self.position)))
        } else {
            Ok((Token::Greater, Span(start_pos, self.position)))
        }
    }

    fn lex_ampersand(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        let peek_char = self.peek_char();

        if let Some('&') = peek_char {
            self.next_char();

            Ok((Token::DoubleAmpersand, Span(start_pos, self.position)))
        } else if peek_char.is_none() {
            Err(LexError::UnexpectedEndOfFile {
                position: self.position,
            })
        } else {
            Err(LexError::ExpectedCharacter {
                expected: '&',
                actual: self.peek_char().unwrap(),
                position: self.position,
            })
        }
    }

    fn lex_pipe(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        let peek_char = self.peek_char();

        if let Some('|') = peek_char {
            self.next_char();

            Ok((Token::DoublePipe, Span(start_pos, self.position)))
        } else if peek_char.is_none() {
            Err(LexError::UnexpectedEndOfFile {
                position: self.position,
            })
        } else {
            Err(LexError::ExpectedCharacter {
                expected: '|',
                actual: self.peek_char().unwrap(),
                position: self.position,
            })
        }
    }

    fn lex_left_parenthesis(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        Ok((Token::LeftParenthesis, Span(start_pos, self.position)))
    }

    fn lex_right_parenthesis(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        Ok((Token::RightParenthesis, Span(start_pos, self.position)))
    }

    fn lex_left_bracket(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        Ok((Token::LeftBracket, Span(start_pos, self.position)))
    }

    fn lex_right_bracket(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        Ok((Token::RightBracket, Span(start_pos, self.position)))
    }

    fn lex_left_brace(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        Ok((Token::LeftBrace, Span(start_pos, self.position)))
    }

    fn lex_right_brace(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        Ok((Token::RightBrace, Span(start_pos, self.position)))
    }

    fn lex_semicolon(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        Ok((Token::Semicolon, Span(start_pos, self.position)))
    }

    fn lex_colon(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        Ok((Token::Colon, Span(start_pos, self.position)))
    }

    fn lex_comma(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        Ok((Token::Comma, Span(start_pos, self.position)))
    }

    fn lex_dot(&mut self) -> Result<(Token<'src>, Span), LexError> {
        let start_pos = self.position;

        self.next_char();

        if let Some('.') = self.peek_char() {
            self.next_char();

            Ok((Token::DoubleDot, Span(start_pos, self.position)))
        } else {
            Ok((Token::Dot, Span(start_pos, self.position)))
        }
    }
}

/// Function that lexes a single token starting at the lexer's current position.
type LexerFn<'src> = fn(&mut Lexer<'src>) -> Result<(Token<'src>, Span), LexError>;

/// Rule that maps a starting character to the lexing function that handles it.
pub struct LexRule<'src> {
    lexer: LexerFn<'src>,
}

impl<'src> From<&char> for LexRule<'src> {
    fn from(char: &char) -> Self {
        match char {
            '0'..='9' => LexRule {
                lexer: Lexer::lex_numeric,
            },
            char if char.is_alphabetic() => LexRule {
                lexer: Lexer::lex_keyword_or_identifier,
            },
            '"' => LexRule {
                lexer: Lexer::lex_string,
            },
            '\'' => LexRule {
                lexer: Lexer::lex_char,
            },
            '+' => LexRule {
                lexer: Lexer::lex_plus,
            },
            '-' => LexRule {
                lexer: Lexer::lex_minus,
            },
            '*' => LexRule {
                lexer: Lexer::lex_star,
            },
            '/' => LexRule {
                lexer: Lexer::lex_slash,
            },
            '%' => LexRule {
                lexer: Lexer::lex_percent,
            },
            '!' => LexRule {
                lexer: Lexer::lex_exclamation_mark,
            },
            '=' => LexRule {
                lexer: Lexer::lex_equal,
            },
            '<' => LexRule {
                lexer: Lexer::lex_less_than,
            },
            '>' => LexRule {
                lexer: Lexer::lex_greater_than,
            },
            '&' => LexRule {
                lexer: Lexer::lex_ampersand,
            },
            '|' => LexRule {
                lexer: Lexer::lex_pipe,
            },
            '(' => LexRule {
                lexer: Lexer::lex_left_parenthesis,
            },
            ')' => LexRule {
                lexer: Lexer::lex_right_parenthesis,
            },
            '[' => LexRule {
                lexer: Lexer::lex_left_bracket,
            },
            ']' => LexRule {
                lexer: Lexer::lex_right_bracket,
            },
            '{' => LexRule {
                lexer: Lexer::lex_left_brace,
            },
            '}' => LexRule {
                lexer: Lexer::lex_right_brace,
            },
            ';' => LexRule {
                lexer: Lexer::lex_semicolon,
            },
            ':' => LexRule {
                lexer: Lexer::lex_colon,
            },
            ',' => LexRule {
                lexer: Lexer::lex_comma,
            },
            '.' => LexRule {
                lexer: Lexer::lex_dot,
            },
            _ => LexRule {
                lexer: Lexer::lex_unexpected,
            },
        }
    }
}

/// Errors that can occur while lexing.
#[derive(Debug, PartialEq, Clone)]
pub enum LexError {
    ExpectedAsciiHexDigit {
        actual: Option<char>,
        position: usize,
    },
    ExpectedCharacter {
        expected: char,
        actual: char,
        position: usize,
    },
    ExpectedCharacterMultiple {
        expected: &'static [char],
        actual: char,
        position: usize,
    },
    UnexpectedCharacter {
        actual: char,
        position: usize,
    },
    UnexpectedEndOfFile {
        position: usize,
    },
}

impl AnnotatedError for LexError {
    fn title() -> &'static str {
        "Lex Error"
    }

    fn description(&self) -> &'static str {
        match self {
            Self::ExpectedAsciiHexDigit { .. } => "Expected ASCII hex digit",
            Self::ExpectedCharacter { .. } => "Expected character",
            Self::ExpectedCharacterMultiple { .. } => "Expected one of multiple characters",
            Self::UnexpectedCharacter { .. } => "Unexpected character",
            Self::UnexpectedEndOfFile { .. } => "Unexpected end of file",
        }
    }

    fn details(&self) -> Option<String> {
        match self {
            Self::ExpectedAsciiHexDigit { actual, .. } => Some(format!(
                "Expected ASCII hex digit (0-9 or A-F), found \"{}\"",
                actual
                    .map(|character| character.to_string())
                    .unwrap_or("end of input".to_string())
            )),
            Self::ExpectedCharacter {
                expected, actual, ..
            } => Some(format!(
                "Expected character \"{}\", found \"{}\"",
                expected, actual
            )),
            Self::ExpectedCharacterMultiple {
                expected, actual, ..
            } => {
                let mut details = "Expected one of the following characters ".to_string();

                for (i, c) in expected.iter().enumerate() {
                    if i == expected.len() - 1 {
                        details.push_str(", or ");
                    } else if i > 0 {
                        details.push_str(", ");
                    }

                    details.push(*c);
                }

                details.push_str(&format!(" but found {}", actual));

                Some(details)
            }
            Self::UnexpectedCharacter { actual, .. } => {
                Some(format!("Unexpected character \"{}\"", actual))
            }
            Self::UnexpectedEndOfFile { .. } => Some("Unexpected end of file".to_string()),
        }
    }

    fn position(&self) -> Span {
        match self {
            Self::ExpectedAsciiHexDigit { position, .. } => Span(*position, *position),
            Self::ExpectedCharacter { position, .. } => Span(*position, *position),
            Self::ExpectedCharacterMultiple { position, .. } => Span(*position, *position),
            Self::UnexpectedCharacter { position, .. } => Span(*position, *position),
            Self::UnexpectedEndOfFile { position } => Span(*position, *position),
        }
    }
}

impl Display for LexError {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.description())?;

        if let Some(details) = self.details() {
            write!(f, ": {}", details)?;
        }

        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn character() {
        let input = "'a'";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Character('a'), Span(0, 3)),
                (Token::Eof, Span(3, 3)),
            ])
        );
    }

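    // Illustrative test (not part of the original suite): exercises the error path of
    // `lex_char` when the closing quote is missing. The expected values follow directly
    // from the current implementation, which reports '\0' when the input ends.
    #[test]
    fn unterminated_character() {
        let mut lexer = Lexer::new("'a");

        assert_eq!(
            lexer.next_token(),
            Err(LexError::ExpectedCharacter {
                expected: '\'',
                actual: '\0',
                position: 2
            })
        );
    }
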
    #[test]
    fn map_expression() {
        let input = "map { x = \"1\", y = 2, z = 3.0 }";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Map, Span(0, 3)),
                (Token::LeftBrace, Span(4, 5)),
                (Token::Identifier("x"), Span(6, 7)),
                (Token::Equal, Span(8, 9)),
                (Token::String("1"), Span(10, 13)),
                (Token::Comma, Span(13, 14)),
                (Token::Identifier("y"), Span(15, 16)),
                (Token::Equal, Span(17, 18)),
                (Token::Integer("2"), Span(19, 20)),
                (Token::Comma, Span(20, 21)),
                (Token::Identifier("z"), Span(22, 23)),
                (Token::Equal, Span(24, 25)),
                (Token::Float("3.0"), Span(26, 29)),
                (Token::RightBrace, Span(30, 31)),
                (Token::Eof, Span(31, 31)),
            ])
        );
    }

    #[test]
    fn let_statement() {
        let input = "let x = 42";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Let, Span(0, 3)),
                (Token::Identifier("x"), Span(4, 5)),
                (Token::Equal, Span(6, 7)),
                (Token::Integer("42"), Span(8, 10)),
                (Token::Eof, Span(10, 10)),
            ])
        );
    }

    #[test]
    fn unit_struct() {
        let input = "struct Foo";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Struct, Span(0, 6)),
                (Token::Identifier("Foo"), Span(7, 10)),
                (Token::Eof, Span(10, 10)),
            ])
        );
    }

    #[test]
    fn tuple_struct() {
        let input = "struct Foo(int, float)";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Struct, Span(0, 6)),
                (Token::Identifier("Foo"), Span(7, 10)),
                (Token::LeftParenthesis, Span(10, 11)),
                (Token::Int, Span(11, 14)),
                (Token::Comma, Span(14, 15)),
                (Token::FloatKeyword, Span(16, 21)),
                (Token::RightParenthesis, Span(21, 22)),
                (Token::Eof, Span(22, 22))
            ])
        );
    }

    #[test]
    fn fields_struct() {
        let input = "struct FooBar { foo: int, bar: float }";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Struct, Span(0, 6)),
                (Token::Identifier("FooBar"), Span(7, 13)),
                (Token::LeftBrace, Span(14, 15)),
                (Token::Identifier("foo"), Span(16, 19)),
                (Token::Colon, Span(19, 20)),
                (Token::Int, Span(21, 24)),
                (Token::Comma, Span(24, 25)),
                (Token::Identifier("bar"), Span(26, 29)),
                (Token::Colon, Span(29, 30)),
                (Token::FloatKeyword, Span(31, 36)),
                (Token::RightBrace, Span(37, 38)),
                (Token::Eof, Span(38, 38))
            ])
        );
    }

    #[test]
    fn list_index() {
        let input = "[1, 2, 3][1]";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::LeftBracket, Span(0, 1)),
                (Token::Integer("1"), Span(1, 2)),
                (Token::Comma, Span(2, 3)),
                (Token::Integer("2"), Span(4, 5)),
                (Token::Comma, Span(5, 6)),
                (Token::Integer("3"), Span(7, 8)),
                (Token::RightBracket, Span(8, 9)),
                (Token::LeftBracket, Span(9, 10)),
                (Token::Integer("1"), Span(10, 11)),
                (Token::RightBracket, Span(11, 12)),
                (Token::Eof, Span(12, 12)),
            ])
        )
    }

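    // Illustrative test (not part of the original suite): drives the `Lexer` directly,
    // one token at a time, mirroring what the `lex` helper does internally.
    #[test]
    fn lex_token_by_token() {
        let mut lexer = Lexer::new("1 + 2");
        let mut tokens = Vec::new();

        while !lexer.is_eof() {
            let (token, span) = lexer.next_token().unwrap();

            tokens.push((token, span));
        }

        assert_eq!(
            tokens,
            vec![
                (Token::Integer("1"), Span(0, 1)),
                (Token::Plus, Span(2, 3)),
                (Token::Integer("2"), Span(4, 5)),
                (Token::Eof, Span(5, 5)),
            ]
        );
    }
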
    #[test]
    fn list() {
        let input = "[1, 2, 3]";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::LeftBracket, Span(0, 1)),
                (Token::Integer("1"), Span(1, 2)),
                (Token::Comma, Span(2, 3)),
                (Token::Integer("2"), Span(4, 5)),
                (Token::Comma, Span(5, 6)),
                (Token::Integer("3"), Span(7, 8)),
                (Token::RightBracket, Span(8, 9)),
                (Token::Eof, Span(9, 9)),
            ])
        )
    }

    #[test]
    fn map_field_access() {
        let input = "{a = 1, b = 2, c = 3}.c";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::LeftBrace, Span(0, 1)),
                (Token::Identifier("a"), Span(1, 2)),
                (Token::Equal, Span(3, 4)),
                (Token::Integer("1"), Span(5, 6)),
                (Token::Comma, Span(6, 7)),
                (Token::Identifier("b"), Span(8, 9)),
                (Token::Equal, Span(10, 11)),
                (Token::Integer("2"), Span(12, 13)),
                (Token::Comma, Span(13, 14)),
                (Token::Identifier("c"), Span(15, 16)),
                (Token::Equal, Span(17, 18)),
                (Token::Integer("3"), Span(19, 20)),
                (Token::RightBrace, Span(20, 21)),
                (Token::Dot, Span(21, 22)),
                (Token::Identifier("c"), Span(22, 23)),
                (Token::Eof, Span(23, 23)),
            ])
        )
    }

    #[test]
    fn range() {
        let input = "0..42";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Integer("0"), Span(0, 1)),
                (Token::DoubleDot, Span(1, 3)),
                (Token::Integer("42"), Span(3, 5)),
                (Token::Eof, Span(5, 5))
            ])
        );
    }

    #[test]
    fn negate_expression() {
        let input = "x = -42; -x";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Identifier("x"), Span(0, 1)),
                (Token::Equal, Span(2, 3)),
                (Token::Integer("-42"), Span(4, 7)),
                (Token::Semicolon, Span(7, 8)),
                (Token::Minus, Span(9, 10)),
                (Token::Identifier("x"), Span(10, 11)),
                (Token::Eof, Span(11, 11))
            ])
        );
    }

    #[test]
    fn not_expression() {
        let input = "!true; !false";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Bang, Span(0, 1)),
                (Token::Boolean("true"), Span(1, 5)),
                (Token::Semicolon, Span(5, 6)),
                (Token::Bang, Span(7, 8)),
                (Token::Boolean("false"), Span(8, 13)),
                (Token::Eof, Span(13, 13))
            ])
        );
    }

    #[test]
    fn if_else() {
        let input = "if x < 10 { x + 1 } else { x }";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::If, Span(0, 2)),
                (Token::Identifier("x"), Span(3, 4)),
                (Token::Less, Span(5, 6)),
                (Token::Integer("10"), Span(7, 9)),
                (Token::LeftBrace, Span(10, 11)),
                (Token::Identifier("x"), Span(12, 13)),
                (Token::Plus, Span(14, 15)),
                (Token::Integer("1"), Span(16, 17)),
                (Token::RightBrace, Span(18, 19)),
                (Token::Else, Span(20, 24)),
                (Token::LeftBrace, Span(25, 26)),
                (Token::Identifier("x"), Span(27, 28)),
                (Token::RightBrace, Span(29, 30)),
                (Token::Eof, Span(30, 30)),
            ])
        )
    }

    #[test]
    fn while_loop() {
        let input = "while x < 10 { x += 1 }";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::While, Span(0, 5)),
                (Token::Identifier("x"), Span(6, 7)),
                (Token::Less, Span(8, 9)),
                (Token::Integer("10"), Span(10, 12)),
                (Token::LeftBrace, Span(13, 14)),
                (Token::Identifier("x"), Span(15, 16)),
                (Token::PlusEqual, Span(17, 19)),
                (Token::Integer("1"), Span(20, 21)),
                (Token::RightBrace, Span(22, 23)),
                (Token::Eof, Span(23, 23)),
            ])
        )
    }

    #[test]
    fn add_assign() {
        let input = "x += 42";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Identifier("x"), Span(0, 1)),
                (Token::PlusEqual, Span(2, 4)),
                (Token::Integer("42"), Span(5, 7)),
                (Token::Eof, Span(7, 7)),
            ])
        )
    }

    #[test]
    fn or() {
        let input = "true || false";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Boolean("true"), Span(0, 4)),
                (Token::DoublePipe, Span(5, 7)),
                (Token::Boolean("false"), Span(8, 13)),
                (Token::Eof, Span(13, 13)),
            ])
        )
    }

    #[test]
    fn block() {
        let input = "{ x = 42; y = \"foobar\" }";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::LeftBrace, Span(0, 1)),
                (Token::Identifier("x"), Span(2, 3)),
                (Token::Equal, Span(4, 5)),
                (Token::Integer("42"), Span(6, 8)),
                (Token::Semicolon, Span(8, 9)),
                (Token::Identifier("y"), Span(10, 11)),
                (Token::Equal, Span(12, 13)),
                (Token::String("foobar"), Span(14, 22)),
                (Token::RightBrace, Span(23, 24)),
                (Token::Eof, Span(24, 24)),
            ])
        )
    }

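    // Illustrative test (not part of the original suite): shows `skip_to` repositioning
    // the lexer before the next token is produced. Positions are byte offsets into the source.
    #[test]
    fn skip_to_position() {
        let mut lexer = Lexer::new("x = 42");

        lexer.skip_to(4);

        assert_eq!(lexer.next_token(), Ok((Token::Integer("42"), Span(4, 6))));
    }
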
    #[test]
    fn not_equal() {
        let input = "42 != 42";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Integer("42"), Span(0, 2)),
                (Token::BangEqual, Span(3, 5)),
                (Token::Integer("42"), Span(6, 8)),
                (Token::Eof, Span(8, 8)),
            ])
        )
    }

    #[test]
    fn equal() {
        let input = "42 == 42";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Integer("42"), Span(0, 2)),
                (Token::DoubleEqual, Span(3, 5)),
                (Token::Integer("42"), Span(6, 8)),
                (Token::Eof, Span(8, 8)),
            ])
        )
    }

    #[test]
    fn modulo() {
        let input = "42 % 2";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Integer("42"), Span(0, 2)),
                (Token::Percent, Span(3, 4)),
                (Token::Integer("2"), Span(5, 6)),
                (Token::Eof, Span(6, 6)),
            ])
        )
    }

    #[test]
    fn divide() {
        let input = "42 / 2";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Integer("42"), Span(0, 2)),
                (Token::Slash, Span(3, 4)),
                (Token::Integer("2"), Span(5, 6)),
                (Token::Eof, Span(6, 6)),
            ])
        )
    }

    #[test]
    fn greater_than() {
        let input = ">";

        assert_eq!(
            lex(input),
            Ok(vec![(Token::Greater, Span(0, 1)), (Token::Eof, Span(1, 1))])
        )
    }

    #[test]
    fn greater_than_or_equal() {
        let input = ">=";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::GreaterEqual, Span(0, 2)),
                (Token::Eof, Span(2, 2))
            ])
        )
    }

    #[test]
    fn less_than() {
        let input = "<";

        assert_eq!(
            lex(input),
            Ok(vec![(Token::Less, Span(0, 1)), (Token::Eof, Span(1, 1))])
        )
    }

    #[test]
    fn less_than_or_equal() {
        let input = "<=";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::LessEqual, Span(0, 2)),
                (Token::Eof, Span(2, 2))
            ])
        )
    }

    #[test]
    fn infinity() {
        let input = "Infinity";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Float("Infinity"), Span(0, 8)),
                (Token::Eof, Span(8, 8)),
            ])
        )
    }

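    // Illustrative test (not part of the original suite): `lex_numeric` accepts '_' as a
    // digit separator and keeps it in the token text, so "1_000" lexes as a single integer.
    #[test]
    fn integer_with_underscores() {
        let input = "1_000";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Integer("1_000"), Span(0, 5)),
                (Token::Eof, Span(5, 5)),
            ])
        )
    }
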
    #[test]
    fn negative_infinity() {
        let input = "-Infinity";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Float("-Infinity"), Span(0, 9)),
                (Token::Eof, Span(9, 9)),
            ])
        )
    }

    #[test]
    fn nan() {
        let input = "NaN";

        assert!(lex(input).is_ok_and(|tokens| tokens[0].0 == Token::Float("NaN")));
    }

    #[test]
    fn complex_float() {
        let input = "42.42e42";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Float("42.42e42"), Span(0, 8)),
                (Token::Eof, Span(8, 8)),
            ])
        )
    }

    #[test]
    fn max_integer() {
        let input = "9223372036854775807";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Integer("9223372036854775807"), Span(0, 19)),
                (Token::Eof, Span(19, 19)),
            ])
        )
    }

    #[test]
    fn min_integer() {
        let input = "-9223372036854775808";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Integer("-9223372036854775808"), Span(0, 20)),
                (Token::Eof, Span(20, 20)),
            ])
        )
    }

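    // Illustrative tests (not part of the original suite) for the "0x" branch of
    // `lex_numeric`: two hex digits produce a `Token::Byte` holding the raw text, and a
    // non-hex character right after "0x" produces `LexError::ExpectedAsciiHexDigit`.
    #[test]
    fn byte() {
        let input = "0x2A";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Byte("0x2A"), Span(0, 4)),
                (Token::Eof, Span(4, 4)),
            ])
        )
    }

    #[test]
    fn expected_hex_digit() {
        let mut lexer = Lexer::new("0x!!");

        assert_eq!(
            lexer.next_token(),
            Err(LexError::ExpectedAsciiHexDigit {
                actual: Some('!'),
                position: 2
            })
        );
    }
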
    #[test]
    fn subtract_negative_integers() {
        let input = "-42 - -42";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Integer("-42"), Span(0, 3)),
                (Token::Minus, Span(4, 5)),
                (Token::Integer("-42"), Span(6, 9)),
                (Token::Eof, Span(9, 9)),
            ])
        )
    }

    #[test]
    fn negative_integer() {
        let input = "-42";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Integer("-42"), Span(0, 3)),
                (Token::Eof, Span(3, 3))
            ])
        )
    }

    #[test]
    fn read_line() {
        let input = "read_line()";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Identifier("read_line"), Span(0, 9)),
                (Token::LeftParenthesis, Span(9, 10)),
                (Token::RightParenthesis, Span(10, 11)),
                (Token::Eof, Span(11, 11)),
            ])
        )
    }

    #[test]
    fn write_line() {
        let input = "write_line(\"Hello, world!\")";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Identifier("write_line"), Span(0, 10)),
                (Token::LeftParenthesis, Span(10, 11)),
                (Token::String("Hello, world!"), Span(11, 26)),
                (Token::RightParenthesis, Span(26, 27)),
                (Token::Eof, Span(27, 27)),
            ])
        )
    }

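    // Illustrative test (not part of the original suite): "->" is lexed by `lex_minus`
    // as a single `Token::ArrowThin` when the '-' is not the start of a negative number.
    #[test]
    fn arrow_thin() {
        let input = "x -> y";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Identifier("x"), Span(0, 1)),
                (Token::ArrowThin, Span(2, 4)),
                (Token::Identifier("y"), Span(5, 6)),
                (Token::Eof, Span(6, 6)),
            ])
        )
    }
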
    #[test]
    fn string_concatenation() {
        let input = "\"Hello, \" + \"world!\"";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::String("Hello, "), Span(0, 9)),
                (Token::Plus, Span(10, 11)),
                (Token::String("world!"), Span(12, 20)),
                (Token::Eof, Span(20, 20)),
            ])
        )
    }

    #[test]
    fn string() {
        let input = "\"Hello, world!\"";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::String("Hello, world!"), Span(0, 15)),
                (Token::Eof, Span(15, 15)),
            ])
        )
    }

    #[test]
    fn r#true() {
        let input = "true";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Boolean("true"), Span(0, 4)),
                (Token::Eof, Span(4, 4)),
            ])
        )
    }

    #[test]
    fn r#false() {
        let input = "false";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Boolean("false"), Span(0, 5)),
                (Token::Eof, Span(5, 5))
            ])
        )
    }

    #[test]
    fn property_access_function_call() {
        let input = "42.is_even()";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Integer("42"), Span(0, 2)),
                (Token::Dot, Span(2, 3)),
                (Token::Identifier("is_even"), Span(3, 10)),
                (Token::LeftParenthesis, Span(10, 11)),
                (Token::RightParenthesis, Span(11, 12)),
                (Token::Eof, Span(12, 12)),
            ])
        )
    }

    #[test]
    fn empty() {
        let input = "";

        assert_eq!(lex(input), Ok(vec![(Token::Eof, Span(0, 0))]))
    }

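    // Illustrative test (not part of the original suite): `source` simply returns the
    // full input string the lexer was constructed with.
    #[test]
    fn source_is_preserved() {
        let lexer = Lexer::new("x = 1 + 2");

        assert_eq!(lexer.source(), "x = 1 + 2");
    }
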
    #[test]
    fn reserved_identifier() {
        let input = "length";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Identifier("length"), Span(0, 6)),
                (Token::Eof, Span(6, 6)),
            ])
        )
    }

    #[test]
    fn square_braces() {
        let input = "[]";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::LeftBracket, Span(0, 1)),
                (Token::RightBracket, Span(1, 2)),
                (Token::Eof, Span(2, 2)),
            ])
        )
    }

    #[test]
    fn small_float() {
        let input = "1.23";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Float("1.23"), Span(0, 4)),
                (Token::Eof, Span(4, 4)),
            ])
        )
    }

    #[test]
    #[allow(clippy::excessive_precision)]
    fn big_float() {
        let input = "123456789.123456789";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Float("123456789.123456789"), Span(0, 19)),
                (Token::Eof, Span(19, 19)),
            ])
        )
    }

    #[test]
    fn float_with_exponent() {
        let input = "1.23e4";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Float("1.23e4"), Span(0, 6)),
                (Token::Eof, Span(6, 6)),
            ])
        )
    }

    #[test]
    fn float_with_negative_exponent() {
        let input = "1.23e-4";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Float("1.23e-4"), Span(0, 7)),
                (Token::Eof, Span(7, 7)),
            ])
        )
    }

    #[test]
    fn float_infinity_and_nan() {
        let input = "Infinity -Infinity NaN";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Float("Infinity"), Span(0, 8)),
                (Token::Float("-Infinity"), Span(9, 18)),
                (Token::Float("NaN"), Span(19, 22)),
                (Token::Eof, Span(22, 22)),
            ])
        )
    }

    #[test]
    fn add() {
        let input = "1 + 2";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Integer("1"), Span(0, 1)),
                (Token::Plus, Span(2, 3)),
                (Token::Integer("2"), Span(4, 5)),
                (Token::Eof, Span(5, 5)),
            ])
        )
    }

    #[test]
    fn multiply() {
        let input = "1 * 2";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Integer("1"), Span(0, 1)),
                (Token::Star, Span(2, 3)),
                (Token::Integer("2"), Span(4, 5)),
                (Token::Eof, Span(5, 5)),
            ])
        )
    }

    #[test]
    fn add_and_multiply() {
        let input = "1 + 2 * 3";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Integer("1"), Span(0, 1)),
                (Token::Plus, Span(2, 3)),
                (Token::Integer("2"), Span(4, 5)),
                (Token::Star, Span(6, 7)),
                (Token::Integer("3"), Span(8, 9)),
                (Token::Eof, Span(9, 9)),
            ])
        );
    }

    #[test]
    fn assignment() {
        let input = "a = 1 + 2 * 3";

        assert_eq!(
            lex(input),
            Ok(vec![
                (Token::Identifier("a"), Span(0, 1)),
                (Token::Equal, Span(2, 3)),
                (Token::Integer("1"), Span(4, 5)),
                (Token::Plus, Span(6, 7)),
                (Token::Integer("2"), Span(8, 9)),
                (Token::Star, Span(10, 11)),
                (Token::Integer("3"), Span(12, 13)),
                (Token::Eof, Span(13, 13)),
            ])
        );
    }
}