
Add example; Clean up pratt parser

Jeff 2024-04-27 03:40:05 -04:00
parent d9889ef2d8
commit 2b546e7b63
7 changed files with 58 additions and 19 deletions

Cargo.lock generated
View File

@@ -367,6 +367,7 @@ dependencies = [
  "colored",
  "dust-lang",
  "env_logger",
+ "log",
  "nu-ansi-term",
  "reedline",
 ]

View File

@@ -36,7 +36,7 @@ impl Context {
         for (identifier, (value_data, usage_data)) in other.variables.read()?.iter() {
             if let ValueData::Type(Type::Function { .. }) = value_data {
-                log::trace!("Inheriting type of variable {identifier}.");
+                log::trace!("Context inheriting type of {identifier}.");
                 self_data.insert(identifier.clone(), (value_data.clone(), usage_data.clone()));
             }
@@ -49,7 +49,7 @@ impl Context {
         let mut self_data = self.variables.write()?;

         for (identifier, (value_data, usage_data)) in other.variables.read()?.iter() {
-            log::trace!("Inheriting variable {identifier}.");
+            log::trace!("Context inheriting variable {identifier}.");
             self_data.insert(identifier.clone(), (value_data.clone(), usage_data.clone()));
         }
@@ -65,7 +65,7 @@ impl Context {
     pub fn get_type(&self, identifier: &Identifier) -> Result<Option<Type>, ValidationError> {
         if let Some((value_data, _)) = self.variables.read()?.get(identifier) {
-            log::trace!("Using {identifier}'s type.");
+            log::trace!("Getting {identifier}'s type.");
             let r#type = match value_data {
                 ValueData::Type(r#type) => r#type.clone(),
@@ -105,6 +105,19 @@
         }
     }

+    pub fn get_data(
+        &self,
+        identifier: &Identifier,
+    ) -> Result<Option<(ValueData, UsageData)>, RwLockPoisonError> {
+        if let Some(full_data) = self.variables.read()?.get(identifier) {
+            log::trace!("Getting {identifier}'s value.");
+
+            Ok(Some(full_data.clone()))
+        } else {
+            Ok(None)
+        }
+    }
+
     pub fn set_type(&self, identifier: Identifier, r#type: Type) -> Result<(), RwLockPoisonError> {
         log::debug!("Setting {identifier} to type {}.", r#type);
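
The new Context::get_data returns clones of both the stored ValueData and its UsageData from a single read of the lock. A minimal sketch of a caller (the function and variable names here are hypothetical, assuming a Context and an Identifier are in scope; this is not part of the commit):

fn inspect(context: &Context, identifier: &Identifier) -> Result<(), RwLockPoisonError> {
    // Both halves are cloned out of the RwLock, so no read guard is held
    // once get_data returns.
    match context.get_data(identifier)? {
        Some((value_data, usage_data)) => {
            log::trace!("{identifier} is set; its value data and usage data are available.");
            let _ = (value_data, usage_data);
        }
        None => log::trace!("{identifier} is not set in this context."),
    }

    Ok(())
}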

View File

@@ -88,6 +88,8 @@ impl Interpreter {
             ),
         ];

+        log::info!("Start loading standard library...");
+
         let error = std_sources
             .into_par_iter()
             .find_map_any(|(source_id, source)| {
@@ -139,6 +141,8 @@
                     .err()
             });

+        log::info!("Finish loading standard library.");
+
         if let Some(error) = error {
             Err(error)
         } else {

View File

@@ -12,6 +12,10 @@ use crate::{
 pub type ParserInput<'src> =
     SpannedInput<Token<'src>, SimpleSpan, &'src [(Token<'src>, SimpleSpan)]>;

+pub type ParserExtra<'src> = extra::Err<Rich<'src, Token<'src>, SimpleSpan>>;
+
+pub type Comment = String;
+
 pub fn parse<'src>(tokens: &'src [(Token<'src>, SimpleSpan)]) -> Result<AbstractTree, Vec<Error>> {
     let statements = parser(false)
         .parse(tokens.spanned((tokens.len()..tokens.len()).into()))
@@ -28,8 +32,7 @@ pub fn parse<'src>(tokens: &'src [(Token<'src>, SimpleSpan)]) -> Result<Abstract
 pub fn parser<'src>(
     allow_built_ins: bool,
-) -> impl Parser<'src, ParserInput<'src>, Vec<Statement>, extra::Err<Rich<'src, Token<'src>, SimpleSpan>>>
-{
+) -> impl Parser<'src, ParserInput<'src>, Vec<Statement>, ParserExtra<'src>> {
     let identifiers: RefCell<HashMap<&str, Identifier>> = RefCell::new(HashMap::new());
     let _custom_types: Rc<RefCell<HashMap<Identifier, Type>>> =
         Rc::new(RefCell::new(HashMap::new()));
@@ -335,8 +338,20 @@ pub fn parser<'src>(
             )
         });

+        let list_index = choice((list.clone(), identifier_expression.clone()))
+            .then(expression.clone().delimited_by(
+                just(Token::Control(Control::SquareOpen)),
+                just(Token::Control(Control::SquareClose)),
+            ))
+            .map_with(|(left, right), state| {
+                Expression::ListIndex(
+                    Box::new(ListIndex::new(left, right)).with_position(state.span()),
+                )
+            });
+
         let atom = choice((
             map_index.clone(),
+            list_index.clone(),
             range.clone(),
             parsed_function.clone(),
             list.clone(),
@@ -358,18 +373,6 @@ pub fn parser<'src>(
                     Expression::Logic(Box::new(Logic::Not(expression)).with_position(span))
                 },
             ),
-            postfix(
-                2,
-                expression.clone().delimited_by(
-                    just(Token::Control(Control::SquareOpen)),
-                    just(Token::Control(Control::SquareClose)),
-                ),
-                |left, right, span| {
-                    Expression::ListIndex(
-                        Box::new(ListIndex::new(left, right)).with_position(span),
-                    )
-                },
-            ),
             postfix(
                 3,
                 turbofish.clone().or_not().then(
@@ -499,6 +502,7 @@ pub fn parser<'src>(
         choice((
             logic_math_indexes_and_function_calls,
+            list_index,
             map_index,
             built_in_function_call,
             range,
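
Net effect of the parser hunks: list indexing is no longer a postfix(2, ...) operator in the Pratt table. It is now a standalone list_index parser, restricted to choice((list, identifier_expression)) followed by a bracketed expression, and it is wired in twice: as an atom alternative ahead of range, and as a top-level expression alternative next to map_index.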

View File

@@ -14,5 +14,6 @@ clap = { version = "4.5.3", features = ["derive"] }
 colored = "2.1.0"
 dust-lang = { path = "../dust-lang" }
 env_logger = "0.11.3"
+log = "0.4.21"
 nu-ansi-term = "0.50.0"
 reedline = { version = "0.30.0", features = ["sqlite", "system_clipboard"] }

View File

@@ -5,6 +5,7 @@ use ariadne::sources;
 use clap::Parser;
 use cli::run_shell;
 use colored::Colorize;
+use log::Level;
 use std::{
     fs::read_to_string,
@@ -33,10 +34,16 @@ fn main() {
     env_logger::Builder::from_env("DUST_LOG")
         .format(|buffer, record| {
             let args = record.args();
-            let log_level = record.level().to_string().bold();
+            let log_level = match record.level() {
+                Level::Trace => "TRACE".cyan().bold(),
+                Level::Warn => "WARN".yellow().bold(),
+                Level::Debug => "DEBUG".green().bold(),
+                Level::Error => "ERROR".red().bold(),
+                Level::Info => "INFO".white().bold(),
+            };
             let timestamp = buffer.timestamp_seconds().to_string().dimmed();

-            writeln!(buffer, "[{log_level} {timestamp}] {args}")
+            writeln!(buffer, "[{} {}] {}", log_level, timestamp, args)
         })
         .init();
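
Since env_logger::Builder::from_env("DUST_LOG") reads its filter from the DUST_LOG environment variable, the new colored level names can be seen with, for example, DUST_LOG=trace cargo run --package dust-shell examples/json_length.ds (the cargo invocation is taken from the new example's comment; the DUST_LOG=trace prefix is only an illustrative assumption about the desired level).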

examples/json_length.ds Normal file
View File

@@ -0,0 +1,9 @@
+/*
+    This is a simple program to get the length of a JSON array.
+    Use `cargo run --package dust-shell examples/json_length.ds`
+*/
+
+input = fs.read_file('examples/assets/data.json')
+data = json.parse(input)
+
+list.length(data)