1
0

Add use statements with precompiled std library

This commit is contained in:
Jeff 2024-07-01 14:23:01 -04:00
parent a79cb0b3e1
commit 699576c4c7
13 changed files with 273 additions and 134 deletions

19
Cargo.lock generated
View File

@ -125,6 +125,12 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "base64"
version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
[[package]] [[package]]
name = "bitflags" name = "bitflags"
version = "1.3.2" version = "1.3.2"
@ -372,6 +378,7 @@ dependencies = [
"log", "log",
"nu-ansi-term", "nu-ansi-term",
"reedline", "reedline",
"ron",
] ]
[[package]] [[package]]
@ -972,6 +979,18 @@ version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f"
[[package]]
name = "ron"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b91f7eff05f748767f183df4320a63d6936e9c6107d97c9e6bdd9784f4289c94"
dependencies = [
"base64",
"bitflags 2.5.0",
"serde",
"serde_derive",
]
[[package]] [[package]]
name = "rusqlite" name = "rusqlite"
version = "0.31.0" version = "0.31.0"

View File

@ -98,8 +98,8 @@ impl AbstractNode for MapIndex {
if let (Expression::Identifier(collection), Expression::Identifier(index)) = if let (Expression::Identifier(collection), Expression::Identifier(index)) =
(&self.collection, &self.index) (&self.collection, &self.index)
{ {
let collection = if let Some(collection) = context.get_value(&collection.node)? { let r#type = if let Some(r#type) = context.get_type(&collection.node)? {
collection r#type
} else { } else {
return Err(ValidationError::VariableNotFound { return Err(ValidationError::VariableNotFound {
identifier: collection.node.clone(), identifier: collection.node.clone(),
@ -107,9 +107,9 @@ impl AbstractNode for MapIndex {
}); });
}; };
if let ValueInner::Map(map) = collection.inner().as_ref() { if let Type::Map(map) = r#type {
return if let Some(value) = map.get(&index.node) { return if let Some(r#type) = map.get(&index.node) {
Ok(Some(value.r#type(context)?)) Ok(Some(r#type.clone()))
} else { } else {
Err(ValidationError::FieldNotFound { Err(ValidationError::FieldNotFound {
identifier: index.node.clone(), identifier: index.node.clone(),

View File

@ -106,7 +106,7 @@ pub enum Evaluation {
Return(Value), Return(Value),
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AbstractTree(Vec<Statement>); pub struct AbstractTree(Vec<Statement>);
impl AbstractTree { impl AbstractTree {
@ -120,6 +120,26 @@ impl AbstractTree {
AbstractTree(statements) AbstractTree(statements)
} }
pub(crate) fn define_types(&self, context: &Context) -> Result<(), ValidationError> {
for statement in &self.0 {
statement.define_types(context)?;
}
Ok(())
}
pub(crate) fn validate(
&self,
context: &Context,
manage_memory: bool,
) -> Result<(), ValidationError> {
for statement in &self.0 {
statement.validate(context, manage_memory)?;
}
Ok(())
}
pub fn run( pub fn run(
self, self,
context: &Context, context: &Context,

View File

@ -9,7 +9,7 @@ use crate::{
use super::{ use super::{
AbstractNode, Assignment, AsyncBlock, Block, EnumDeclaration, Evaluation, Expression, IfElse, AbstractNode, Assignment, AsyncBlock, Block, EnumDeclaration, Evaluation, Expression, IfElse,
Loop, SourcePosition, StructureDefinition, Type, TypeAlias, While, WithPosition, Loop, SourcePosition, StructureDefinition, Type, TypeAlias, Use, While, WithPosition,
}; };
#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Serialize, Deserialize)] #[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Serialize, Deserialize)]
@ -25,6 +25,7 @@ pub enum Statement {
EnumDeclaration(WithPosition<EnumDeclaration>), EnumDeclaration(WithPosition<EnumDeclaration>),
Expression(Expression), Expression(Expression),
While(WithPosition<While>), While(WithPosition<While>),
Use(WithPosition<Use>),
} }
impl Statement { impl Statement {
@ -41,6 +42,7 @@ impl Statement {
Statement::TypeAlias(inner) => inner.position, Statement::TypeAlias(inner) => inner.position,
Statement::EnumDeclaration(inner) => inner.position, Statement::EnumDeclaration(inner) => inner.position,
Statement::While(inner) => inner.position, Statement::While(inner) => inner.position,
Statement::Use(inner) => inner.position,
} }
} }
@ -73,6 +75,7 @@ impl AbstractNode for Statement {
enum_declaration.node.define_types(_context) enum_declaration.node.define_types(_context)
} }
Statement::While(r#while) => r#while.node.define_types(_context), Statement::While(r#while) => r#while.node.define_types(_context),
Statement::Use(r#use) => r#use.node.define_types(_context),
} }
} }
@ -90,6 +93,7 @@ impl AbstractNode for Statement {
Statement::IfElse(if_else) => if_else.node.validate(_context, _manage_memory), Statement::IfElse(if_else) => if_else.node.validate(_context, _manage_memory),
Statement::Loop(r#loop) => r#loop.node.validate(_context, _manage_memory), Statement::Loop(r#loop) => r#loop.node.validate(_context, _manage_memory),
Statement::While(r#while) => r#while.node.validate(_context, _manage_memory), Statement::While(r#while) => r#while.node.validate(_context, _manage_memory),
Statement::Use(r#use) => r#use.node.validate(_context, _manage_memory),
_ => Ok(()), _ => Ok(()),
} }
} }
@ -117,6 +121,7 @@ impl AbstractNode for Statement {
type_alias.node.evaluate(context, manage_memory) type_alias.node.evaluate(context, manage_memory)
} }
Statement::While(r#while) => r#while.node.evaluate(context, manage_memory), Statement::While(r#while) => r#while.node.evaluate(context, manage_memory),
Statement::Use(r#use) => r#use.node.evaluate(context, manage_memory),
}; };
if manage_memory { if manage_memory {
@ -143,6 +148,7 @@ impl AbstractNode for Statement {
enum_declaration.node.expected_type(_context) enum_declaration.node.expected_type(_context)
} }
Statement::While(r#while) => r#while.node.expected_type(_context), Statement::While(r#while) => r#while.node.expected_type(_context),
Statement::Use(r#use) => r#use.node.expected_type(_context),
} }
} }
} }
@ -161,6 +167,7 @@ impl Display for Statement {
Statement::EnumDeclaration(inner) => write!(f, "{}", inner.node), Statement::EnumDeclaration(inner) => write!(f, "{}", inner.node),
Statement::Expression(expression) => write!(f, "{expression}"), Statement::Expression(expression) => write!(f, "{expression}"),
Statement::While(inner) => write!(f, "{}", inner.node), Statement::While(inner) => write!(f, "{}", inner.node),
Statement::Use(inner) => write!(f, "{}", inner.node),
} }
} }
} }

View File

@ -1,11 +1,14 @@
use std::{fs::read_to_string, path::Path}; use std::{fmt::Display, fs::read_to_string, path::Path};
use chumsky::prelude::*;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::{ use crate::{
context::Context, context::Context,
error::{RuntimeError, ValidationError}, error::{DustError, RuntimeError, ValidationError},
lexer::{self, lex}, lexer::lex,
parser,
standard_library::{std_fs_compiled, std_io_compiled, std_json_compiled, std_thread_compiled},
Type, Type,
}; };
@ -16,16 +19,36 @@ pub struct Use {
path: String, path: String,
} }
impl Use {
pub fn new(path: String) -> Self {
Self { path }
}
}
impl AbstractNode for Use { impl AbstractNode for Use {
fn define_types(&self, context: &Context) -> Result<(), ValidationError> { fn define_types(&self, context: &Context) -> Result<(), ValidationError> {
Ok(()) match self.path.as_str() {
"std.io" => std_io_compiled().define_types(context),
_ => {
if Path::new(&self.path).exists() {
Ok(())
} else {
Err(ValidationError::CannotUsePath(self.path.clone()))
}
}
}
} }
fn validate(&self, context: &Context, manage_memory: bool) -> Result<(), ValidationError> { fn validate(&self, context: &Context, manage_memory: bool) -> Result<(), ValidationError> {
if Path::new(&self.path).exists() { match self.path.as_str() {
Ok(()) "std.io" => std_io_compiled().validate(context, manage_memory),
} else { _ => {
todo!() if Path::new(&self.path).exists() {
Ok(())
} else {
Err(ValidationError::CannotUsePath(self.path.clone()))
}
}
} }
} }
@ -34,10 +57,33 @@ impl AbstractNode for Use {
context: &Context, context: &Context,
manage_memory: bool, manage_memory: bool,
) -> Result<Option<Evaluation>, RuntimeError> { ) -> Result<Option<Evaluation>, RuntimeError> {
let file_contents = read_to_string(self.path)?; let abstact_tree = match self.path.as_str() {
"std.fs" => std_fs_compiled().clone(),
"std.io" => std_io_compiled().clone(),
"std.json" => std_json_compiled().clone(),
"std.thread" => std_thread_compiled().clone(),
path => {
let file_contents = read_to_string(path)?;
let tokens = lex(&file_contents).map_err(|errors| RuntimeError::Use(errors))?;
let abstract_tree = parser(false)
.parse(tokens.spanned((tokens.len()..tokens.len()).into()))
.into_result()
.map_err(|errors| {
RuntimeError::Use(
errors
.into_iter()
.map(|error| DustError::from(error))
.collect::<Vec<DustError>>(),
)
})?;
let tokens = lex(&file_contents).map_err(|errors| RuntimeError::Use(errors))?; abstract_tree
let abstract_tree = }
};
abstact_tree
.run(context, manage_memory)
.map_err(|errors| RuntimeError::Use(errors))?;
Ok(None) Ok(None)
} }
@ -46,3 +92,9 @@ impl AbstractNode for Use {
todo!() todo!()
} }
} }
impl Display for Use {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
todo!()
}
}

View File

@ -116,6 +116,7 @@ pub enum ValidationError {
index_type: Type, index_type: Type,
index_position: SourcePosition, index_position: SourcePosition,
}, },
CannotUsePath(String),
ExpectedString { ExpectedString {
actual: Type, actual: Type,
position: SourcePosition, position: SourcePosition,

View File

@ -318,8 +318,8 @@ pub fn lexer<'src>() -> impl Parser<
let identifier = text::ident().map(|text: &str| Token::Identifier(text)); let identifier = text::ident().map(|text: &str| Token::Identifier(text));
let r#use = just("use").ignore_then( let r#use = just("use").padded().ignore_then(
none_of('\n') none_of(" \n\r;")
.repeated() .repeated()
.to_slice() .to_slice()
.map(|text: &str| Token::Use(text.trim())), .map(|text: &str| Token::Use(text.trim())),
@ -350,8 +350,8 @@ mod tests {
#[test] #[test]
fn r#use() { fn r#use() {
assert_eq!( assert_eq!(
lex("use std/io.ds").unwrap(), lex("use std.io").unwrap(),
vec![(Token::Use("std/io.ds"), (0..13).into())] vec![(Token::Use("std.io"), (0..10).into())]
); );
assert_eq!( assert_eq!(

View File

@ -4,6 +4,7 @@ pub mod error;
pub mod identifier; pub mod identifier;
pub mod lexer; pub mod lexer;
pub mod parser; pub mod parser;
pub mod standard_library;
pub mod value; pub mod value;
use std::{ use std::{
@ -17,26 +18,14 @@ pub use value::Value;
use abstract_tree::AbstractTree; use abstract_tree::AbstractTree;
use ariadne::{Color, Fmt, Label, Report, ReportKind}; use ariadne::{Color, Fmt, Label, Report, ReportKind};
use chumsky::prelude::*;
use context::Context; use context::Context;
use error::{DustError, RuntimeError, TypeConflict, ValidationError}; use error::{DustError, RuntimeError, TypeConflict, ValidationError};
use lexer::{lex, Token}; use lexer::{lex, Token};
use parser::{parse, parser}; use parser::{parse, parser};
use rayon::prelude::*;
pub fn interpret(source_id: &str, source: &str) -> Result<Option<Value>, InterpreterError> { pub fn interpret(source_id: &str, source: &str) -> Result<Option<Value>, InterpreterError> {
let interpreter = Interpreter::new(Context::new(None)); let interpreter = Interpreter::new(Context::new(None));
interpreter.load_std()?;
interpreter.run(Arc::from(source_id), Arc::from(source))
}
pub fn interpret_without_std(
source_id: &str,
source: &str,
) -> Result<Option<Value>, InterpreterError> {
let interpreter = Interpreter::new(Context::new(None));
interpreter.run(Arc::from(source_id), Arc::from(source)) interpreter.run(Arc::from(source_id), Arc::from(source))
} }
@ -110,90 +99,9 @@ impl Interpreter {
Ok(value_option) Ok(value_option)
} }
pub fn load_std(&self) -> Result<(), InterpreterError> {
let std_core_source: (Arc<str>, Arc<str>) = (
Arc::from("std/core.ds"),
Arc::from(include_str!("../../std/core.ds")),
);
let std_sources: [(Arc<str>, Arc<str>); 4] = [
(
Arc::from("std/fs.ds"),
Arc::from(include_str!("../../std/fs.ds")),
),
(
Arc::from("std/io.ds"),
Arc::from(include_str!("../../std/io.ds")),
),
(
Arc::from("std/json.ds"),
Arc::from(include_str!("../../std/json.ds")),
),
(
Arc::from("std/thread.ds"),
Arc::from(include_str!("../../std/thread.ds")),
),
];
log::info!("Start loading standard library...");
// Always load the core library first because other parts of the standard library may depend
// on it.
self.run_with_builtins(std_core_source.0, std_core_source.1)?;
let error = if cfg!(test) {
// In debug mode, load the standard library sequentially to get consistent errors.
std_sources
.into_iter()
.find_map(|(source_id, source)| self.run_with_builtins(source_id, source).err())
} else {
// In release mode, load the standard library asynchronously.
std_sources
.into_par_iter()
.find_map_any(|(source_id, source)| self.run_with_builtins(source_id, source).err())
};
log::info!("Finish loading standard library.");
if let Some(error) = error {
Err(error)
} else {
Ok(())
}
}
pub fn sources(&self) -> vec::IntoIter<(Arc<str>, Arc<str>)> { pub fn sources(&self) -> vec::IntoIter<(Arc<str>, Arc<str>)> {
self.sources.read().unwrap().clone().into_iter() self.sources.read().unwrap().clone().into_iter()
} }
fn run_with_builtins(
&self,
source_id: Arc<str>,
source: Arc<str>,
) -> Result<Option<Value>, InterpreterError> {
let mut sources = self.sources.write().unwrap();
sources.push((source_id.clone(), source.clone()));
let tokens = lex(source.as_ref()).map_err(|errors| InterpreterError {
source_id: source_id.clone(),
errors,
})?;
let abstract_tree = parser(true)
.parse(tokens.spanned((tokens.len()..tokens.len()).into()))
.into_result()
.map_err(|errors| InterpreterError {
source_id: source_id.clone(),
errors: errors
.into_iter()
.map(|error| DustError::from(error))
.collect(),
})?;
let value_option = abstract_tree
.run(&self.context, true)
.map_err(|errors| InterpreterError { source_id, errors })?;
Ok(value_option)
}
} }
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
@ -293,6 +201,7 @@ impl InterpreterError {
"This is the interpreter's fault. Please submit a bug with this error message." "This is the interpreter's fault. Please submit a bug with this error message."
} }
RuntimeError::SerdeJson(serde_json_error) => &serde_json_error.to_string(), RuntimeError::SerdeJson(serde_json_error) => &serde_json_error.to_string(),
RuntimeError::Use(_) => todo!(),
}; };
( (
@ -508,6 +417,7 @@ impl InterpreterError {
ValidationError::ExpectedList { .. } => todo!(), ValidationError::ExpectedList { .. } => todo!(),
ValidationError::BuiltInFunctionFailure(reason) => builder ValidationError::BuiltInFunctionFailure(reason) => builder
.add_label(Label::new((self.source_id.clone(), 0..0)).with_message(reason)), .add_label(Label::new((self.source_id.clone(), 0..0)).with_message(reason)),
ValidationError::CannotUsePath(_) => todo!(),
} }
} }
@ -522,10 +432,16 @@ impl InterpreterError {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use self::standard_library::std_full_compiled;
use super::*; use super::*;
#[test] #[test]
fn load_standard_library() { fn load_standard_library() {
Interpreter::new(Context::new(None)).load_std().unwrap(); let context = Context::new(None);
for abstract_tree in std_full_compiled() {
abstract_tree.run(&context, true).unwrap();
}
} }
} }

View File

@ -774,6 +774,13 @@ pub fn parser<'src>(
) )
}); });
let r#use = select! {
Token::Use(text) => text,
}
.map_with(|text, state| {
Statement::Use(Use::new(text.to_string()).with_position(state.span()))
});
comment comment
.repeated() .repeated()
.or_not() .or_not()
@ -788,6 +795,7 @@ pub fn parser<'src>(
r#while, r#while,
type_alias, type_alias,
enum_declaration, enum_declaration,
r#use,
))) )))
.then_ignore(just(Token::Symbol(Symbol::Semicolon)).or_not()) .then_ignore(just(Token::Symbol(Symbol::Semicolon)).or_not())
}); });

View File

@ -0,0 +1,109 @@
use std::sync::{Arc, OnceLock};
use crate::{abstract_tree::AbstractTree, lexer::lex, parser};
use chumsky::prelude::*;
pub fn std_full_compiled() -> [AbstractTree; 5] {
[
std_core_compiled().clone(),
std_fs_compiled().clone(),
std_io_compiled().clone(),
std_json_compiled().clone(),
std_thread_compiled().clone(),
]
}
pub const STD_CORE: &str = include_str!("../../std/core.ds");
pub const STD_FS: &str = include_str!("../../std/fs.ds");
pub const STD_IO: &str = include_str!("../../std/io.ds");
pub const STD_JSON: &str = include_str!("../../std/json.ds");
pub const STD_THREAD: &str = include_str!("../../std/thread.ds");
static CORE_SOURCE: OnceLock<(Arc<str>, Arc<str>)> = OnceLock::new();
pub fn core_source<'a>() -> &'a (Arc<str>, Arc<str>) {
CORE_SOURCE.get_or_init(|| (Arc::from("std/core.ds"), Arc::from(STD_CORE)))
}
static STD_SOURCES: OnceLock<[(Arc<str>, Arc<str>); 4]> = OnceLock::new();
pub fn std_sources<'a>() -> &'a [(Arc<str>, Arc<str>); 4] {
STD_SOURCES.get_or_init(|| {
[
(Arc::from("std/fs.ds"), Arc::from(STD_FS)),
(Arc::from("std/io.ds"), Arc::from(STD_IO)),
(Arc::from("std/json.ds"), Arc::from(STD_JSON)),
(Arc::from("std/thread.ds"), Arc::from(STD_THREAD)),
]
})
}
static STD_CORE_COMPILED: OnceLock<AbstractTree> = OnceLock::new();
pub fn std_core_compiled<'a>() -> &'a AbstractTree {
STD_CORE_COMPILED.get_or_init(|| {
let tokens = lex(STD_CORE).expect("Failed to lex.");
let abstract_tree = parser(true)
.parse(tokens.spanned((tokens.len()..tokens.len()).into()))
.into_result()
.expect("Failed to parse.");
abstract_tree
})
}
static STD_FS_COMPILED: OnceLock<AbstractTree> = OnceLock::new();
pub fn std_fs_compiled<'a>() -> &'a AbstractTree {
STD_FS_COMPILED.get_or_init(|| {
let tokens = lex(STD_FS).expect("Failed to lex.");
let abstract_tree = parser(true)
.parse(tokens.spanned((tokens.len()..tokens.len()).into()))
.into_result()
.expect("Failed to parse.");
abstract_tree
})
}
static STD_IO_COMPILED: OnceLock<AbstractTree> = OnceLock::new();
pub fn std_io_compiled<'a>() -> &'a AbstractTree {
STD_IO_COMPILED.get_or_init(|| {
let tokens = lex(STD_IO).expect("Failed to lex.");
let abstract_tree = parser(true)
.parse(tokens.spanned((tokens.len()..tokens.len()).into()))
.into_result()
.expect("Failed to parse.");
abstract_tree
})
}
static STD_JSON_COMPILED: OnceLock<AbstractTree> = OnceLock::new();
pub fn std_json_compiled<'a>() -> &'a AbstractTree {
STD_JSON_COMPILED.get_or_init(|| {
let tokens = lex(STD_JSON).expect("Failed to lex.");
let abstract_tree = parser(true)
.parse(tokens.spanned((tokens.len()..tokens.len()).into()))
.into_result()
.expect("Failed to parse.");
abstract_tree
})
}
static STD_THREAD_COMPILED: OnceLock<AbstractTree> = OnceLock::new();
pub fn std_thread_compiled<'a>() -> &'a AbstractTree {
STD_THREAD_COMPILED.get_or_init(|| {
let tokens = lex(STD_THREAD).expect("Failed to lex.");
let abstract_tree = parser(true)
.parse(tokens.spanned((tokens.len()..tokens.len()).into()))
.into_result()
.expect("Failed to parse.");
abstract_tree
})
}

View File

@ -46,7 +46,10 @@ fn callback() {
#[test] #[test]
fn built_in_function_call() { fn built_in_function_call() {
assert_eq!(interpret("test", "io.write_line('Hiya')"), Ok(None)); assert_eq!(
interpret("test", "use std.io io.write_line('Hiya')"),
Ok(None)
);
} }
#[test] #[test]

View File

@ -17,3 +17,4 @@ env_logger = "0.11.3"
log = "0.4.21" log = "0.4.21"
nu-ansi-term = "0.50.0" nu-ansi-term = "0.50.0"
reedline = { version = "0.30.0", features = ["sqlite", "system_clipboard"] } reedline = { version = "0.30.0", features = ["sqlite", "system_clipboard"] }
ron = "0.8.1"

View File

@ -33,7 +33,7 @@ struct Args {
parse: bool, parse: bool,
#[arg(long)] #[arg(long)]
no_std: bool, compile: bool,
/// Location of the file to run. /// Location of the file to run.
path: Option<String>, path: Option<String>,
@ -59,22 +59,6 @@ fn main() {
let context = Context::new(None); let context = Context::new(None);
let interpreter = Interpreter::new(context.clone()); let interpreter = Interpreter::new(context.clone());
if !args.no_std {
let load_std_result = interpreter.load_std();
if let Err(error) = load_std_result {
eprintln!("Failed to load standard library");
for report in error.build_reports() {
report
.write_for_stdout(sources(interpreter.sources()), stderr())
.unwrap();
}
return;
}
}
let (source_id, source): (Arc<str>, Arc<str>) = if let Some(path) = args.path { let (source_id, source): (Arc<str>, Arc<str>) = if let Some(path) = args.path {
let source = read_to_string(&path).unwrap(); let source = read_to_string(&path).unwrap();
@ -125,6 +109,25 @@ fn main() {
return; return;
} }
if args.compile {
match interpreter.parse(source_id, source.as_ref()) {
Ok(abstract_tree) => {
let ron = ron::to_string(&abstract_tree).unwrap();
println!("{ron}")
}
Err(error) => {
for report in error.build_reports() {
report
.write_for_stdout(sources(interpreter.sources()), stderr())
.unwrap();
}
}
}
return;
}
let run_result = interpreter.run(source_id.clone(), source); let run_result = interpreter.run(source_id.clone(), source);
match run_result { match run_result {