Jeff 2024-11-28 02:03:58 -05:00
parent 5432001dff
commit dae5b7678c
5 changed files with 104 additions and 152 deletions

@@ -9,7 +9,7 @@ use std::hash::{Hash, Hasher};
 use serde::{Deserialize, Serialize};
 
-use crate::{ConcreteValue, Disassembler, FunctionType, Instruction, Scope, Span, Type};
+use crate::{ConcreteValue, Disassembler, FunctionType, Instruction, Operation, Scope, Span, Type};
 
 /// In-memory representation of a Dust program or function.
 ///
@@ -125,6 +125,21 @@ impl Chunk {
             .ok_or(ChunkError::InstructionIndexOutOfBounds { index })
     }
 
+    pub fn get_last_operations<const COUNT: usize>(&self) -> Option<[Operation; COUNT]> {
+        let mut n_operations = [Operation::Return; COUNT];
+
+        for (nth, operation) in n_operations.iter_mut().rev().zip(
+            self.instructions
+                .iter()
+                .rev()
+                .map(|(instruction, _, _)| instruction.operation()),
+        ) {
+            *nth = operation;
+        }
+
+        Some(n_operations)
+    }
+
     pub fn locals(&self) -> &Vec<Local> {
         &self.locals
     }
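The new Chunk::get_last_operations method lets callers pattern-match the tail of a chunk's bytecode; the const-generic COUNT is inferred from the length of the array pattern, which is how the compiler hunks below use it. A minimal usage sketch, assuming crate-internal paths; ends_with_comparison_jump is a hypothetical helper and is not part of this commit:

use crate::{Chunk, Operation};

// Hypothetical helper (illustration only): COUNT = 3 is inferred from the
// three-element pattern, so this inspects the last three instructions emitted.
fn ends_with_comparison_jump(chunk: &Chunk) -> bool {
    matches!(
        chunk.get_last_operations(),
        Some([
            Operation::Equal | Operation::Less | Operation::LessEqual,
            Operation::Jump,
            Operation::LoadBoolean | Operation::LoadConstant,
        ])
    )
}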

@@ -17,10 +17,11 @@ use crate::{
         Call, CallNative, Close, DefineLocal, GetLocal, Jump, LoadBoolean, LoadConstant, LoadList,
         LoadSelf, Move, Negate, Not, Return, SetLocal, Test,
     },
+    optimize_control_flow, optimize_set_local,
     value::ConcreteValue,
     AnnotatedError, Argument, Chunk, ChunkError, Destination, DustError, FunctionType, Instruction,
-    LexError, Lexer, Local, NativeFunction, Operation, Optimizer, Scope, Span, Token, TokenKind,
-    TokenOwned, Type, TypeConflict,
+    LexError, Lexer, Local, NativeFunction, Operation, Scope, Span, Token, TokenKind, TokenOwned,
+    Type, TypeConflict,
 };
 
 /// Compiles the input and returns a chunk.
@@ -234,22 +235,6 @@ impl<'src> Compiler<'src> {
         })
     }
 
-    fn get_last_operations<const COUNT: usize>(&self) -> Option<[Operation; COUNT]> {
-        let mut n_operations = [Operation::Return; COUNT];
-
-        for (nth, operation) in n_operations.iter_mut().rev().zip(
-            self.chunk
-                .instructions()
-                .iter()
-                .rev()
-                .map(|(instruction, _, _)| instruction.operation()),
-        ) {
-            *nth = operation;
-        }
-
-        Some(n_operations)
-    }
-
     fn get_last_jumpable_mut_between(
         &mut self,
         minimum: usize,
@@ -675,7 +660,7 @@ impl<'src> Compiler<'src> {
     fn parse_comparison_binary(&mut self) -> Result<(), CompileError> {
         if let Some([Operation::Equal | Operation::Less | Operation::LessEqual, _, _]) =
-            self.get_last_operations()
+            self.chunk.get_last_operations()
         {
             return Err(CompileError::CannotChainComparison {
                 position: self.current_position,
@@ -875,10 +860,7 @@ impl<'src> Compiler<'src> {
             });
 
             self.emit_instruction(set_local, Type::None, start_position);
-
-            let mut optimizer = Optimizer::new(&mut self.chunk);
-
-            optimizer.optimize_set_local();
+            optimize_set_local(&mut self.chunk);
 
             return Ok(());
         }
@@ -979,7 +961,7 @@ impl<'src> Compiler<'src> {
         self.parse_expression()?;
 
         if matches!(
-            self.get_last_operations(),
+            self.chunk.get_last_operations(),
             Some([
                 Operation::Equal | Operation::Less | Operation::LessEqual,
                 Operation::Jump,
@@ -1093,9 +1075,7 @@ impl<'src> Compiler<'src> {
             .insert(if_block_start, (jump, Type::None, if_block_start_position));
 
         if self.chunk.len() >= 4 {
-            let mut optimizer = Optimizer::new(&mut self.chunk);
-
-            optimizer.optimize_control_flow();
+            optimize_control_flow(&mut self.chunk);
         }
 
         let else_last_register = self.next_register().saturating_sub(1);
@@ -1119,7 +1099,7 @@ impl<'src> Compiler<'src> {
         self.parse_expression()?;
 
         if matches!(
-            self.get_last_operations(),
+            self.chunk.get_last_operations(),
             Some([
                 Operation::Equal | Operation::Less | Operation::LessEqual,
                 Operation::Jump,

@@ -8,7 +8,7 @@ pub mod instruction;
 pub mod lexer;
 pub mod native_function;
 pub mod operation;
-pub mod optimizer;
+pub mod optimize;
 pub mod scope;
 pub mod token;
 pub mod r#type;
@@ -23,7 +23,7 @@ pub use crate::instruction::{Argument, Destination, Instruction};
 pub use crate::lexer::{lex, LexError, Lexer};
 pub use crate::native_function::{NativeFunction, NativeFunctionError};
 pub use crate::operation::Operation;
-pub use crate::optimizer::Optimizer;
+pub use crate::optimize::{optimize_control_flow, optimize_set_local};
 pub use crate::r#type::{EnumType, FunctionType, StructType, Type, TypeConflict};
 pub use crate::scope::Scope;
 pub use crate::token::{display_token_list, Token, TokenKind, TokenOwned};

dust-lang/src/optimize.rs (new file, 78 lines added)
@@ -0,0 +1,78 @@
+//! Tools used by the compiler to optimize a chunk's bytecode.
+use crate::{instruction::SetLocal, Chunk, Operation};
+
+/// Optimizes a short control flow pattern.
+///
+/// Comparison and test instructions (which are always followed by a JUMP) can be optimized when
+/// the next instructions are two constant or boolean loaders. The first loader is set to skip
+/// an instruction if it is run while the second loader is modified to use the first's register.
+/// This makes the following two code snippets compile to the same bytecode:
+///
+/// ```dust
+/// 4 == 4
+/// ```
+///
+/// ```dust
+/// if 4 == 4 { true } else { false }
+/// ```
+///
+/// The instructions must be in the following order:
+/// - `Operation::Equal` | `Operation::Less` | `Operation::LessEqual` | `Operation::Test`
+/// - `Operation::Jump`
+/// - `Operation::LoadBoolean` | `Operation::LoadConstant`
+/// - `Operation::LoadBoolean` | `Operation::LoadConstant`
+pub fn optimize_control_flow(chunk: &mut Chunk) {
+    if !matches!(
+        chunk.get_last_operations(),
+        Some([
+            Operation::Equal | Operation::Less | Operation::LessEqual | Operation::Test,
+            Operation::Jump,
+            Operation::LoadBoolean | Operation::LoadConstant,
+            Operation::LoadBoolean | Operation::LoadConstant,
+        ])
+    ) {
+        return;
+    }
+
+    log::debug!("Consolidating registers for control flow optimization");
+
+    let instructions = chunk.instructions_mut();
+    let first_loader = &mut instructions.iter_mut().nth_back(1).unwrap().0;
+
+    first_loader.set_c_to_boolean(true);
+
+    let first_loader_register = first_loader.a();
+    let second_loader = &mut instructions.last_mut().unwrap().0;
+    let second_loader_new = *second_loader.clone().set_a(first_loader_register);
+
+    *second_loader = second_loader_new;
+}
+
+pub fn optimize_set_local(chunk: &mut Chunk) {
+    if !matches!(
+        chunk.get_last_operations(),
+        Some([
+            Operation::Add
+                | Operation::Subtract
+                | Operation::Multiply
+                | Operation::Divide
+                | Operation::Modulo,
+            Operation::SetLocal,
+        ])
+    ) {
+        return;
+    }
+
+    log::debug!("Condensing math and SetLocal to math instruction");
+
+    let instructions = chunk.instructions_mut();
+    let set_local = SetLocal::from(&instructions.pop().unwrap().0);
+    let math_instruction = instructions.last_mut().unwrap().0;
+    let math_instruction_new = *math_instruction
+        .clone()
+        .set_a(set_local.local_index)
+        .set_a_is_local(true);
+
+    instructions.last_mut().unwrap().0 = math_instruction_new;
+}
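For orientation, a sketch of how the new free functions replace the Optimizer workflow deleted below. The wrapper function and the import paths are assumptions for illustration (the crate-root re-exports come from the lib.rs hunk above); in the compiler itself the calls are made inline, as shown in the earlier hunks of this commit:

use dust_lang::{optimize_control_flow, optimize_set_local, Chunk};

// Hypothetical wrapper, not part of this commit. Each pass inspects the tail of
// the chunk with get_last_operations and returns early when the expected
// operation pattern is absent, so running both on a non-matching chunk is cheap.
fn run_peephole_passes(chunk: &mut Chunk) {
    // Makes the two loaders after a comparison/test + jump share one register.
    optimize_control_flow(chunk);
    // Folds a math instruction followed by SetLocal into a single math
    // instruction that writes to the local's register.
    optimize_set_local(chunk);
}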

(deleted file, 121 lines removed)
@@ -1,121 +0,0 @@
-//! Tool used by the compiler to optimize a chunk's bytecode.
-use crate::{instruction::SetLocal, Chunk, Instruction, Operation, Span, Type};
-
-/// An instruction optimizer that mutably borrows instructions from a chunk.
-#[derive(Debug)]
-pub struct Optimizer<'a> {
-    chunk: &'a mut Chunk,
-}
-
-impl<'a> Optimizer<'a> {
-    /// Creates a new optimizer with a mutable reference to some of a chunk's instructions.
-    pub fn new(instructions: &'a mut Chunk) -> Self {
-        Self {
-            chunk: instructions,
-        }
-    }
-
-    /// Optimizes a short control flow pattern.
-    ///
-    /// Comparison and test instructions (which are always followed by a JUMP) can be optimized when
-    /// the next instructions are two constant or boolean loaders. The first loader is set to skip
-    /// an instruction if it is run while the second loader is modified to use the first's register.
-    /// This makes the following two code snippets compile to the same bytecode:
-    ///
-    /// ```dust
-    /// 4 == 4
-    /// ```
-    ///
-    /// ```dust
-    /// if 4 == 4 { true } else { false }
-    /// ```
-    ///
-    /// The instructions must be in the following order:
-    /// - `Operation::Equal` | `Operation::Less` | `Operation::LessEqual` | `Operation::Test`
-    /// - `Operation::Jump`
-    /// - `Operation::LoadBoolean` | `Operation::LoadConstant`
-    /// - `Operation::LoadBoolean` | `Operation::LoadConstant`
-    pub fn optimize_control_flow(&mut self) -> bool {
-        if !matches!(
-            self.get_operations(),
-            Some([
-                Operation::Equal | Operation::Less | Operation::LessEqual | Operation::Test,
-                Operation::Jump,
-                Operation::LoadBoolean | Operation::LoadConstant,
-                Operation::LoadBoolean | Operation::LoadConstant,
-            ])
-        ) {
-            return false;
-        }
-
-        log::debug!("Consolidating registers for control flow optimization");
-
-        let instructions = self.instructions_mut();
-        let first_loader = &mut instructions.iter_mut().nth_back(1).unwrap().0;
-
-        first_loader.set_c_to_boolean(true);
-
-        let first_loader_register = first_loader.a();
-        let second_loader = &mut instructions.last_mut().unwrap().0;
-        let second_loader_new = *second_loader.clone().set_a(first_loader_register);
-
-        *second_loader = second_loader_new;
-
-        true
-    }
-
-    pub fn optimize_set_local(&mut self) -> bool {
-        if !matches!(
-            self.get_operations(),
-            Some([
-                Operation::Add
-                    | Operation::Subtract
-                    | Operation::Multiply
-                    | Operation::Divide
-                    | Operation::Modulo,
-                Operation::SetLocal,
-            ])
-        ) {
-            return false;
-        }
-
-        log::debug!("Condensing math and SetLocal to math instruction");
-
-        let instructions = self.instructions_mut();
-        let set_local = SetLocal::from(&instructions.pop().unwrap().0);
-        let math_instruction = instructions.last_mut().unwrap().0;
-        let math_instruction_new = *math_instruction
-            .clone()
-            .set_a(set_local.local_index)
-            .set_a_is_local(true);
-
-        instructions.last_mut().unwrap().0 = math_instruction_new;
-
-        true
-    }
-
-    fn instructions_mut(&mut self) -> &mut Vec<(Instruction, Type, Span)> {
-        self.chunk.instructions_mut()
-    }
-
-    fn get_operations<const COUNT: usize>(&self) -> Option<[Operation; COUNT]> {
-        if self.chunk.len() < COUNT {
-            return None;
-        }
-
-        let mut n_operations = [Operation::Return; COUNT];
-
-        for (nth, operation) in n_operations.iter_mut().rev().zip(
-            self.chunk
-                .instructions()
-                .iter()
-                .rev()
-                .map(|(instruction, _, _)| instruction.operation()),
-        ) {
-            *nth = operation;
-        }
-
-        Some(n_operations)
-    }
-}