Diffstat (limited to 'third_party/rust/jsparagus-emitter')
16 files changed, 5233 insertions, 0 deletions
diff --git a/third_party/rust/jsparagus-emitter/.cargo-checksum.json b/third_party/rust/jsparagus-emitter/.cargo-checksum.json
new file mode 100644
index 0000000000..5a4fce48c8
--- /dev/null
+++ b/third_party/rust/jsparagus-emitter/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"97ba372ad5ce5a4a16e8e2d0fd086b66e3383d6577b5c68de289e8767085b0a8","src/array_emitter.rs":"bbc6528321f1d11d7c86c4f2bfdcfc9dced8f0b8b1c30c9f0a5355f300d196b6","src/ast_emitter.rs":"050858e25f6bab6787771058afe504c66e74e9026e9ce873160bccc6366eca47","src/block_emitter.rs":"78965260d87a66c5324d6f3bdfea0f1938f8037f70adde148dbb2db599d1b2c0","src/compilation_info.rs":"b0d91b0f8d6940cb7087b474c3c814b758c8ce8d9027c415b76ad4af78be6140","src/control_structures.rs":"b32fbfff53bd378dcb45d63620006bea15c2fec1e7bc0bb163567dfe086e4931","src/dis.rs":"4a335d813fa965482ca0f20a7b9295a55ce7625b577d42bd8b33b156b81c6306","src/emitter.rs":"41a6a642d1970e625c264fc58bf245c6975b1e2d86707481ce4f942798c4b48a","src/emitter_scope.rs":"93c2b2a324ccb46b74adb2a28f56360a32652088e59c03641b4f1b608549dc78","src/expression_emitter.rs":"f8e02785dffb179bbe9fe58e45bbfccc08adc3ad0a071a0073bed0feedc8ed9a","src/function_declaration_emitter.rs":"0e6ae706ced215882f3a45b6e13f022ec1effa8edf1026b7ba7988810646982b","src/lib.rs":"f91576fb0f1e3cf444dd1d8ee25ee9bfd0b1e890e427a3863fdb6a4ad1611b61","src/object_emitter.rs":"998423b3d6ef8797fadef6763803627df72fde292b1b34d6a41b2e66a331a181","src/reference_op_emitter.rs":"e1507033c17318f59dbbdd1514d1bd1263f0c7d72530d0f2b2ab071d58b39f72","src/script_emitter.rs":"150a3d6330f40099ad1df8c52cd1831e3ef0fd4eecf44a20e3fff7acef2dd640"},"package":null}
\ No newline at end of file diff --git a/third_party/rust/jsparagus-emitter/Cargo.toml b/third_party/rust/jsparagus-emitter/Cargo.toml new file mode 100644 index 0000000000..0bc772dac6 --- /dev/null +++ b/third_party/rust/jsparagus-emitter/Cargo.toml @@ -0,0 +1,34 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies. +# +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. + +[package] +edition = "2018" +name = "jsparagus-emitter" +version = "0.1.0" +authors = ["khyperia <github@khyperia.com>"] +license = "MIT/Apache-2.0" + +[dependencies] +bumpalo = "3.4.0" +byteorder = "1.3.2" +indexmap = "2.0" + +[dependencies.jsparagus-ast] +path = "../ast" + +[dependencies.jsparagus-scope] +path = "../scope" + +[dependencies.jsparagus-stencil] +path = "../stencil" + +[dev-dependencies.jsparagus-parser] +path = "../parser" diff --git a/third_party/rust/jsparagus-emitter/src/array_emitter.rs b/third_party/rust/jsparagus-emitter/src/array_emitter.rs new file mode 100644 index 0000000000..b44137182e --- /dev/null +++ b/third_party/rust/jsparagus-emitter/src/array_emitter.rs @@ -0,0 +1,176 @@ +use crate::ast_emitter::AstEmitter; +use crate::emitter::EmitError; + +/// Struct for emitting bytecode for an array element. +pub struct ArrayElementEmitter<'a, F> +where + F: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub state: &'a mut ArrayEmitterState, + pub elem: F, +} + +impl<'a, F> ArrayElementEmitter<'a, F> +where + F: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + // [stack] ARRAY INDEX? + + (self.elem)(emitter)?; + // [stack] ARRAY INDEX? ELEM + + match &mut self.state.0 { + ArrayEmitterStateInternal::BeforeSpread { ref mut index } => { + emitter.emit.init_elem_array(*index); + // [stack] ARRAY + + *index += 1; + } + ArrayEmitterStateInternal::AfterSpread => { + emitter.emit.init_elem_inc(); + // [stack] ARRAY INDEX+1 + } + } + + // [stack] ARRAY INDEX? + + Ok(()) + } +} + +/// Struct for emitting bytecode for an array element with hole. +pub struct ArrayElisionEmitter<'a> { + pub state: &'a mut ArrayEmitterState, +} + +impl<'a> ArrayElisionEmitter<'a> { + pub fn emit(self, emitter: &mut AstEmitter) { + // [stack] ARRAY INDEX? + + emitter.emit.hole(); + // [stack] ARRAY INDEX? HOLE + + match &mut self.state.0 { + ArrayEmitterStateInternal::BeforeSpread { ref mut index } => { + emitter.emit.init_elem_array(*index); + // [stack] ARRAY + + *index += 1; + } + ArrayEmitterStateInternal::AfterSpread => { + emitter.emit.init_elem_inc(); + // [stack] ARRAY INDEX+1 + } + } + + // [stack] ARRAY INDEX? + } +} + +/// Struct for emitting bytecode for an array element with spread. +pub struct ArraySpreadEmitter<'a, F> +where + F: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub state: &'a mut ArrayEmitterState, + pub elem: F, +} + +impl<'a, F> ArraySpreadEmitter<'a, F> +where + F: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + // [stack] ARRAY INDEX? 
+ + match self.state.0 { + ArrayEmitterStateInternal::BeforeSpread { index } => { + emitter.emit.numeric(index as f64); + } + _ => {} + } + self.state.0 = ArrayEmitterStateInternal::AfterSpread; + + // [stack] ARRAY INDEX + + Err(EmitError::NotImplemented("TODO: spread element")) + } +} + +enum ArrayEmitterStateInternal { + BeforeSpread { index: u32 }, + AfterSpread, +} + +/// Opaque struct that can be created only by ArrayEmitter. +/// This guarantees that Array*Emitter structs cannot be used outside +/// of ArrayEmitter callback. +pub struct ArrayEmitterState(ArrayEmitterStateInternal); + +impl ArrayEmitterState { + fn new() -> Self { + Self(ArrayEmitterStateInternal::BeforeSpread { index: 0 }) + } +} + +pub enum ArrayElementKind { + Normal, + Elision, + Spread, +} + +/// Struct for emitting bytecode for an array expression. +pub struct ArrayEmitter<'a, ElemT, ElemKindF, ElemF> +where + ElemKindF: Fn(&ElemT) -> ArrayElementKind, + ElemF: Fn(&mut AstEmitter, &mut ArrayEmitterState, &'a ElemT) -> Result<(), EmitError>, +{ + pub elements: std::slice::Iter<'a, ElemT>, + pub elem_kind: ElemKindF, + pub elem: ElemF, +} + +impl<'a, ElemT, ElemKindF, ElemF> ArrayEmitter<'a, ElemT, ElemKindF, ElemF> +where + ElemKindF: Fn(&ElemT) -> ArrayElementKind, + ElemF: Fn(&mut AstEmitter, &mut ArrayEmitterState, &'a ElemT) -> Result<(), EmitError>, +{ + pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + // [stack] + + // Initialize the array to its minimum possible length. + let min_length = self + .elements + .clone() + .map(|e| match (self.elem_kind)(e) { + ArrayElementKind::Normal => 1, + ArrayElementKind::Elision => 1, + ArrayElementKind::Spread => 0, + }) + .sum::<u32>(); + + emitter.emit.new_array(min_length); + // [stack] ARRAY + + let mut state = ArrayEmitterState::new(); + for element in self.elements { + (self.elem)(emitter, &mut state, element)?; + // [stack] ARRAY INDEX? + } + + match state.0 { + ArrayEmitterStateInternal::AfterSpread => { + // [stack] ARRAY INDEX + + emitter.emit.pop(); + // [stack] ARRAY + } + _ => {} + } + + // [stack] ARRAY + + Ok(()) + } +} diff --git a/third_party/rust/jsparagus-emitter/src/ast_emitter.rs b/third_party/rust/jsparagus-emitter/src/ast_emitter.rs new file mode 100644 index 0000000000..287f5b8d35 --- /dev/null +++ b/third_party/rust/jsparagus-emitter/src/ast_emitter.rs @@ -0,0 +1,961 @@ +//! High-level bytecode emitter. +//! +//! Converts AST nodes to bytecode. 
+ +use crate::array_emitter::*; +use crate::block_emitter::BlockEmitter; +use crate::compilation_info::CompilationInfo; +use crate::emitter::{EmitError, EmitOptions, InstructionWriter}; +use crate::emitter_scope::{EmitterScopeStack, NameLocation}; +use crate::expression_emitter::*; +use crate::function_declaration_emitter::{ + AnnexBFunctionDeclarationEmitter, LazyFunctionEmitter, LexicalFunctionDeclarationEmitter, +}; +use crate::object_emitter::*; +use crate::reference_op_emitter::{ + AssignmentEmitter, CallEmitter, DeclarationEmitter, ElemReferenceEmitter, GetElemEmitter, + GetNameEmitter, GetPropEmitter, GetSuperElemEmitter, GetSuperPropEmitter, NameReferenceEmitter, + NewEmitter, PropReferenceEmitter, +}; +use crate::script_emitter::ScriptEmitter; +use ast::source_atom_set::{CommonSourceAtomSetIndices, SourceAtomSetIndex}; +use ast::types::*; +use stencil::opcode::Opcode; +use stencil::regexp::RegExpItem; +use stencil::result::EmitResult; +use stencil::script::ScriptStencil; + +use crate::control_structures::{ + BreakEmitter, CForEmitter, ContinueEmitter, ControlStructureStack, DoWhileEmitter, + ForwardJumpEmitter, JumpKind, LabelEmitter, WhileEmitter, +}; + +/// Emit a program, converting the AST directly to bytecode. +pub fn emit_program<'alloc>( + ast: &Program, + options: &EmitOptions, + mut compilation_info: CompilationInfo<'alloc>, +) -> Result<EmitResult<'alloc>, EmitError> { + let emitter = AstEmitter::new(options, &mut compilation_info); + + let script = match ast { + Program::Script(script) => emitter.emit_script(script)?, + _ => { + return Err(EmitError::NotImplemented("TODO: modules")); + } + }; + + compilation_info.scripts.set_top_level(script); + + Ok(EmitResult::new( + compilation_info.atoms.into(), + compilation_info.slices.into(), + compilation_info.scope_data_map.into(), + compilation_info.regexps.into(), + compilation_info.scripts.into(), + compilation_info.script_data_list.into(), + )) +} + +pub struct AstEmitter<'alloc, 'opt> { + pub emit: InstructionWriter, + pub scope_stack: EmitterScopeStack, + pub options: &'opt EmitOptions, + pub compilation_info: &'opt mut CompilationInfo<'alloc>, + pub control_stack: ControlStructureStack, +} + +impl<'alloc, 'opt> AstEmitter<'alloc, 'opt> { + fn new( + options: &'opt EmitOptions, + compilation_info: &'opt mut CompilationInfo<'alloc>, + ) -> Self { + Self { + emit: InstructionWriter::new(), + scope_stack: EmitterScopeStack::new(), + options, + compilation_info, + control_stack: ControlStructureStack::new(), + } + } + + pub fn lookup_name(&mut self, name: SourceAtomSetIndex) -> NameLocation { + self.scope_stack.lookup_name(name) + } + + pub fn lookup_name_in_var(&mut self, name: SourceAtomSetIndex) -> NameLocation { + self.scope_stack.lookup_name_in_var(name) + } + + fn emit_script(mut self, ast: &Script) -> Result<ScriptStencil, EmitError> { + let scope_data_map = &self.compilation_info.scope_data_map; + let function_declarations = &self.compilation_info.function_declarations; + + let scope_index = scope_data_map.get_global_index(); + let scope_data = scope_data_map.get_global_at(scope_index); + + let top_level_functions: Vec<&Function> = scope_data + .functions + .iter() + .map(|key| { + *function_declarations + .get(key) + .expect("function should exist") + }) + .collect(); + + ScriptEmitter { + top_level_functions: top_level_functions.iter(), + top_level_function: |emitter, fun| emitter.emit_top_level_function_declaration(fun), + statements: ast.statements.iter(), + statement: |emitter, statement| 
emitter.emit_statement(statement), + } + .emit(&mut self)?; + + let script = self.emit.into_stencil( + &mut self.compilation_info.script_data_list, + self.options.extent.clone(), + )?; + + Ok(script) + } + + fn emit_top_level_function_declaration(&mut self, fun: &Function) -> Result<(), EmitError> { + if fun.is_generator { + return Err(EmitError::NotImplemented("TODO: Generator")); + } + if fun.is_async { + return Err(EmitError::NotImplemented("TODO: Async function")); + } + + let stencil_index = *self + .compilation_info + .function_stencil_indices + .get(fun) + .expect("ScriptStencil should be created"); + + // NOTE: GCIndex for the function is implicitly handled by + // global_or_eval_decl_instantiation. + LazyFunctionEmitter { stencil_index }.emit(self); + + Ok(()) + } + + fn emit_non_top_level_function_declaration(&mut self, fun: &Function) -> Result<(), EmitError> { + if fun.is_generator { + return Err(EmitError::NotImplemented("TODO: Generator")); + } + if fun.is_async { + return Err(EmitError::NotImplemented("TODO: Async function")); + } + + let stencil_index = *self + .compilation_info + .function_stencil_indices + .get(fun) + .expect("ScriptStencil should be created"); + + let is_annex_b = self + .compilation_info + .function_declaration_properties + .is_annex_b(stencil_index); + + let fun_index = LazyFunctionEmitter { stencil_index }.emit(self); + + let name = self + .compilation_info + .scripts + .get(stencil_index) + .fun_name() + .expect("Function declaration should have name"); + + if is_annex_b { + AnnexBFunctionDeclarationEmitter { fun_index, name }.emit(self)?; + } else { + LexicalFunctionDeclarationEmitter { fun_index, name }.emit(self)?; + } + + Ok(()) + } + + fn emit_statement(&mut self, ast: &Statement) -> Result<(), EmitError> { + match ast { + Statement::ClassDeclaration(_) => { + return Err(EmitError::NotImplemented("TODO: ClassDeclaration")); + } + Statement::BlockStatement { block, .. } => { + let scope_data_map = &self.compilation_info.scope_data_map; + let function_declarations = &self.compilation_info.function_declarations; + + let scope_index = scope_data_map.get_index(block); + let scope_data = scope_data_map.get_lexical_at(scope_index); + + let functions: Vec<&Function> = scope_data + .functions + .iter() + .map(|key| { + *function_declarations + .get(key) + .expect("function should exist") + }) + .collect(); + + BlockEmitter { + scope_index: self.compilation_info.scope_data_map.get_index(block), + functions: functions.iter(), + function: |emitter, fun| emitter.emit_non_top_level_function_declaration(fun), + statements: block.statements.iter(), + statement: |emitter, statement| emitter.emit_statement(statement), + } + .emit(self)?; + } + Statement::BreakStatement { label, .. } => { + BreakEmitter { + label: label.as_ref().map(|x| x.value), + } + .emit(self); + } + Statement::ContinueStatement { label, .. } => { + ContinueEmitter { + label: label.as_ref().map(|x| x.value), + } + .emit(self); + } + Statement::DebuggerStatement { .. } => { + self.emit.debugger(); + } + Statement::DoWhileStatement { block, test, .. } => { + DoWhileEmitter { + enclosing_emitter_scope_depth: self.scope_stack.current_depth(), + block: |emitter| emitter.emit_statement(block), + test: |emitter| emitter.emit_expression(test), + } + .emit(self)?; + } + Statement::EmptyStatement { .. } => (), + Statement::ExpressionStatement(ast) => { + ExpressionEmitter { + expr: |emitter| emitter.emit_expression(ast), + } + .emit(self)?; + } + Statement::ForInStatement { .. 
} => { + return Err(EmitError::NotImplemented("TODO: ForInStatement")); + } + Statement::ForOfStatement { .. } => { + return Err(EmitError::NotImplemented("TODO: ForOfStatement")); + } + Statement::ForStatement { + init, + test, + update, + block, + .. + } => { + CForEmitter { + enclosing_emitter_scope_depth: self.scope_stack.current_depth(), + maybe_init: init, + maybe_test: test, + maybe_update: update, + init: |emitter, val| match val { + VariableDeclarationOrExpression::VariableDeclaration(ast) => { + emitter.emit_variable_declaration_statement(ast) + } + VariableDeclarationOrExpression::Expression(expr) => { + emitter.emit_expression(expr)?; + emitter.emit.pop(); + Ok(()) + } + }, + test: |emitter, expr| emitter.emit_expression(expr), + update: |emitter, expr| { + emitter.emit_expression(expr)?; + emitter.emit.pop(); + Ok(()) + }, + block: |emitter| emitter.emit_statement(block), + } + .emit(self)?; + } + Statement::IfStatement(if_statement) => { + self.emit_if(if_statement)?; + } + Statement::LabelledStatement { label, body, .. } => { + LabelEmitter { + enclosing_emitter_scope_depth: self.scope_stack.current_depth(), + name: label.value, + body: |emitter| emitter.emit_statement(body), + } + .emit(self)?; + } + Statement::ReturnStatement { .. } => { + return Err(EmitError::NotImplemented("TODO: ReturnStatement")); + } + Statement::SwitchStatement { .. } => { + return Err(EmitError::NotImplemented("TODO: SwitchStatement")); + } + Statement::SwitchStatementWithDefault { .. } => { + return Err(EmitError::NotImplemented( + "TODO: SwitchStatementWithDefault", + )); + } + Statement::ThrowStatement { expression, .. } => { + self.emit_expression(expression)?; + self.emit.throw_(); + } + Statement::TryCatchStatement { .. } => { + return Err(EmitError::NotImplemented("TODO: TryCatchStatement")); + } + Statement::TryFinallyStatement { .. } => { + return Err(EmitError::NotImplemented("TODO: TryFinallyStatement")); + } + Statement::VariableDeclarationStatement(ast) => { + self.emit_variable_declaration_statement(ast)?; + } + Statement::WhileStatement { test, block, .. } => { + WhileEmitter { + enclosing_emitter_scope_depth: self.scope_stack.current_depth(), + test: |emitter| emitter.emit_expression(test), + block: |emitter| emitter.emit_statement(block), + } + .emit(self)?; + } + Statement::WithStatement { .. } => { + return Err(EmitError::NotImplemented("TODO: WithStatement")); + } + Statement::FunctionDeclaration(_) => {} + }; + + Ok(()) + } + + fn emit_variable_declaration_statement( + &mut self, + ast: &VariableDeclaration, + ) -> Result<(), EmitError> { + match ast.kind { + VariableDeclarationKind::Var { .. } => {} + VariableDeclarationKind::Let { .. } | VariableDeclarationKind::Const { .. 
} => {} + } + + for decl in &ast.declarators { + if let Some(init) = &decl.init { + self.emit_declaration_assignment(&decl.binding, &init)?; + } + } + + Ok(()) + } + + fn emit_declaration_assignment( + &mut self, + binding: &Binding, + init: &Expression, + ) -> Result<(), EmitError> { + match binding { + Binding::BindingIdentifier(binding) => { + let name = binding.name.value; + DeclarationEmitter { + lhs: |emitter| Ok(NameReferenceEmitter { name }.emit_for_declaration(emitter)), + rhs: |emitter| emitter.emit_expression(init), + } + .emit(self)?; + self.emit.pop(); + } + _ => { + return Err(EmitError::NotImplemented("BindingPattern")); + } + } + + Ok(()) + } + + fn emit_this(&mut self) -> Result<(), EmitError> { + Err(EmitError::NotImplemented("TODO: this")) + } + + fn emit_if(&mut self, if_statement: &IfStatement) -> Result<(), EmitError> { + self.emit_expression(&if_statement.test)?; + + let alternate_jump = ForwardJumpEmitter { + jump: JumpKind::JumpIfFalse, + } + .emit(self); + + // Then branch + self.emit.jump_target(); + self.emit_statement(&if_statement.consequent)?; + + if let Some(alternate) = &if_statement.alternate { + let then_jump = ForwardJumpEmitter { + jump: JumpKind::Goto, + } + .emit(self); + // ^^ part of then branch + + // Else branch + alternate_jump.patch_not_merge(self); + self.emit_statement(alternate)?; + + // Merge point after else + then_jump.patch_merge(self); + } else { + // Merge point without else + alternate_jump.patch_merge(self); + } + + Ok(()) + } + + fn emit_expression(&mut self, ast: &Expression) -> Result<(), EmitError> { + match ast { + Expression::ClassExpression(_) => { + return Err(EmitError::NotImplemented("TODO: ClassExpression")); + } + + Expression::LiteralBooleanExpression { value, .. } => { + self.emit.emit_boolean(*value); + } + + Expression::LiteralInfinityExpression { .. } => { + self.emit.double_(std::f64::INFINITY); + } + + Expression::LiteralNullExpression { .. } => { + self.emit.null(); + } + + Expression::LiteralNumericExpression(num) => { + self.emit.numeric(num.value); + } + + Expression::LiteralRegExpExpression { + pattern, + global, + ignore_case, + multi_line, + dot_all, + sticky, + unicode, + .. + } => { + let item = RegExpItem { + pattern: *pattern, + global: *global, + ignore_case: *ignore_case, + multi_line: *multi_line, + dot_all: *dot_all, + sticky: *sticky, + unicode: *unicode, + }; + let regexp_index = self.compilation_info.regexps.push(item); + let index = self.emit.get_regexp_gcthing_index(regexp_index); + self.emit.reg_exp(index); + } + + Expression::LiteralStringExpression { value, .. } => { + let str_index = self.emit.get_atom_gcthing_index(*value); + self.emit.string(str_index); + } + + Expression::ArrayExpression(ast) => { + self.emit_array_expression(ast)?; + } + + Expression::ArrowExpression { .. } => { + return Err(EmitError::NotImplemented("TODO: ArrowExpression")); + } + + Expression::AssignmentExpression { + binding, + expression, + .. + } => { + self.emit_assignment_expression(binding, expression)?; + } + + Expression::BinaryExpression { + operator, + left, + right, + .. + } => { + self.emit_binary_expression(operator, left, right)?; + } + + Expression::CallExpression(CallExpression { + callee, arguments, .. + }) => { + self.emit_call_expression(callee, arguments)?; + } + + Expression::CompoundAssignmentExpression { .. } => { + return Err(EmitError::NotImplemented( + "TODO: CompoundAssignmentExpression", + )); + } + + Expression::ConditionalExpression { + test, + consequent, + alternate, + .. 
+ } => { + self.emit_conditional_expression(test, consequent, alternate)?; + } + + Expression::FunctionExpression(_) => { + return Err(EmitError::NotImplemented("TODO: FunctionExpression")); + } + + Expression::IdentifierExpression(ast) => { + self.emit_identifier_expression(ast); + } + + Expression::MemberExpression(ast) => { + self.emit_member_expression(ast)?; + } + + Expression::NewExpression { + callee, arguments, .. + } => { + self.emit_new_expression(callee, arguments)?; + } + + Expression::NewTargetExpression { .. } => { + return Err(EmitError::NotImplemented("TODO: NewTargetExpression")); + } + + Expression::ObjectExpression(ast) => { + self.emit_object_expression(ast)?; + } + + Expression::OptionalChain { .. } => { + return Err(EmitError::NotImplemented("TODO: OptionalChain")); + } + + Expression::OptionalExpression { .. } => { + return Err(EmitError::NotImplemented("TODO: OptionalExpression")); + } + + Expression::UnaryExpression { + operator, operand, .. + } => { + let opcode = match operator { + UnaryOperator::Plus { .. } => Opcode::Pos, + UnaryOperator::Minus { .. } => Opcode::Neg, + UnaryOperator::LogicalNot { .. } => Opcode::Not, + UnaryOperator::BitwiseNot { .. } => Opcode::BitNot, + UnaryOperator::Void { .. } => Opcode::Void, + UnaryOperator::Typeof { .. } => { + return Err(EmitError::NotImplemented("TODO: Typeof")); + } + UnaryOperator::Delete { .. } => { + return Err(EmitError::NotImplemented("TODO: Delete")); + } + }; + self.emit_expression(operand)?; + self.emit.emit_unary_op(opcode); + } + + Expression::TemplateExpression(_) => { + return Err(EmitError::NotImplemented("TODO: TemplateExpression")); + } + + Expression::ThisExpression { .. } => { + self.emit_this()?; + } + + Expression::UpdateExpression { .. } => { + return Err(EmitError::NotImplemented("TODO: UpdateExpression")); + } + + Expression::YieldExpression { .. } => { + return Err(EmitError::NotImplemented("TODO: YieldExpression")); + } + + Expression::YieldGeneratorExpression { .. } => { + return Err(EmitError::NotImplemented("TODO: YieldGeneratorExpression")); + } + + Expression::AwaitExpression { .. } => { + return Err(EmitError::NotImplemented("TODO: AwaitExpression")); + } + + Expression::ImportCallExpression { .. } => { + return Err(EmitError::NotImplemented("TODO: ImportCallExpression")); + } + } + + Ok(()) + } + + fn emit_binary_expression( + &mut self, + operator: &BinaryOperator, + left: &Expression, + right: &Expression, + ) -> Result<(), EmitError> { + let opcode = match operator { + BinaryOperator::Equals { .. } => Opcode::Eq, + BinaryOperator::NotEquals { .. } => Opcode::Ne, + BinaryOperator::StrictEquals { .. } => Opcode::StrictEq, + BinaryOperator::StrictNotEquals { .. } => Opcode::StrictNe, + BinaryOperator::LessThan { .. } => Opcode::Lt, + BinaryOperator::LessThanOrEqual { .. } => Opcode::Le, + BinaryOperator::GreaterThan { .. } => Opcode::Gt, + BinaryOperator::GreaterThanOrEqual { .. } => Opcode::Ge, + BinaryOperator::In { .. } => Opcode::In, + BinaryOperator::Instanceof { .. } => Opcode::Instanceof, + BinaryOperator::LeftShift { .. } => Opcode::Lsh, + BinaryOperator::RightShift { .. } => Opcode::Rsh, + BinaryOperator::RightShiftExt { .. } => Opcode::Ursh, + BinaryOperator::Add { .. } => Opcode::Add, + BinaryOperator::Sub { .. } => Opcode::Sub, + BinaryOperator::Mul { .. } => Opcode::Mul, + BinaryOperator::Div { .. } => Opcode::Div, + BinaryOperator::Mod { .. } => Opcode::Mod, + BinaryOperator::Pow { .. } => Opcode::Pow, + BinaryOperator::BitwiseOr { .. 
} => Opcode::BitOr, + BinaryOperator::BitwiseXor { .. } => Opcode::BitXor, + BinaryOperator::BitwiseAnd { .. } => Opcode::BitAnd, + + BinaryOperator::Coalesce { .. } => { + self.emit_short_circuit(JumpKind::Coalesce, left, right)?; + return Ok(()); + } + BinaryOperator::LogicalOr { .. } => { + self.emit_short_circuit(JumpKind::LogicalOr, left, right)?; + return Ok(()); + } + BinaryOperator::LogicalAnd { .. } => { + self.emit_short_circuit(JumpKind::LogicalAnd, left, right)?; + return Ok(()); + } + + BinaryOperator::Comma { .. } => { + self.emit_expression(left)?; + self.emit.pop(); + self.emit_expression(right)?; + return Ok(()); + } + }; + + self.emit_expression(left)?; + self.emit_expression(right)?; + self.emit.emit_binary_op(opcode); + Ok(()) + } + + fn emit_short_circuit( + &mut self, + jump: JumpKind, + left: &Expression, + right: &Expression, + ) -> Result<(), EmitError> { + self.emit_expression(left)?; + let jump = ForwardJumpEmitter { jump: jump }.emit(self); + self.emit.pop(); + self.emit_expression(right)?; + jump.patch_merge(self); + return Ok(()); + } + + fn emit_object_expression(&mut self, object: &ObjectExpression) -> Result<(), EmitError> { + ObjectEmitter { + properties: object.properties.iter(), + prop: |emitter, state, prop| emitter.emit_object_property(state, prop), + } + .emit(self) + } + + fn emit_object_property( + &mut self, + state: &mut ObjectEmitterState, + property: &ObjectProperty, + ) -> Result<(), EmitError> { + match property { + ObjectProperty::NamedObjectProperty(NamedObjectProperty::DataProperty( + DataProperty { + property_name, + expression: prop_value, + .. + }, + )) => match property_name { + PropertyName::StaticPropertyName(StaticPropertyName { value, .. }) => { + NamePropertyEmitter { + state, + key: *value, + value: |emitter| emitter.emit_expression(prop_value), + } + .emit(self)?; + } + PropertyName::StaticNumericPropertyName(NumericLiteral { value, .. }) => { + IndexPropertyEmitter { + state, + key: *value, + value: |emitter| emitter.emit_expression(prop_value), + } + .emit(self)?; + } + PropertyName::ComputedPropertyName(ComputedPropertyName { + expression: prop_key, + .. + }) => { + ComputedPropertyEmitter { + state, + key: |emitter| emitter.emit_expression(prop_key), + value: |emitter| emitter.emit_expression(prop_value), + } + .emit(self)?; + } + }, + _ => return Err(EmitError::NotImplemented("TODO: non data property")), + } + Ok(()) + } + + fn emit_array_expression(&mut self, array: &ArrayExpression) -> Result<(), EmitError> { + ArrayEmitter { + elements: array.elements.iter(), + elem_kind: |e| match e { + ArrayExpressionElement::Expression(..) => ArrayElementKind::Normal, + ArrayExpressionElement::Elision { .. } => ArrayElementKind::Elision, + ArrayExpressionElement::SpreadElement(..) => ArrayElementKind::Spread, + }, + elem: |emitter, state, elem| { + match elem { + ArrayExpressionElement::Expression(expr) => { + ArrayElementEmitter { + state, + elem: |emitter| emitter.emit_expression(expr), + } + .emit(emitter)?; + } + ArrayExpressionElement::Elision { .. 
} => { + ArrayElisionEmitter { state }.emit(emitter); + } + ArrayExpressionElement::SpreadElement(expr) => { + ArraySpreadEmitter { + state, + elem: |emitter| emitter.emit_expression(expr), + } + .emit(emitter)?; + } + } + Ok(()) + }, + } + .emit(self) + } + + fn emit_conditional_expression( + &mut self, + test: &Expression, + consequent: &Expression, + alternate: &Expression, + ) -> Result<(), EmitError> { + self.emit_expression(test)?; + + let else_jump = ForwardJumpEmitter { + jump: JumpKind::JumpIfFalse, + } + .emit(self); + + // Then branch + self.emit.jump_target(); + self.emit_expression(consequent)?; + + let finally_jump = ForwardJumpEmitter { + jump: JumpKind::Goto, + } + .emit(self); + + // Else branch + else_jump.patch_not_merge(self); + self.emit_expression(alternate)?; + + // Merge point + finally_jump.patch_merge(self); + + Ok(()) + } + + fn emit_assignment_expression( + &mut self, + binding: &AssignmentTarget, + expression: &Expression, + ) -> Result<(), EmitError> { + AssignmentEmitter { + lhs: |emitter| match binding { + AssignmentTarget::SimpleAssignmentTarget( + SimpleAssignmentTarget::AssignmentTargetIdentifier( + AssignmentTargetIdentifier { name, .. }, + ), + ) => Ok(NameReferenceEmitter { name: name.value }.emit_for_assignment(emitter)), + _ => Err(EmitError::NotImplemented( + "non-identifier assignment target", + )), + }, + rhs: |emitter| emitter.emit_expression(expression), + } + .emit(self) + } + + fn emit_identifier_expression(&mut self, ast: &IdentifierExpression) { + let name = ast.name.value; + GetNameEmitter { name }.emit(self); + } + + fn emit_member_expression(&mut self, ast: &MemberExpression) -> Result<(), EmitError> { + match ast { + MemberExpression::ComputedMemberExpression(ComputedMemberExpression { + object: ExpressionOrSuper::Expression(object), + expression, + .. + }) => GetElemEmitter { + obj: |emitter| emitter.emit_expression(object), + key: |emitter| emitter.emit_expression(expression), + } + .emit(self), + + MemberExpression::ComputedMemberExpression(ComputedMemberExpression { + object: ExpressionOrSuper::Super { .. }, + expression, + .. + }) => GetSuperElemEmitter { + this: |emitter| emitter.emit_this(), + key: |emitter| emitter.emit_expression(expression), + } + .emit(self), + + MemberExpression::StaticMemberExpression(StaticMemberExpression { + object: ExpressionOrSuper::Expression(object), + property, + .. + }) => GetPropEmitter { + obj: |emitter| emitter.emit_expression(object), + key: property.value, + } + .emit(self), + + MemberExpression::StaticMemberExpression(StaticMemberExpression { + object: ExpressionOrSuper::Super { .. }, + property, + .. + }) => GetSuperPropEmitter { + this: |emitter| emitter.emit_this(), + key: property.value, + } + .emit(self), + + MemberExpression::PrivateFieldExpression(PrivateFieldExpression { .. 
}) => { + Err(EmitError::NotImplemented("PrivateFieldExpression")) + } + } + } + + fn emit_new_expression( + &mut self, + callee: &Expression, + arguments: &Arguments, + ) -> Result<(), EmitError> { + for arg in &arguments.args { + if let Argument::SpreadElement(_) = arg { + return Err(EmitError::NotImplemented("TODO: SpreadNew")); + } + } + + NewEmitter { + callee: |emitter| emitter.emit_expression(callee), + arguments: |emitter| { + emitter.emit_arguments(arguments)?; + Ok(arguments.args.len()) + }, + } + .emit(self)?; + + Ok(()) + } + + fn emit_call_expression( + &mut self, + callee: &ExpressionOrSuper, + arguments: &Arguments, + ) -> Result<(), EmitError> { + // Don't do super handling in an emit_expresion_or_super because the bytecode heavily + // depends on how you're using the super + + CallEmitter { + callee: |emitter| match callee { + ExpressionOrSuper::Expression(expr) => match &**expr { + Expression::IdentifierExpression(IdentifierExpression { name, .. }) => { + if name.value == CommonSourceAtomSetIndices::eval() { + return Err(EmitError::NotImplemented("TODO: direct eval")); + } + Ok(NameReferenceEmitter { name: name.value }.emit_for_call(emitter)) + } + + Expression::MemberExpression(MemberExpression::StaticMemberExpression( + StaticMemberExpression { + object: ExpressionOrSuper::Expression(object), + property, + .. + }, + )) => PropReferenceEmitter { + obj: |emitter| emitter.emit_expression(object), + key: property.value, + } + .emit_for_call(emitter), + + Expression::MemberExpression(MemberExpression::ComputedMemberExpression( + ComputedMemberExpression { + object: ExpressionOrSuper::Expression(object), + expression, + .. + }, + )) => ElemReferenceEmitter { + obj: |emitter| emitter.emit_expression(object), + key: |emitter| emitter.emit_expression(expression), + } + .emit_for_call(emitter), + + _ => { + return Err(EmitError::NotImplemented( + "TODO: Call (only global functions are supported)", + )); + } + }, + _ => { + return Err(EmitError::NotImplemented("TODO: Super")); + } + }, + arguments: |emitter| { + emitter.emit_arguments(arguments)?; + Ok(arguments.args.len()) + }, + } + .emit(self)?; + + Ok(()) + } + + fn emit_arguments(&mut self, ast: &Arguments) -> Result<(), EmitError> { + for argument in &ast.args { + self.emit_argument(argument)?; + } + + Ok(()) + } + + fn emit_argument(&mut self, ast: &Argument) -> Result<(), EmitError> { + match ast { + Argument::Expression(ast) => self.emit_expression(ast)?, + Argument::SpreadElement(_) => { + return Err(EmitError::NotImplemented("TODO: SpreadElement")); + } + } + + Ok(()) + } +} diff --git a/third_party/rust/jsparagus-emitter/src/block_emitter.rs b/third_party/rust/jsparagus-emitter/src/block_emitter.rs new file mode 100644 index 0000000000..b6400a5832 --- /dev/null +++ b/third_party/rust/jsparagus-emitter/src/block_emitter.rs @@ -0,0 +1,41 @@ +use crate::ast_emitter::AstEmitter; +use crate::emitter::EmitError; +use stencil::scope::ScopeIndex; + +pub struct BlockEmitter<'a, FuncT, FuncF, StmtT, StmtF> +where + FuncF: Fn(&mut AstEmitter, &FuncT) -> Result<(), EmitError>, + StmtF: Fn(&mut AstEmitter, &StmtT) -> Result<(), EmitError>, +{ + pub scope_index: ScopeIndex, + pub functions: std::slice::Iter<'a, FuncT>, + pub function: FuncF, + pub statements: std::slice::Iter<'a, StmtT>, + pub statement: StmtF, +} + +impl<'a, FuncT, FuncF, StmtT, StmtF> BlockEmitter<'a, FuncT, FuncF, StmtT, StmtF> +where + FuncF: Fn(&mut AstEmitter, &FuncT) -> Result<(), EmitError>, + StmtF: Fn(&mut AstEmitter, &StmtT) -> Result<(), EmitError>, +{ + 
pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + emitter.scope_stack.enter_lexical( + &mut emitter.emit, + &mut emitter.compilation_info.scope_data_map, + self.scope_index, + ); + + for fun in self.functions { + (self.function)(emitter, fun)?; + } + + for statement in self.statements { + (self.statement)(emitter, statement)?; + } + + emitter.scope_stack.leave_lexical(&mut emitter.emit); + + Ok(()) + } +} diff --git a/third_party/rust/jsparagus-emitter/src/compilation_info.rs b/third_party/rust/jsparagus-emitter/src/compilation_info.rs new file mode 100644 index 0000000000..1feeb70862 --- /dev/null +++ b/third_party/rust/jsparagus-emitter/src/compilation_info.rs @@ -0,0 +1,45 @@ +use ast::associated_data::AssociatedData; +use ast::source_atom_set::SourceAtomSet; +use ast::source_slice_list::SourceSliceList; +use ast::types::Function; +use scope::data::FunctionDeclarationPropertyMap; +use std::collections::HashMap; +use stencil::regexp::RegExpList; +use stencil::scope::ScopeDataMap; +use stencil::script::{ImmutableScriptDataList, ScriptStencilIndex, ScriptStencilList}; + +pub struct CompilationInfo<'alloc> { + pub atoms: SourceAtomSet<'alloc>, + pub slices: SourceSliceList<'alloc>, + pub regexps: RegExpList, + pub scope_data_map: ScopeDataMap, + pub function_declarations: HashMap<ScriptStencilIndex, &'alloc Function<'alloc>>, + pub function_stencil_indices: AssociatedData<ScriptStencilIndex>, + pub function_declaration_properties: FunctionDeclarationPropertyMap, + pub scripts: ScriptStencilList, + pub script_data_list: ImmutableScriptDataList, +} + +impl<'alloc> CompilationInfo<'alloc> { + pub fn new( + atoms: SourceAtomSet<'alloc>, + slices: SourceSliceList<'alloc>, + scope_data_map: ScopeDataMap, + function_declarations: HashMap<ScriptStencilIndex, &'alloc Function<'alloc>>, + function_stencil_indices: AssociatedData<ScriptStencilIndex>, + function_declaration_properties: FunctionDeclarationPropertyMap, + scripts: ScriptStencilList, + ) -> Self { + Self { + atoms, + slices, + regexps: RegExpList::new(), + scope_data_map, + function_declarations, + function_stencil_indices, + function_declaration_properties, + scripts, + script_data_list: ImmutableScriptDataList::new(), + } + } +} diff --git a/third_party/rust/jsparagus-emitter/src/control_structures.rs b/third_party/rust/jsparagus-emitter/src/control_structures.rs new file mode 100644 index 0000000000..f19f423434 --- /dev/null +++ b/third_party/rust/jsparagus-emitter/src/control_structures.rs @@ -0,0 +1,708 @@ +use crate::ast_emitter::AstEmitter; +use crate::emitter::EmitError; +use crate::emitter::InstructionWriter; +use crate::emitter_scope::{EmitterScope, EmitterScopeDepth}; +use ast::source_atom_set::SourceAtomSetIndex; +use stencil::bytecode_offset::{BytecodeOffset, BytecodeOffsetDiff}; + +// Control structures + +#[derive(Debug)] +pub enum JumpKind { + Coalesce, + LogicalAnd, + LogicalOr, + JumpIfFalse, + Goto, +} + +trait Jump { + fn jump_kind(&mut self) -> &JumpKind { + &JumpKind::Goto + } + + fn should_fallthrough(&mut self) -> bool { + // a fallthrough occurs if the jump is a conditional jump and if the + // condition doesn't met, the execution goes to the next opcode + // instead of the target of the jump. + match self.jump_kind() { + JumpKind::Coalesce { .. } + | JumpKind::LogicalOr { .. } + | JumpKind::LogicalAnd { .. } + | JumpKind::JumpIfFalse { .. } => true, + + JumpKind::Goto { .. 
} => false, + } + } + + fn emit_jump(&mut self, emitter: &mut AstEmitter) { + // in the c++ bytecode emitter, the jumplist is emitted + // and four bytes are used in order to save memory. We are not using that + // here, so instead we are using a placeholder offset set to 0, which will + // be updated later in patch_and_emit_jump_target. + let placeholder_offset = BytecodeOffsetDiff::uninitialized(); + match self.jump_kind() { + JumpKind::Coalesce { .. } => { + emitter.emit.coalesce(placeholder_offset); + } + JumpKind::LogicalOr { .. } => { + emitter.emit.or_(placeholder_offset); + } + JumpKind::LogicalAnd { .. } => { + emitter.emit.and_(placeholder_offset); + } + JumpKind::JumpIfFalse { .. } => { + emitter.emit.jump_if_false(placeholder_offset); + } + JumpKind::Goto { .. } => { + emitter.emit.goto_(placeholder_offset); + } + } + + // The JITs rely on a jump target being emitted after the + // conditional jump + if self.should_fallthrough() { + emitter.emit.jump_target(); + } + } +} + +#[derive(Debug)] +#[must_use] +pub struct JumpPatchEmitter { + offsets: Vec<BytecodeOffset>, + depth: usize, +} + +impl JumpPatchEmitter { + pub fn patch_merge(self, emitter: &mut AstEmitter) { + emitter.emit.emit_jump_target_and_patch(&self.offsets); + + // If the previous opcode fall-through, it should have the same stack + // depth. + debug_assert!(emitter.emit.stack_depth() == self.depth); + } + + pub fn patch_not_merge(self, emitter: &mut AstEmitter) { + emitter.emit.emit_jump_target_and_patch(&self.offsets); + // If the previous opcode doesn't fall-through, overwrite the stack + // depth. + emitter.emit.set_stack_depth(self.depth); + } +} + +// Struct for emitting bytecode for forward jump. +#[derive(Debug)] +pub struct ForwardJumpEmitter { + pub jump: JumpKind, +} +impl Jump for ForwardJumpEmitter { + fn jump_kind(&mut self) -> &JumpKind { + &self.jump + } +} + +impl ForwardJumpEmitter { + pub fn emit(&mut self, emitter: &mut AstEmitter) -> JumpPatchEmitter { + let offsets = vec![emitter.emit.bytecode_offset()]; + self.emit_jump(emitter); + let depth = emitter.emit.stack_depth(); + + JumpPatchEmitter { offsets, depth } + } +} + +pub trait Breakable { + fn register_break(&mut self, offset: BytecodeOffset); + fn emit_break_target_and_patch(&mut self, emit: &mut InstructionWriter); +} + +pub trait Continuable { + fn register_continue(&mut self, offset: BytecodeOffset); + + fn emit_continue_target_and_patch(&mut self, emit: &mut InstructionWriter); +} + +#[derive(Debug, PartialEq)] +pub struct LoopControl { + enclosing_emitter_scope_depth: EmitterScopeDepth, + breaks: Vec<BytecodeOffset>, + continues: Vec<BytecodeOffset>, + head: BytecodeOffset, +} + +impl Breakable for LoopControl { + fn register_break(&mut self, offset: BytecodeOffset) { + // offset points to the location of the jump, which will need to be updated + // once we emit the jump target in emit_jump_target_and_patch + self.breaks.push(offset); + } + + fn emit_break_target_and_patch(&mut self, emit: &mut InstructionWriter) { + emit.emit_jump_target_and_patch(&self.breaks); + } +} + +impl Continuable for LoopControl { + fn register_continue(&mut self, offset: BytecodeOffset) { + // offset points to the location of the jump, which will need to be updated + // once we emit the jump target in emit_jump_target_and_patch + self.continues.push(offset); + } + + fn emit_continue_target_and_patch(&mut self, emit: &mut InstructionWriter) { + emit.emit_jump_target_and_patch(&self.continues); + } +} + +impl LoopControl { + pub fn new( + emit: &mut 
InstructionWriter, + depth: u8, + enclosing_emitter_scope_depth: EmitterScopeDepth, + ) -> Self { + let offset = LoopControl::open_loop(emit, depth); + Self { + enclosing_emitter_scope_depth, + breaks: Vec::new(), + continues: Vec::new(), + head: offset, + } + } + + fn open_loop(emit: &mut InstructionWriter, depth: u8) -> BytecodeOffset { + // Insert a Nop if needed to ensure the script does not start with a + // JSOp::LoopHead. This avoids JIT issues with prologue code + try notes + // or OSR. See bug 1602390 and bug 1602681. + let mut offset = emit.bytecode_offset(); + if offset.offset == 0 { + emit.nop(); + offset = emit.bytecode_offset(); + } + // emit the jump target for the loop head + emit.loop_head(depth); + offset + } + + pub fn close_loop(&mut self, emit: &mut InstructionWriter) { + let offset = emit.bytecode_offset(); + let diff_to_head = self.head.diff_from(offset); + + emit.goto_(diff_to_head); + } +} + +#[derive(Debug, PartialEq)] +pub struct LabelControl { + enclosing_emitter_scope_depth: EmitterScopeDepth, + name: SourceAtomSetIndex, + breaks: Vec<BytecodeOffset>, + head: BytecodeOffset, +} + +impl Breakable for LabelControl { + fn register_break(&mut self, offset: BytecodeOffset) { + // offset points to the location of the jump, which will need to be updated + // once we emit the jump target in emit_jump_target_and_patch + self.breaks.push(offset); + } + + fn emit_break_target_and_patch(&mut self, emit: &mut InstructionWriter) { + if !self.breaks.is_empty() { + emit.emit_jump_target_and_patch(&self.breaks); + } + } +} + +impl LabelControl { + pub fn new( + name: SourceAtomSetIndex, + emit: &mut InstructionWriter, + enclosing_emitter_scope_depth: EmitterScopeDepth, + ) -> Self { + let offset = emit.bytecode_offset(); + Self { + enclosing_emitter_scope_depth, + name, + head: offset, + breaks: Vec::new(), + } + } +} + +#[derive(Debug, PartialEq)] +pub enum Control { + Loop(LoopControl), + Label(LabelControl), +} + +impl Control { + fn enclosing_emitter_scope_depth(&self) -> EmitterScopeDepth { + match self { + Control::Loop(control) => control.enclosing_emitter_scope_depth, + Control::Label(control) => control.enclosing_emitter_scope_depth, + } + } +} + +// Compared to C++ impl, this uses explicit stack struct, +// given Rust cannot store a reference of stack-allocated object into +// another object that has longer-lifetime. 
+pub struct ControlStructureStack { + control_stack: Vec<Control>, +} + +impl ControlStructureStack { + pub fn new() -> Self { + Self { + control_stack: Vec::new(), + } + } + + pub fn open_loop( + &mut self, + emit: &mut InstructionWriter, + enclosing_emitter_scope_depth: EmitterScopeDepth, + ) { + let depth = (self.control_stack.len() + 1) as u8; + + let new_loop = Control::Loop(LoopControl::new(emit, depth, enclosing_emitter_scope_depth)); + + self.control_stack.push(new_loop); + } + + pub fn open_label( + &mut self, + name: SourceAtomSetIndex, + emit: &mut InstructionWriter, + enclosing_emitter_scope_depth: EmitterScopeDepth, + ) { + let new_label = LabelControl::new(name, emit, enclosing_emitter_scope_depth); + self.control_stack.push(Control::Label(new_label)); + } + + pub fn register_break(&mut self, offset: BytecodeOffset) { + let innermost = self.innermost(); + + match innermost { + Control::Label(control) => control.register_break(offset), + Control::Loop(control) => control.register_break(offset), + } + } + + pub fn register_continue(&mut self, offset: BytecodeOffset) { + let innermost = self.innermost(); + match innermost { + Control::Label(_) => panic!( + "Should not register continue on a label. This should be caught by early errors." + ), + Control::Loop(control) => control.register_continue(offset), + } + } + + pub fn register_labelled_break(&mut self, label: SourceAtomSetIndex, offset: BytecodeOffset) { + match self.find_labelled_control(label) { + Control::Label(control) => control.register_break(offset), + Control::Loop(control) => control.register_break(offset), + } + } + + pub fn register_labelled_continue( + &mut self, + label: SourceAtomSetIndex, + offset: BytecodeOffset, + ) { + if let Some(control) = self.find_labelled_loop(label) { + control.register_continue(offset); + } else { + panic!( + "A labelled continue was passed, but no label was found. This should be caught by early errors" + ) + } + } + + pub fn find_labelled_loop(&mut self, label: SourceAtomSetIndex) -> Option<&mut LoopControl> { + let label_index = self.find_labelled_index(label); + // To find the associated loop for a label, we can take the label's index + 1, as the + // associated loop should always be in the position after the label. + let control = self.control_stack.get_mut(label_index + 1); + match control { + Some(Control::Loop(control)) => Some(control), + _ => None, + } + } + + pub fn find_labelled_control(&mut self, label: SourceAtomSetIndex) -> &mut Control { + self.control_stack + .iter_mut() + .find(|control| match control { + Control::Label(control) => { + if control.name == label { + return true; + } + false + } + _ => false, + }) + .expect("there should be a control with this label") + } + + pub fn find_labelled_index(&mut self, label: SourceAtomSetIndex) -> usize { + self.control_stack + .iter() + .position(|control| match control { + Control::Label(control) => { + if control.name == label { + return true; + } + false + } + _ => false, + }) + .expect("there should be a control with this label") + } + + pub fn emit_continue_target_and_patch(&mut self, emit: &mut InstructionWriter) { + let innermost = self.innermost(); + match innermost { + Control::Label(_) => panic!( + "Should not emit continue on a label. 
This should be caught by JS early errors" + ), + Control::Loop(control) => control.emit_continue_target_and_patch(emit), + } + } + + fn pop_control(&mut self) -> Control { + self.control_stack + .pop() + .expect("There should be at least one control structure") + } + + pub fn close_loop(&mut self, emit: &mut InstructionWriter) { + let mut innermost = self.pop_control(); + match innermost { + Control::Label(_) => panic!("Tried to close a loop, found a label."), + Control::Loop(ref mut control) => { + control.close_loop(emit); + control.emit_break_target_and_patch(emit); + } + } + } + + pub fn close_label(&mut self, emit: &mut InstructionWriter) { + let mut innermost = self.pop_control(); + match innermost { + Control::Label(ref mut control) => control.emit_break_target_and_patch(emit), + Control::Loop(_) => panic!("Tried to close a label, found a loop."), + } + } + + pub fn innermost(&mut self) -> &mut Control { + self.control_stack + .last_mut() + .expect("There should be at least one loop") + } +} + +struct RegisteredJump<F1> +where + F1: Fn(&mut AstEmitter, BytecodeOffset), +{ + kind: JumpKind, + // This callback registers the bytecode offset of the jump in a list of bytecode offsets + // associated with a loop or a label. + register_offset: F1, +} + +impl<F1> Jump for RegisteredJump<F1> +where + F1: Fn(&mut AstEmitter, BytecodeOffset), +{ + fn jump_kind(&mut self) -> &JumpKind { + &self.kind + } +} + +impl<F1> RegisteredJump<F1> +where + F1: Fn(&mut AstEmitter, BytecodeOffset), +{ + pub fn emit(&mut self, emitter: &mut AstEmitter) { + let offset = emitter.emit.bytecode_offset(); + self.emit_jump(emitter); + (self.register_offset)(emitter, offset); + } +} + +// Struct for multiple jumps that point to the same target. Examples are breaks and loop conditions. 
+pub struct BreakEmitter { + pub label: Option<SourceAtomSetIndex>, +} + +impl BreakEmitter { + pub fn emit(&mut self, emitter: &mut AstEmitter) { + NonLocalExitControl { + registered_jump: RegisteredJump { + kind: JumpKind::Goto, + register_offset: |emitter, offset| match self.label { + Some(label) => emitter.control_stack.register_labelled_break(label, offset), + None => emitter.control_stack.register_break(offset), + }, + }, + } + .emit(emitter, self.label); + } +} + +pub struct ContinueEmitter { + pub label: Option<SourceAtomSetIndex>, +} + +impl ContinueEmitter { + pub fn emit(&mut self, emitter: &mut AstEmitter) { + NonLocalExitControl { + registered_jump: RegisteredJump { + kind: JumpKind::Goto, + register_offset: |emitter, offset| match self.label { + Some(label) => emitter + .control_stack + .register_labelled_continue(label, offset), + None => emitter.control_stack.register_continue(offset), + }, + }, + } + .emit(emitter, self.label); + } +} + +pub struct WhileEmitter<F1, F2> +where + F1: Fn(&mut AstEmitter) -> Result<(), EmitError>, + F2: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub enclosing_emitter_scope_depth: EmitterScopeDepth, + pub test: F1, + pub block: F2, +} +impl<F1, F2> WhileEmitter<F1, F2> +where + F1: Fn(&mut AstEmitter) -> Result<(), EmitError>, + F2: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub fn emit(&mut self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + emitter + .control_stack + .open_loop(&mut emitter.emit, self.enclosing_emitter_scope_depth); + + (self.test)(emitter)?; + + // add a registered jump for the conditional statement + RegisteredJump { + kind: JumpKind::JumpIfFalse, + register_offset: |emitter, offset| emitter.control_stack.register_break(offset), + } + .emit(emitter); + + (self.block)(emitter)?; + + emitter + .control_stack + .emit_continue_target_and_patch(&mut emitter.emit); + + // Merge point + emitter.control_stack.close_loop(&mut emitter.emit); + Ok(()) + } +} + +pub struct DoWhileEmitter<F1, F2> +where + F1: Fn(&mut AstEmitter) -> Result<(), EmitError>, + F2: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub enclosing_emitter_scope_depth: EmitterScopeDepth, + pub block: F2, + pub test: F1, +} +impl<F1, F2> DoWhileEmitter<F1, F2> +where + F1: Fn(&mut AstEmitter) -> Result<(), EmitError>, + F2: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub fn emit(&mut self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + emitter + .control_stack + .open_loop(&mut emitter.emit, self.enclosing_emitter_scope_depth); + + (self.block)(emitter)?; + + emitter + .control_stack + .emit_continue_target_and_patch(&mut emitter.emit); + + (self.test)(emitter)?; + + // add a registered jump for the conditional statement + RegisteredJump { + kind: JumpKind::JumpIfFalse, + register_offset: |emitter, offset| emitter.control_stack.register_break(offset), + } + .emit(emitter); + + // Merge point after cond fails + emitter.control_stack.close_loop(&mut emitter.emit); + Ok(()) + } +} + +pub struct CForEmitter<'a, CondT, ExprT, InitFn, TestFn, UpdateFn, BlockFn> +where + InitFn: Fn(&mut AstEmitter, &CondT) -> Result<(), EmitError>, + TestFn: Fn(&mut AstEmitter, &ExprT) -> Result<(), EmitError>, + UpdateFn: Fn(&mut AstEmitter, &ExprT) -> Result<(), EmitError>, + BlockFn: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub enclosing_emitter_scope_depth: EmitterScopeDepth, + pub maybe_init: &'a Option<CondT>, + pub maybe_test: &'a Option<ExprT>, + pub maybe_update: &'a Option<ExprT>, + pub init: InitFn, + pub test: TestFn, + pub 
update: UpdateFn, + pub block: BlockFn, +} +impl<'a, CondT, ExprT, InitFn, TestFn, UpdateFn, BlockFn> + CForEmitter<'a, CondT, ExprT, InitFn, TestFn, UpdateFn, BlockFn> +where + InitFn: Fn(&mut AstEmitter, &CondT) -> Result<(), EmitError>, + TestFn: Fn(&mut AstEmitter, &ExprT) -> Result<(), EmitError>, + UpdateFn: Fn(&mut AstEmitter, &ExprT) -> Result<(), EmitError>, + BlockFn: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub fn emit(&mut self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + // Initialize the variable either by running an expression or assigning + // ie) for(foo(); <test>; <update>) or for(var x = 0; <test>; <update) + if let Some(init) = self.maybe_init { + (self.init)(emitter, init)?; + } + + // Emit loop head + emitter + .control_stack + .open_loop(&mut emitter.emit, self.enclosing_emitter_scope_depth); + + // if there is a test condition (ie x < 3) emit it + if let Some(test) = self.maybe_test { + (self.test)(emitter, &test)?; + + // add a registered jump for the conditional statement + RegisteredJump { + kind: JumpKind::JumpIfFalse, + register_offset: |emitter, offset| emitter.control_stack.register_break(offset), + } + .emit(emitter); + } + + // emit the body of the for loop. + (self.block)(emitter)?; + + // emit the target for any continues emitted in the body before evaluating + // the update (if there is one) and continuing from the top of the loop. + emitter + .control_stack + .emit_continue_target_and_patch(&mut emitter.emit); + + if let Some(update) = self.maybe_update { + (self.update)(emitter, &update)?; + } + + // Merge point after test fails (or there is a break statement) + emitter.control_stack.close_loop(&mut emitter.emit); + Ok(()) + } +} + +pub struct LabelEmitter<F1> +where + F1: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub enclosing_emitter_scope_depth: EmitterScopeDepth, + pub name: SourceAtomSetIndex, + pub body: F1, +} + +impl<F1> LabelEmitter<F1> +where + F1: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub fn emit(&mut self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + emitter.control_stack.open_label( + self.name, + &mut emitter.emit, + self.enclosing_emitter_scope_depth, + ); + + (self.body)(emitter)?; + + emitter.control_stack.close_label(&mut emitter.emit); + Ok(()) + } +} + +pub struct NonLocalExitControl<F1> +where + F1: Fn(&mut AstEmitter, BytecodeOffset), +{ + registered_jump: RegisteredJump<F1>, +} + +impl<F1> NonLocalExitControl<F1> +where + F1: Fn(&mut AstEmitter, BytecodeOffset), +{ + pub fn emit(&mut self, emitter: &mut AstEmitter, label: Option<SourceAtomSetIndex>) { + // Step 1: find the enclosing emitter scope + let enclosing_emitter_scope_depth = match label { + Some(label) => emitter + .control_stack + .find_labelled_control(label) + .enclosing_emitter_scope_depth(), + None => emitter + .control_stack + .innermost() + .enclosing_emitter_scope_depth(), + }; + + // Step 2: find the current emitter scope + let mut parent_scope_note_index = emitter.scope_stack.get_current_scope_note_index(); + + // Step 3: iterate over scopes that have been entered since the + // enclosing scope, add a scope note hole for each one as we exit + let mut holes = Vec::new(); + for item in emitter + .scope_stack + .walk_up_to_including(enclosing_emitter_scope_depth) + { + // We're entering `item.outer` as a scope hole of `item.inner`. 
+ + let hole_scope_note_index = match item.inner { + EmitterScope::Global(_) => panic!("global shouldn't be enclosed by other scope"), + EmitterScope::Lexical(scope) => emitter.emit.enter_scope_hole_from_lexical( + &item.outer.scope_note_index(), + parent_scope_note_index, + scope.has_environment_object(), + ), + }; + holes.push(hole_scope_note_index); + parent_scope_note_index = Some(hole_scope_note_index); + } + + // Step 4: perform the jump + self.registered_jump.emit(emitter); + + // Step 5: close each scope hole after the jump + for hole_scope_note_index in holes.iter() { + emitter.emit.leave_scope_hole(*hole_scope_note_index); + } + } +} diff --git a/third_party/rust/jsparagus-emitter/src/dis.rs b/third_party/rust/jsparagus-emitter/src/dis.rs new file mode 100644 index 0000000000..f9978b8d9c --- /dev/null +++ b/third_party/rust/jsparagus-emitter/src/dis.rs @@ -0,0 +1,34 @@ +use std::convert::TryFrom; +use std::fmt::Write; +use stencil::opcode::Opcode; + +/// Return a string form of the given bytecode. +pub fn dis(bc: &[u8]) -> String { + let mut result = String::new(); + let mut iter = bc.iter(); + let mut offset = 0; + loop { + let len = match iter.next() { + Some(byte) => match Opcode::try_from(*byte) { + Ok(op) => { + write!(&mut result, "{}", format!("{:05}: {:?}", offset, op)).unwrap(); + offset = offset + op.instruction_length(); + op.instruction_length() + } + Err(()) => { + write!(&mut result, "{}", byte).unwrap(); + 1 + } + }, + None => break, + }; + + for _ in 1..len { + write!(&mut result, " {}", iter.next().unwrap()).unwrap(); + } + + writeln!(&mut result).unwrap(); + } + + result +} diff --git a/third_party/rust/jsparagus-emitter/src/emitter.rs b/third_party/rust/jsparagus-emitter/src/emitter.rs new file mode 100644 index 0000000000..1bcefab1ae --- /dev/null +++ b/third_party/rust/jsparagus-emitter/src/emitter.rs @@ -0,0 +1,1461 @@ +//! Low-level bytecode emitter, used by ast_builder. +//! +//! This API makes it easy to emit correct individual bytecode instructions. + +// Most of this functionality isn't used yet. +#![allow(dead_code)] + +use ast::source_atom_set::SourceAtomSetIndex; +use byteorder::{ByteOrder, LittleEndian}; +use std::cmp; +use std::collections::HashMap; +use std::convert::TryFrom; +use std::convert::TryInto; +use std::fmt; +use stencil::bytecode_offset::{BytecodeOffset, BytecodeOffsetDiff}; +use stencil::frame_slot::FrameSlot; +use stencil::gcthings::{GCThingIndex, GCThingList}; +use stencil::opcode::Opcode; +use stencil::regexp::RegExpIndex; +use stencil::scope::ScopeIndex; +use stencil::scope_notes::{ScopeNoteIndex, ScopeNoteList}; +use stencil::script::{ + ImmutableScriptData, ImmutableScriptDataList, ScriptStencil, ScriptStencilIndex, SourceExtent, +}; + +// WARNING +// The following section is generated by update_stencil.py. +// Do mot modify manually. 
+// +// @@@@ BEGIN TYPES @@@@ +#[derive(Debug, Clone, Copy)] +pub enum AsyncFunctionResolveKind { + Fulfill = 0, + Reject = 1, +} + +#[derive(Debug, Clone, Copy)] +pub enum CheckIsObjectKind { + IteratorNext = 0, + IteratorReturn = 1, + IteratorThrow = 2, + GetIterator = 3, + GetAsyncIterator = 4, +} + +#[derive(Debug, Clone, Copy)] +pub enum CompletionKind { + Normal = 0, + Return = 1, + Throw = 2, +} + +#[derive(Debug, Clone, Copy)] +pub enum FunctionPrefixKind { + None = 0, + Get = 1, + Set = 2, +} + +#[derive(Debug, Clone, Copy)] +pub enum GeneratorResumeKind { + Next = 0, + Throw = 1, + Return = 2, +} + +#[derive(Debug, Clone, Copy)] +pub enum ThrowMsgKind { + AssignToCall = 0, + IteratorNoThrow = 1, + CantDeleteSuper = 2, + PrivateDoubleInit = 3, + PrivateBrandDoubleInit = 4, + MissingPrivateOnGet = 5, + MissingPrivateOnSet = 6, + AssignToPrivateMethod = 7, + DecoratorInvalidReturnType = 8, +} + +#[derive(Debug, Clone, Copy)] +pub enum ThrowCondition { + ThrowHas = 0, + ThrowHasNot = 1, + OnlyCheckRhs = 2, +} + +#[derive(Debug, Clone, Copy)] +pub enum TryNoteKind { + Catch = 0, + Finally = 1, + ForIn = 2, + Destructuring = 3, + ForOf = 4, + ForOfIterClose = 5, + Loop = 6, +} + +#[derive(Debug, Clone, Copy)] +pub enum SymbolCode { + IsConcatSpreadable = 0, + Iterator = 1, + Match = 2, + Replace = 3, + Search = 4, + Species = 5, + HasInstance = 6, + Split = 7, + ToPrimitive = 8, + ToStringTag = 9, + Unscopables = 10, + AsyncIterator = 11, + MatchAll = 12, +} + +#[derive(Debug, Clone, Copy)] +pub enum SrcNoteType { + Null = 0, + AssignOp = 1, + ColSpan = 2, + NewLine = 3, + SetLine = 4, + Breakpoint = 5, + StepSep = 6, + Unused7 = 7, + XDelta = 8, +} + +// @@@@ END TYPES @@@@ + +#[allow(non_camel_case_types)] +pub type u24 = u32; + +/// Low-level bytecode emitter. +pub struct InstructionWriter { + bytecode: Vec<u8>, + + /// To de-duplicate atoms in gcthings list, note the index for each atom. + atom_to_gcindex_map: HashMap<SourceAtomSetIndex, GCThingIndex>, + + gcthings: GCThingList, + scope_notes: ScopeNoteList, + + last_jump_target_offset: Option<BytecodeOffset>, + + main_offset: BytecodeOffset, + + /// The maximum number of fixed frame slots. + max_fixed_slots: FrameSlot, + + /// Stack depth after the instructions emitted so far. + stack_depth: usize, + + /// Maximum stack_depth at any point in the instructions emitted so far. + maximum_stack_depth: usize, + + body_scope_index: Option<GCThingIndex>, + + /// Number of JOF_IC instructions emitted so far. + num_ic_entries: usize, +} + +#[derive(Debug)] +pub struct EmitOptions { + pub no_script_rval: bool, + pub extent: SourceExtent, +} +impl EmitOptions { + pub fn new(extent: SourceExtent) -> Self { + Self { + no_script_rval: false, + extent, + } + } +} + +/// The error of bytecode-compilation. 
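+///
+/// `NotImplemented` is currently the only variant; it is returned for syntax
+/// and features that this emitter does not handle yet.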
+#[derive(Clone, Debug)] +pub enum EmitError { + NotImplemented(&'static str), +} + +impl fmt::Display for EmitError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + EmitError::NotImplemented(message) => write!(f, "not implemented: {}", message), + } + } +} + +impl InstructionWriter { + pub fn new() -> Self { + Self { + bytecode: Vec::new(), + gcthings: GCThingList::new(), + atom_to_gcindex_map: HashMap::new(), + scope_notes: ScopeNoteList::new(), + last_jump_target_offset: None, + main_offset: BytecodeOffset::from(0usize), + max_fixed_slots: FrameSlot::new(0), + stack_depth: 0, + maximum_stack_depth: 0, + body_scope_index: None, + num_ic_entries: 0, + } + } + + fn write_i8(&mut self, value: i8) { + self.write_u8(value as u8); + } + + fn write_u8(&mut self, value: u8) { + self.bytecode.push(value); + } + + fn write_u16(&mut self, value: u16) { + self.bytecode.extend_from_slice(&value.to_le_bytes()); + } + + fn write_u24(&mut self, value: u24) { + let slice = value.to_le_bytes(); + assert!(slice.len() == 4 && slice[3] == 0); + self.bytecode.extend_from_slice(&slice[0..3]); + } + + fn write_i32(&mut self, value: i32) { + self.bytecode.extend_from_slice(&value.to_le_bytes()); + } + + fn write_u32(&mut self, value: u32) { + self.bytecode.extend_from_slice(&value.to_le_bytes()); + } + + fn write_g_c_thing_index(&mut self, value: GCThingIndex) { + self.write_u32(usize::from(value) as u32); + } + + fn write_offset(&mut self, offset: i32) { + self.write_i32(offset); + } + + fn write_bytecode_offset_diff(&mut self, offset: BytecodeOffsetDiff) { + self.write_i32(i32::from(offset)); + } + + fn write_f64(&mut self, val: f64) { + self.bytecode + .extend_from_slice(&val.to_bits().to_le_bytes()); + } + + fn write_ic_index(&mut self) { + self.write_u32(self.num_ic_entries.try_into().unwrap()); + } + + fn emit_op(&mut self, opcode: Opcode) { + let nuses: isize = opcode.nuses(); + assert!(nuses >= 0); + self.emit_op_common(opcode, nuses as usize); + } + + fn emit_argc_op(&mut self, opcode: Opcode, argc: u16) { + assert!(opcode.has_argc()); + assert_eq!(opcode.nuses(), -1); + let nuses = match opcode { + Opcode::Call + | Opcode::CallIgnoresRv + | Opcode::Eval + | Opcode::CallIter + | Opcode::StrictEval => { + // callee, this, arguments... + 2 + (argc as usize) + } + + Opcode::New | Opcode::SuperCall => { + // callee, isConstructing, arguments..., newtarget + 2 + (argc as usize) + 1 + } + + _ => panic!("Unsupported opcode"), + }; + self.emit_op_common(opcode, nuses); + } + + fn emit_pop_n_op(&mut self, opcode: Opcode, n: u16) { + assert_eq!(opcode.nuses(), -1); + debug_assert_eq!(opcode, Opcode::PopN); + self.emit_op_common(opcode, n as usize); + } + + fn emit_op_common(&mut self, opcode: Opcode, nuses: usize) { + assert!( + self.stack_depth >= nuses as usize, + "InstructionWriter misuse! Not enough arguments on the stack." + ); + self.stack_depth -= nuses as usize; + + let ndefs = opcode.ndefs(); + if ndefs > 0 { + self.stack_depth += ndefs; + if self.stack_depth > self.maximum_stack_depth { + self.maximum_stack_depth = self.stack_depth; + } + } + + if opcode.has_ic_entry() { + self.num_ic_entries += 1; + } + + self.bytecode.push(opcode.to_byte()); + } + + fn set_last_jump_target_offset(&mut self, target: BytecodeOffset) { + self.last_jump_target_offset = Some(target); + } + + fn get_end_of_bytecode(&mut self, offset: BytecodeOffset) -> usize { + // find the offset after the end of bytecode associated with this offset. 
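+ // This is done by decoding the opcode stored at `offset` and adding its
+ // instruction length. emit_jump_target_and_patch uses the result to tell
+ // whether the most recent JumpTarget ends exactly at the current offset,
+ // in which case that target is reused instead of emitting a new one.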
+ let target_opcode = Opcode::try_from(self.bytecode[offset.offset]).unwrap(); + offset.offset + target_opcode.instruction_length() + } + + pub fn emit_jump_target_and_patch(&mut self, jumplist: &Vec<BytecodeOffset>) { + let mut target = self.bytecode_offset(); + let last_jump = self.last_jump_target_offset; + match last_jump { + Some(offset) => { + if self.get_end_of_bytecode(offset) != target.offset { + self.jump_target(); + self.set_last_jump_target_offset(target); + } else { + target = offset; + } + } + None => { + self.jump_target(); + self.set_last_jump_target_offset(target); + } + } + + for jump in jumplist { + self.patch_jump_to_target(target, *jump); + } + } + + pub fn patch_jump_to_target(&mut self, target: BytecodeOffset, jump: BytecodeOffset) { + let diff = target.diff_from(jump).into(); + let index = jump.offset + 1; + // FIXME: Use native endian instead of little endian + LittleEndian::write_i32(&mut self.bytecode[index..index + 4], diff); + } + + pub fn bytecode_offset(&mut self) -> BytecodeOffset { + BytecodeOffset::from(self.bytecode.len()) + } + + pub fn stack_depth(&self) -> usize { + self.stack_depth + } + + pub fn set_stack_depth(&mut self, depth: usize) { + self.stack_depth = depth; + } + + // Public methods to emit each instruction. + + pub fn emit_boolean(&mut self, value: bool) { + self.emit_op(if value { Opcode::True } else { Opcode::False }); + } + + pub fn emit_unary_op(&mut self, opcode: Opcode) { + assert!(opcode.is_simple_unary_operator()); + self.emit_op(opcode); + } + + pub fn emit_binary_op(&mut self, opcode: Opcode) { + assert!(opcode.is_simple_binary_operator()); + debug_assert_eq!(opcode.nuses(), 2); + debug_assert_eq!(opcode.ndefs(), 1); + self.emit_op(opcode); + } + + pub fn table_switch( + &mut self, + _len: i32, + _low: i32, + _high: i32, + _first_resume_index: u24, + ) -> Result<(), EmitError> { + Err(EmitError::NotImplemented("TODO: table_switch")) + } + + pub fn numeric(&mut self, value: f64) { + if value.is_finite() && value.fract() == 0.0 { + if i8::min_value() as f64 <= value && value <= i8::max_value() as f64 { + match value as i8 { + 0 => self.zero(), + 1 => self.one(), + i => self.int8(i), + } + return; + } + if 0.0 <= value { + if value <= u16::max_value() as f64 { + self.uint16(value as u16); + return; + } + if value <= 0x00ffffff as f64 { + self.uint24(value as u24); + return; + } + } + if i32::min_value() as f64 <= value && value <= i32::max_value() as f64 { + self.int32(value as i32); + return; + } + } + self.double_(value); + } + + // WARNING + // The following section is generated by update_stencil.py. + // Do mot modify manually. 
+ // + // @@@@ BEGIN METHODS @@@@ + pub fn undefined(&mut self) { + self.emit_op(Opcode::Undefined); + } + + pub fn null(&mut self) { + self.emit_op(Opcode::Null); + } + + pub fn int32(&mut self, val: i32) { + self.emit_op(Opcode::Int32); + self.write_i32(val); + } + + pub fn zero(&mut self) { + self.emit_op(Opcode::Zero); + } + + pub fn one(&mut self) { + self.emit_op(Opcode::One); + } + + pub fn int8(&mut self, val: i8) { + self.emit_op(Opcode::Int8); + self.write_i8(val); + } + + pub fn uint16(&mut self, val: u16) { + self.emit_op(Opcode::Uint16); + self.write_u16(val); + } + + pub fn uint24(&mut self, val: u24) { + self.emit_op(Opcode::Uint24); + self.write_u24(val); + } + + pub fn double_(&mut self, val: f64) { + self.emit_op(Opcode::Double); + self.write_f64(val); + } + + pub fn big_int(&mut self, big_int_index: u32) { + self.emit_op(Opcode::BigInt); + self.write_u32(big_int_index); + } + + pub fn string(&mut self, atom_index: GCThingIndex) { + self.emit_op(Opcode::String); + self.write_g_c_thing_index(atom_index); + } + + pub fn symbol(&mut self, symbol: u8) { + self.emit_op(Opcode::Symbol); + self.write_u8(symbol); + } + + pub fn typeof_(&mut self) { + self.emit_op(Opcode::Typeof); + } + + pub fn typeof_expr(&mut self) { + self.emit_op(Opcode::TypeofExpr); + } + + pub fn inc(&mut self) { + self.emit_op(Opcode::Inc); + } + + pub fn dec(&mut self) { + self.emit_op(Opcode::Dec); + } + + pub fn to_property_key(&mut self) { + self.emit_op(Opcode::ToPropertyKey); + } + + pub fn to_numeric(&mut self) { + self.emit_op(Opcode::ToNumeric); + } + + pub fn to_string(&mut self) { + self.emit_op(Opcode::ToString); + } + + pub fn is_null_or_undefined(&mut self) { + self.emit_op(Opcode::IsNullOrUndefined); + } + + pub fn global_this(&mut self) { + self.emit_op(Opcode::GlobalThis); + } + + pub fn non_syntactic_global_this(&mut self) { + self.emit_op(Opcode::NonSyntacticGlobalThis); + } + + pub fn new_target(&mut self) { + self.emit_op(Opcode::NewTarget); + } + + pub fn dynamic_import(&mut self) { + self.emit_op(Opcode::DynamicImport); + } + + pub fn import_meta(&mut self) { + self.emit_op(Opcode::ImportMeta); + } + + pub fn new_init(&mut self) { + self.emit_op(Opcode::NewInit); + } + + pub fn new_object(&mut self, shape_index: GCThingIndex) { + self.emit_op(Opcode::NewObject); + self.write_g_c_thing_index(shape_index); + } + + pub fn object(&mut self, object_index: GCThingIndex) { + self.emit_op(Opcode::Object); + self.write_g_c_thing_index(object_index); + } + + pub fn obj_with_proto(&mut self) { + self.emit_op(Opcode::ObjWithProto); + } + + pub fn init_prop(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::InitProp); + self.write_g_c_thing_index(name_index); + } + + pub fn init_hidden_prop(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::InitHiddenProp); + self.write_g_c_thing_index(name_index); + } + + pub fn init_locked_prop(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::InitLockedProp); + self.write_g_c_thing_index(name_index); + } + + pub fn init_elem(&mut self) { + self.emit_op(Opcode::InitElem); + } + + pub fn init_hidden_elem(&mut self) { + self.emit_op(Opcode::InitHiddenElem); + } + + pub fn init_locked_elem(&mut self) { + self.emit_op(Opcode::InitLockedElem); + } + + pub fn init_prop_getter(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::InitPropGetter); + self.write_g_c_thing_index(name_index); + } + + pub fn init_hidden_prop_getter(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::InitHiddenPropGetter); + 
self.write_g_c_thing_index(name_index); + } + + pub fn init_elem_getter(&mut self) { + self.emit_op(Opcode::InitElemGetter); + } + + pub fn init_hidden_elem_getter(&mut self) { + self.emit_op(Opcode::InitHiddenElemGetter); + } + + pub fn init_prop_setter(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::InitPropSetter); + self.write_g_c_thing_index(name_index); + } + + pub fn init_hidden_prop_setter(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::InitHiddenPropSetter); + self.write_g_c_thing_index(name_index); + } + + pub fn init_elem_setter(&mut self) { + self.emit_op(Opcode::InitElemSetter); + } + + pub fn init_hidden_elem_setter(&mut self) { + self.emit_op(Opcode::InitHiddenElemSetter); + } + + pub fn get_prop(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::GetProp); + self.write_g_c_thing_index(name_index); + } + + pub fn get_elem(&mut self) { + self.emit_op(Opcode::GetElem); + } + + pub fn set_prop(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::SetProp); + self.write_g_c_thing_index(name_index); + } + + pub fn strict_set_prop(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::StrictSetProp); + self.write_g_c_thing_index(name_index); + } + + pub fn set_elem(&mut self) { + self.emit_op(Opcode::SetElem); + } + + pub fn strict_set_elem(&mut self) { + self.emit_op(Opcode::StrictSetElem); + } + + pub fn del_prop(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::DelProp); + self.write_g_c_thing_index(name_index); + } + + pub fn strict_del_prop(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::StrictDelProp); + self.write_g_c_thing_index(name_index); + } + + pub fn del_elem(&mut self) { + self.emit_op(Opcode::DelElem); + } + + pub fn strict_del_elem(&mut self) { + self.emit_op(Opcode::StrictDelElem); + } + + pub fn has_own(&mut self) { + self.emit_op(Opcode::HasOwn); + } + + pub fn check_private_field(&mut self, throw_condition: ThrowCondition, msg_kind: ThrowMsgKind) { + self.emit_op(Opcode::CheckPrivateField); + self.write_u8(throw_condition as u8); + self.write_u8(msg_kind as u8); + } + + pub fn new_private_name(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::NewPrivateName); + self.write_g_c_thing_index(name_index); + } + + pub fn super_base(&mut self) { + self.emit_op(Opcode::SuperBase); + } + + pub fn get_prop_super(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::GetPropSuper); + self.write_g_c_thing_index(name_index); + } + + pub fn get_elem_super(&mut self) { + self.emit_op(Opcode::GetElemSuper); + } + + pub fn set_prop_super(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::SetPropSuper); + self.write_g_c_thing_index(name_index); + } + + pub fn strict_set_prop_super(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::StrictSetPropSuper); + self.write_g_c_thing_index(name_index); + } + + pub fn set_elem_super(&mut self) { + self.emit_op(Opcode::SetElemSuper); + } + + pub fn strict_set_elem_super(&mut self) { + self.emit_op(Opcode::StrictSetElemSuper); + } + + pub fn iter(&mut self) { + self.emit_op(Opcode::Iter); + } + + pub fn more_iter(&mut self) { + self.emit_op(Opcode::MoreIter); + } + + pub fn is_no_iter(&mut self) { + self.emit_op(Opcode::IsNoIter); + } + + pub fn end_iter(&mut self) { + self.emit_op(Opcode::EndIter); + } + + pub fn close_iter(&mut self, kind: CompletionKind) { + self.emit_op(Opcode::CloseIter); + self.write_u8(kind as u8); + } + + pub fn check_is_obj(&mut self, kind: CheckIsObjectKind) { + self.emit_op(Opcode::CheckIsObj); 
+ self.write_u8(kind as u8); + } + + pub fn check_obj_coercible(&mut self) { + self.emit_op(Opcode::CheckObjCoercible); + } + + pub fn to_async_iter(&mut self) { + self.emit_op(Opcode::ToAsyncIter); + } + + pub fn mutate_proto(&mut self) { + self.emit_op(Opcode::MutateProto); + } + + pub fn new_array(&mut self, length: u32) { + self.emit_op(Opcode::NewArray); + self.write_u32(length); + } + + pub fn init_elem_array(&mut self, index: u32) { + self.emit_op(Opcode::InitElemArray); + self.write_u32(index); + } + + pub fn init_elem_inc(&mut self) { + self.emit_op(Opcode::InitElemInc); + } + + pub fn hole(&mut self) { + self.emit_op(Opcode::Hole); + } + + pub fn reg_exp(&mut self, regexp_index: GCThingIndex) { + self.emit_op(Opcode::RegExp); + self.write_g_c_thing_index(regexp_index); + } + + pub fn lambda(&mut self, func_index: GCThingIndex) { + self.emit_op(Opcode::Lambda); + self.write_g_c_thing_index(func_index); + } + + pub fn set_fun_name(&mut self, prefix_kind: FunctionPrefixKind) { + self.emit_op(Opcode::SetFunName); + self.write_u8(prefix_kind as u8); + } + + pub fn init_home_object(&mut self) { + self.emit_op(Opcode::InitHomeObject); + } + + pub fn check_class_heritage(&mut self) { + self.emit_op(Opcode::CheckClassHeritage); + } + + pub fn fun_with_proto(&mut self, func_index: GCThingIndex) { + self.emit_op(Opcode::FunWithProto); + self.write_g_c_thing_index(func_index); + } + + pub fn builtin_object(&mut self, kind: u8) { + self.emit_op(Opcode::BuiltinObject); + self.write_u8(kind); + } + + pub fn call(&mut self, argc: u16) { + self.emit_argc_op(Opcode::Call, argc); + self.write_u16(argc); + } + + pub fn call_content(&mut self, argc: u16) { + self.emit_argc_op(Opcode::CallContent, argc); + self.write_u16(argc); + } + + pub fn call_iter(&mut self, argc: u16) { + self.emit_argc_op(Opcode::CallIter, argc); + self.write_u16(argc); + } + + pub fn call_content_iter(&mut self, argc: u16) { + self.emit_argc_op(Opcode::CallContentIter, argc); + self.write_u16(argc); + } + + pub fn call_ignores_rv(&mut self, argc: u16) { + self.emit_argc_op(Opcode::CallIgnoresRv, argc); + self.write_u16(argc); + } + + pub fn spread_call(&mut self) { + self.emit_op(Opcode::SpreadCall); + } + + pub fn optimize_spread_call(&mut self) { + self.emit_op(Opcode::OptimizeSpreadCall); + } + + pub fn eval(&mut self, argc: u16) { + self.emit_argc_op(Opcode::Eval, argc); + self.write_u16(argc); + } + + pub fn spread_eval(&mut self) { + self.emit_op(Opcode::SpreadEval); + } + + pub fn strict_eval(&mut self, argc: u16) { + self.emit_argc_op(Opcode::StrictEval, argc); + self.write_u16(argc); + } + + pub fn strict_spread_eval(&mut self) { + self.emit_op(Opcode::StrictSpreadEval); + } + + pub fn implicit_this(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::ImplicitThis); + self.write_g_c_thing_index(name_index); + } + + pub fn call_site_obj(&mut self, object_index: GCThingIndex) { + self.emit_op(Opcode::CallSiteObj); + self.write_g_c_thing_index(object_index); + } + + pub fn is_constructing(&mut self) { + self.emit_op(Opcode::IsConstructing); + } + + pub fn new_(&mut self, argc: u16) { + self.emit_argc_op(Opcode::New, argc); + self.write_u16(argc); + } + + pub fn new_content(&mut self, argc: u16) { + self.emit_argc_op(Opcode::NewContent, argc); + self.write_u16(argc); + } + + pub fn super_call(&mut self, argc: u16) { + self.emit_argc_op(Opcode::SuperCall, argc); + self.write_u16(argc); + } + + pub fn spread_new(&mut self) { + self.emit_op(Opcode::SpreadNew); + } + + pub fn spread_super_call(&mut self) { + 
self.emit_op(Opcode::SpreadSuperCall); + } + + pub fn super_fun(&mut self) { + self.emit_op(Opcode::SuperFun); + } + + pub fn check_this_reinit(&mut self) { + self.emit_op(Opcode::CheckThisReinit); + } + + pub fn generator(&mut self) { + self.emit_op(Opcode::Generator); + } + + pub fn initial_yield(&mut self, resume_index: u24) { + self.emit_op(Opcode::InitialYield); + self.write_u24(resume_index); + } + + pub fn after_yield(&mut self) { + self.emit_op(Opcode::AfterYield); + self.write_ic_index(); + } + + pub fn final_yield_rval(&mut self) { + self.emit_op(Opcode::FinalYieldRval); + } + + pub fn yield_(&mut self, resume_index: u24) { + self.emit_op(Opcode::Yield); + self.write_u24(resume_index); + } + + pub fn is_gen_closing(&mut self) { + self.emit_op(Opcode::IsGenClosing); + } + + pub fn async_await(&mut self) { + self.emit_op(Opcode::AsyncAwait); + } + + pub fn async_resolve(&mut self, fulfill_or_reject: AsyncFunctionResolveKind) { + self.emit_op(Opcode::AsyncResolve); + self.write_u8(fulfill_or_reject as u8); + } + + pub fn await_(&mut self, resume_index: u24) { + self.emit_op(Opcode::Await); + self.write_u24(resume_index); + } + + pub fn can_skip_await(&mut self) { + self.emit_op(Opcode::CanSkipAwait); + } + + pub fn maybe_extract_await_value(&mut self) { + self.emit_op(Opcode::MaybeExtractAwaitValue); + } + + pub fn resume_kind(&mut self, resume_kind: GeneratorResumeKind) { + self.emit_op(Opcode::ResumeKind); + self.write_u8(resume_kind as u8); + } + + pub fn check_resume_kind(&mut self) { + self.emit_op(Opcode::CheckResumeKind); + } + + pub fn resume(&mut self) { + self.emit_op(Opcode::Resume); + } + + pub fn jump_target(&mut self) { + self.emit_op(Opcode::JumpTarget); + self.write_ic_index(); + } + + pub fn loop_head(&mut self, depth_hint: u8) { + self.emit_op(Opcode::LoopHead); + self.write_ic_index(); + self.write_u8(depth_hint); + } + + pub fn goto_(&mut self, offset: BytecodeOffsetDiff) { + self.emit_op(Opcode::Goto); + self.write_bytecode_offset_diff(offset); + } + + pub fn jump_if_false(&mut self, forward_offset: BytecodeOffsetDiff) { + self.emit_op(Opcode::JumpIfFalse); + self.write_bytecode_offset_diff(forward_offset); + } + + pub fn jump_if_true(&mut self, offset: BytecodeOffsetDiff) { + self.emit_op(Opcode::JumpIfTrue); + self.write_bytecode_offset_diff(offset); + } + + pub fn and_(&mut self, forward_offset: BytecodeOffsetDiff) { + self.emit_op(Opcode::And); + self.write_bytecode_offset_diff(forward_offset); + } + + pub fn or_(&mut self, forward_offset: BytecodeOffsetDiff) { + self.emit_op(Opcode::Or); + self.write_bytecode_offset_diff(forward_offset); + } + + pub fn coalesce(&mut self, forward_offset: BytecodeOffsetDiff) { + self.emit_op(Opcode::Coalesce); + self.write_bytecode_offset_diff(forward_offset); + } + + pub fn case_(&mut self, forward_offset: BytecodeOffsetDiff) { + self.emit_op(Opcode::Case); + self.write_bytecode_offset_diff(forward_offset); + } + + pub fn default_(&mut self, forward_offset: BytecodeOffsetDiff) { + self.emit_op(Opcode::Default); + self.write_bytecode_offset_diff(forward_offset); + } + + pub fn return_(&mut self) { + self.emit_op(Opcode::Return); + } + + pub fn get_rval(&mut self) { + self.emit_op(Opcode::GetRval); + } + + pub fn set_rval(&mut self) { + self.emit_op(Opcode::SetRval); + } + + pub fn ret_rval(&mut self) { + self.emit_op(Opcode::RetRval); + } + + pub fn check_return(&mut self) { + self.emit_op(Opcode::CheckReturn); + } + + pub fn throw_(&mut self) { + self.emit_op(Opcode::Throw); + } + + pub fn throw_msg(&mut self, msg_number: 
ThrowMsgKind) { + self.emit_op(Opcode::ThrowMsg); + self.write_u8(msg_number as u8); + } + + pub fn throw_set_const(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::ThrowSetConst); + self.write_g_c_thing_index(name_index); + } + + pub fn try_(&mut self) { + self.emit_op(Opcode::Try); + } + + pub fn try_destructuring(&mut self) { + self.emit_op(Opcode::TryDestructuring); + } + + pub fn exception(&mut self) { + self.emit_op(Opcode::Exception); + } + + pub fn finally(&mut self) { + self.emit_op(Opcode::Finally); + } + + pub fn uninitialized(&mut self) { + self.emit_op(Opcode::Uninitialized); + } + + pub fn init_lexical(&mut self, localno: u24) { + self.emit_op(Opcode::InitLexical); + self.write_u24(localno); + } + + pub fn init_g_lexical(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::InitGLexical); + self.write_g_c_thing_index(name_index); + } + + pub fn init_aliased_lexical(&mut self, hops: u8, slot: u24) { + self.emit_op(Opcode::InitAliasedLexical); + self.write_u8(hops); + self.write_u24(slot); + } + + pub fn check_lexical(&mut self, localno: u24) { + self.emit_op(Opcode::CheckLexical); + self.write_u24(localno); + } + + pub fn check_aliased_lexical(&mut self, hops: u8, slot: u24) { + self.emit_op(Opcode::CheckAliasedLexical); + self.write_u8(hops); + self.write_u24(slot); + } + + pub fn check_this(&mut self) { + self.emit_op(Opcode::CheckThis); + } + + pub fn bind_g_name(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::BindGName); + self.write_g_c_thing_index(name_index); + } + + pub fn bind_name(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::BindName); + self.write_g_c_thing_index(name_index); + } + + pub fn get_name(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::GetName); + self.write_g_c_thing_index(name_index); + } + + pub fn get_g_name(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::GetGName); + self.write_g_c_thing_index(name_index); + } + + pub fn get_arg(&mut self, argno: u16) { + self.emit_op(Opcode::GetArg); + self.write_u16(argno); + } + + pub fn get_local(&mut self, localno: u24) { + self.emit_op(Opcode::GetLocal); + self.write_u24(localno); + } + + pub fn get_aliased_var(&mut self, hops: u8, slot: u24) { + self.emit_op(Opcode::GetAliasedVar); + self.write_u8(hops); + self.write_u24(slot); + } + + pub fn get_aliased_debug_var(&mut self, hops: u8, slot: u24) { + self.emit_op(Opcode::GetAliasedDebugVar); + self.write_u8(hops); + self.write_u24(slot); + } + + pub fn get_import(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::GetImport); + self.write_g_c_thing_index(name_index); + } + + pub fn get_bound_name(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::GetBoundName); + self.write_g_c_thing_index(name_index); + } + + pub fn get_intrinsic(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::GetIntrinsic); + self.write_g_c_thing_index(name_index); + } + + pub fn callee(&mut self) { + self.emit_op(Opcode::Callee); + } + + pub fn env_callee(&mut self, num_hops: u8) { + self.emit_op(Opcode::EnvCallee); + self.write_u8(num_hops); + } + + pub fn set_name(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::SetName); + self.write_g_c_thing_index(name_index); + } + + pub fn strict_set_name(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::StrictSetName); + self.write_g_c_thing_index(name_index); + } + + pub fn set_g_name(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::SetGName); + self.write_g_c_thing_index(name_index); + } + + pub fn 
strict_set_g_name(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::StrictSetGName); + self.write_g_c_thing_index(name_index); + } + + pub fn set_arg(&mut self, argno: u16) { + self.emit_op(Opcode::SetArg); + self.write_u16(argno); + } + + pub fn set_local(&mut self, localno: u24) { + self.emit_op(Opcode::SetLocal); + self.write_u24(localno); + } + + pub fn set_aliased_var(&mut self, hops: u8, slot: u24) { + self.emit_op(Opcode::SetAliasedVar); + self.write_u8(hops); + self.write_u24(slot); + } + + pub fn set_intrinsic(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::SetIntrinsic); + self.write_g_c_thing_index(name_index); + } + + pub fn push_lexical_env(&mut self, lexical_scope_index: GCThingIndex) { + self.emit_op(Opcode::PushLexicalEnv); + self.write_g_c_thing_index(lexical_scope_index); + } + + pub fn pop_lexical_env(&mut self) { + self.emit_op(Opcode::PopLexicalEnv); + } + + pub fn debug_leave_lexical_env(&mut self) { + self.emit_op(Opcode::DebugLeaveLexicalEnv); + } + + pub fn recreate_lexical_env(&mut self, lexical_scope_index: GCThingIndex) { + self.emit_op(Opcode::RecreateLexicalEnv); + self.write_g_c_thing_index(lexical_scope_index); + } + + pub fn freshen_lexical_env(&mut self, lexical_scope_index: GCThingIndex) { + self.emit_op(Opcode::FreshenLexicalEnv); + self.write_g_c_thing_index(lexical_scope_index); + } + + pub fn push_class_body_env(&mut self, lexical_scope_index: GCThingIndex) { + self.emit_op(Opcode::PushClassBodyEnv); + self.write_g_c_thing_index(lexical_scope_index); + } + + pub fn push_var_env(&mut self, scope_index: GCThingIndex) { + self.emit_op(Opcode::PushVarEnv); + self.write_g_c_thing_index(scope_index); + } + + pub fn enter_with(&mut self, static_with_index: GCThingIndex) { + self.emit_op(Opcode::EnterWith); + self.write_g_c_thing_index(static_with_index); + } + + pub fn leave_with(&mut self) { + self.emit_op(Opcode::LeaveWith); + } + + pub fn bind_var(&mut self) { + self.emit_op(Opcode::BindVar); + } + + pub fn global_or_eval_decl_instantiation(&mut self, last_fun: u32) { + self.emit_op(Opcode::GlobalOrEvalDeclInstantiation); + self.write_u32(last_fun); + } + + pub fn del_name(&mut self, name_index: GCThingIndex) { + self.emit_op(Opcode::DelName); + self.write_g_c_thing_index(name_index); + } + + pub fn arguments(&mut self) { + self.emit_op(Opcode::Arguments); + } + + pub fn rest(&mut self) { + self.emit_op(Opcode::Rest); + } + + pub fn function_this(&mut self) { + self.emit_op(Opcode::FunctionThis); + } + + pub fn pop(&mut self) { + self.emit_op(Opcode::Pop); + } + + pub fn pop_n(&mut self, n: u16) { + self.emit_pop_n_op(Opcode::PopN, n); + self.write_u16(n); + } + + pub fn dup(&mut self) { + self.emit_op(Opcode::Dup); + } + + pub fn dup2(&mut self) { + self.emit_op(Opcode::Dup2); + } + + pub fn dup_at(&mut self, n: u24) { + self.emit_op(Opcode::DupAt); + self.write_u24(n); + } + + pub fn swap(&mut self) { + self.emit_op(Opcode::Swap); + } + + pub fn pick(&mut self, n: u8) { + self.emit_op(Opcode::Pick); + self.write_u8(n); + } + + pub fn unpick(&mut self, n: u8) { + self.emit_op(Opcode::Unpick); + self.write_u8(n); + } + + pub fn nop(&mut self) { + self.emit_op(Opcode::Nop); + } + + pub fn lineno(&mut self, lineno: u32) { + self.emit_op(Opcode::Lineno); + self.write_u32(lineno); + } + + pub fn nop_destructuring(&mut self) { + self.emit_op(Opcode::NopDestructuring); + } + + pub fn force_interpreter(&mut self) { + self.emit_op(Opcode::ForceInterpreter); + } + + pub fn debug_check_self_hosted(&mut self) { + 
self.emit_op(Opcode::DebugCheckSelfHosted); + } + + pub fn debugger(&mut self) { + self.emit_op(Opcode::Debugger); + } + + // @@@@ END METHODS @@@@ + + pub fn get_atom_gcthing_index(&mut self, atom: SourceAtomSetIndex) -> GCThingIndex { + match self.atom_to_gcindex_map.get(&atom) { + Some(index) => *index, + None => { + let index = self.gcthings.push_atom(atom); + self.atom_to_gcindex_map.insert(atom, index); + index + } + } + } + + pub fn get_function_gcthing_index(&mut self, fun_index: ScriptStencilIndex) -> GCThingIndex { + self.gcthings.push_function(fun_index) + } + + pub fn get_regexp_gcthing_index(&mut self, regexp_index: RegExpIndex) -> GCThingIndex { + self.gcthings.push_regexp(regexp_index) + } + + fn update_max_frame_slots(&mut self, max_frame_slots: FrameSlot) { + self.max_fixed_slots = cmp::max(self.max_fixed_slots, max_frame_slots); + } + + pub fn enter_global_scope(&mut self, scope_index: ScopeIndex) { + let index = self.gcthings.push_scope(scope_index); + self.body_scope_index = Some(index); + } + + pub fn leave_global_scope(&self) {} + + pub fn enter_lexical_scope( + &mut self, + scope_index: ScopeIndex, + parent_scope_note_index: Option<ScopeNoteIndex>, + next_frame_slot: FrameSlot, + needs_environment_object: bool, + ) -> ScopeNoteIndex { + self.update_max_frame_slots(next_frame_slot); + + let gcthing_index = self.gcthings.push_scope(scope_index); + let offset = self.bytecode_offset(); + let note_index = + self.scope_notes + .enter_scope(gcthing_index, offset, parent_scope_note_index); + + if needs_environment_object { + self.push_lexical_env(gcthing_index); + } + + note_index + } + + pub fn leave_lexical_scope(&mut self, index: ScopeNoteIndex, needs_environment_object: bool) { + self.emit_leave_lexical_scope(needs_environment_object); + let offset = self.bytecode_offset(); + self.scope_notes.leave_scope(index, offset); + } + + fn emit_leave_lexical_scope(&mut self, needs_environment_object: bool) { + if needs_environment_object { + self.pop_lexical_env(); + } else { + self.debug_leave_lexical_env(); + } + } + + pub fn enter_scope_hole_from_lexical( + &mut self, + maybe_hole_scope_note_index: &Option<ScopeNoteIndex>, + parent_scope_note_index: Option<ScopeNoteIndex>, + needs_environment_object: bool, + ) -> ScopeNoteIndex { + self.emit_leave_lexical_scope(needs_environment_object); + self.enter_scope_hole(maybe_hole_scope_note_index, parent_scope_note_index) + } + + fn enter_scope_hole( + &mut self, + maybe_hole_scope_note_index: &Option<ScopeNoteIndex>, + parent_scope_note_index: Option<ScopeNoteIndex>, + ) -> ScopeNoteIndex { + let offset = self.bytecode_offset(); + + let gcthing_index = match maybe_hole_scope_note_index { + Some(index) => self.scope_notes.get_scope_hole_gcthing_index(index), + None => self + .body_scope_index + .expect("we should have a body scope index"), + }; + + self.scope_notes + .enter_scope(gcthing_index, offset, parent_scope_note_index) + } + + pub fn leave_scope_hole(&mut self, index: ScopeNoteIndex) { + let offset = self.bytecode_offset(); + self.scope_notes.leave_scope(index, offset); + } + + pub fn switch_to_main(&mut self) { + self.main_offset = self.bytecode_offset(); + } + + pub fn into_stencil( + self, + script_data_list: &mut ImmutableScriptDataList, + extent: SourceExtent, + ) -> Result<ScriptStencil, EmitError> { + let main_offset: usize = self.main_offset.into(); + let nfixed: u32 = self.max_fixed_slots.into(); + let nslots = nfixed as usize + self.maximum_stack_depth; + + let immutable_script_data = 
script_data_list.push(ImmutableScriptData { + main_offset: main_offset + .try_into() + .map_err(|_| EmitError::NotImplemented("Throwing allocation overflow"))?, + nfixed: self.max_fixed_slots, + nslots: nslots + .try_into() + .map_err(|_| EmitError::NotImplemented("Throwing JSMSG_NEED_DIET"))?, + body_scope_index: usize::from(self.body_scope_index.expect("body scope should be set")) + .try_into() + .map_err(|_| EmitError::NotImplemented("Throwing allocation overflow"))?, + num_ic_entries: self.num_ic_entries.try_into().unwrap(), + fun_length: 0, + + bytecode: self.bytecode, + scope_notes: self.scope_notes.into(), + }); + + Ok(ScriptStencil::top_level_script( + self.gcthings.into(), + immutable_script_data, + extent, + )) + } +} diff --git a/third_party/rust/jsparagus-emitter/src/emitter_scope.rs b/third_party/rust/jsparagus-emitter/src/emitter_scope.rs new file mode 100644 index 0000000000..193ac3f844 --- /dev/null +++ b/third_party/rust/jsparagus-emitter/src/emitter_scope.rs @@ -0,0 +1,453 @@ +//! Code for tracking scopes and looking up names as the emitter traverses +//! the program. +//! +//! EmitterScopes exist only while the bytecode emitter is working. +//! Longer-lived scope information is stored in `ScopeDataMap`. + +use crate::emitter::InstructionWriter; +use ast::source_atom_set::SourceAtomSetIndex; +use std::collections::HashMap; +use std::iter::Iterator; +use stencil::env_coord::{EnvironmentHops, EnvironmentSlot}; +use stencil::frame_slot::FrameSlot; +use stencil::scope::{BindingKind, GlobalScopeData, LexicalScopeData, ScopeDataMap, ScopeIndex}; +use stencil::scope_notes::ScopeNoteIndex; + +/// Result of looking up a name. +/// +/// Corresponds to js::frontend::NameLocation in +/// m-c/js/src/frontend/NameAnalysisTypes.h +#[derive(Debug, Clone)] +pub enum NameLocation { + Dynamic, + Global(BindingKind), + FrameSlot(FrameSlot, BindingKind), + EnvironmentCoord(EnvironmentHops, EnvironmentSlot, BindingKind), +} + +#[derive(Debug, Copy, Clone, PartialEq)] +pub struct EmitterScopeDepth { + index: usize, +} + +impl EmitterScopeDepth { + fn has_parent(&self) -> bool { + self.index > 0 + } + + fn parent(&self) -> Self { + debug_assert!(self.has_parent()); + + Self { + index: self.index - 1, + } + } +} + +// --- EmitterScope types +// +// These types are the variants of enum EmitterScope. 
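+//
+// There are currently two: GlobalEmitterScope for the top-level script's
+// global scope and LexicalEmitterScope for lexical (block) scopes. Both
+// cache a name-to-NameLocation map that lookup_name consults.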
+ +#[derive(Debug)] +pub struct GlobalEmitterScope { + cache: HashMap<SourceAtomSetIndex, NameLocation>, +} + +impl GlobalEmitterScope { + fn new(data: &GlobalScopeData) -> Self { + let mut cache = HashMap::new(); + for item in data.iter() { + cache.insert(item.name(), NameLocation::Global(item.kind())); + } + Self { cache } + } + + fn lookup_name(&self, name: SourceAtomSetIndex) -> Option<NameLocation> { + match self.cache.get(&name) { + Some(loc) => Some(loc.clone()), + None => Some(NameLocation::Global(BindingKind::Var)), + } + } + + fn next_frame_slot(&self) -> FrameSlot { + FrameSlot::new(0) + } + + fn scope_note_index(&self) -> Option<ScopeNoteIndex> { + None + } + + fn has_environment_object(&self) -> bool { + false + } +} + +struct LexicalEnvironmentObject {} +impl LexicalEnvironmentObject { + fn first_free_slot() -> u32 { + // FIXME: This is the value of + // `JSSLOT_FREE(&LexicalEnvironmentObject::class_)` + // in SpiderMonkey + 2 + } +} + +#[derive(Debug)] +pub struct LexicalEmitterScope { + cache: HashMap<SourceAtomSetIndex, NameLocation>, + next_frame_slot: FrameSlot, + needs_environment_object: bool, + scope_note_index: Option<ScopeNoteIndex>, +} + +impl LexicalEmitterScope { + pub fn new(data: &LexicalScopeData, first_frame_slot: FrameSlot) -> Self { + let is_all_bindings_closed_over = data.base.is_all_bindings_closed_over(); + let mut needs_environment_object = false; + + let mut cache = HashMap::new(); + let mut frame_slot = first_frame_slot; + let mut env_slot = EnvironmentSlot::new(LexicalEnvironmentObject::first_free_slot()); + for item in data.iter() { + if is_all_bindings_closed_over || item.is_closed_over() { + cache.insert( + item.name(), + NameLocation::EnvironmentCoord(EnvironmentHops::new(0), env_slot, item.kind()), + ); + env_slot.next(); + needs_environment_object = true; + } else { + cache.insert( + item.name(), + NameLocation::FrameSlot(frame_slot, item.kind()), + ); + frame_slot.next(); + } + } + + Self { + cache, + next_frame_slot: frame_slot, + needs_environment_object, + scope_note_index: None, + } + } + + fn lookup_name(&self, name: SourceAtomSetIndex) -> Option<NameLocation> { + match self.cache.get(&name) { + Some(loc) => Some(loc.clone()), + None => None, + } + } + + fn next_frame_slot(&self) -> FrameSlot { + self.next_frame_slot + } + + fn scope_note_index(&self) -> Option<ScopeNoteIndex> { + self.scope_note_index + } + + pub fn has_environment_object(&self) -> bool { + self.needs_environment_object + } +} + +/// The information about a scope needed for emitting bytecode. 
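+///
+/// Values of this enum are pushed onto and popped from the EmitterScopeStack
+/// as the emitter enters and leaves the corresponding scopes.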
+#[derive(Debug)] +pub enum EmitterScope { + Global(GlobalEmitterScope), + Lexical(LexicalEmitterScope), +} + +impl EmitterScope { + fn lookup_name(&self, name: SourceAtomSetIndex) -> Option<NameLocation> { + match self { + EmitterScope::Global(scope) => scope.lookup_name(name), + EmitterScope::Lexical(scope) => scope.lookup_name(name), + } + } + + fn next_frame_slot(&self) -> FrameSlot { + match self { + EmitterScope::Global(scope) => scope.next_frame_slot(), + EmitterScope::Lexical(scope) => scope.next_frame_slot(), + } + } + + pub fn scope_note_index(&self) -> Option<ScopeNoteIndex> { + match self { + EmitterScope::Global(scope) => scope.scope_note_index(), + EmitterScope::Lexical(scope) => scope.scope_note_index(), + } + } + + fn has_environment_object(&self) -> bool { + match self { + EmitterScope::Global(scope) => scope.has_environment_object(), + EmitterScope::Lexical(scope) => scope.has_environment_object(), + } + } + + fn is_var_scope(&self) -> bool { + match self { + EmitterScope::Global(_) => true, + EmitterScope::Lexical(_) => false, + } + } +} + +/// Stack that tracks the current scope chain while emitting bytecode. +/// +/// Bytecode is emitted by traversing the structure of the program. During this +/// process there's always a "current position". This stack represents the +/// scope chain at the current position. It is updated when we enter or leave +/// any node that has its own scope. +/// +/// Unlike the C++ impl, this uses an explicit stack struct, since Rust cannot +/// store a reference to a stack-allocated object in another object that has a +/// longer lifetime. +pub struct EmitterScopeStack { + scope_stack: Vec<EmitterScope>, +} + +impl EmitterScopeStack { + /// Create a new, empty scope stack. + pub fn new() -> Self { + Self { + scope_stack: Vec::new(), + } + } + + /// The current innermost scope. + fn innermost(&self) -> &EmitterScope { + self.scope_stack + .last() + .expect("There should be at least one scope") + } + + /// Enter the global scope. Call this once at the beginning of a top-level + /// script. + /// + /// This emits bytecode that implements parts of [ScriptEvaluation][1] and + /// [GlobalDeclarationInstantiation][2]. + /// + /// [1]: https://tc39.es/ecma262/#sec-runtime-semantics-scriptevaluation + /// [2]: https://tc39.es/ecma262/#sec-globaldeclarationinstantiation + pub fn enter_global( + &mut self, + emit: &mut InstructionWriter, + scope_data_map: &ScopeDataMap, + top_level_function_count: u32, + ) { + let scope_index = scope_data_map.get_global_index(); + let scope_data = scope_data_map.get_global_at(scope_index); + + // The outermost scope should be the first item in the GC things list. + // Enter global scope here, before emitting any name ops below. + emit.enter_global_scope(scope_index); + + if scope_data.base.bindings.len() > 0 { + emit.global_or_eval_decl_instantiation(top_level_function_count); + } + + emit.switch_to_main(); + + let scope = EmitterScope::Global(GlobalEmitterScope::new(scope_data)); + self.scope_stack.push(scope); + } + + /// Leave the global scope. Call this once at the end of a top-level + /// script. + pub fn leave_global(&mut self, emit: &InstructionWriter) { + match self.scope_stack.pop() { + Some(EmitterScope::Global(_)) => {} + _ => panic!("unmatching scope"), + } + emit.leave_global_scope(); + } + + /// Emit bytecode to mark some local lexical variables as uninitialized. 
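+ ///
+ /// For the range `slot_start..slot_end` this emits a single Uninitialized,
+ /// an InitLexical for each slot, and a final Pop (nothing is emitted for an
+ /// empty range). This is what sets up the temporary dead zone that
+ /// CheckLexical later tests for `let`/`const` bindings.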
+ fn dead_zone_frame_slot_range( + &self, + emit: &mut InstructionWriter, + slot_start: FrameSlot, + slot_end: FrameSlot, + ) { + if slot_start == slot_end { + return; + } + + emit.uninitialized(); + let mut slot = slot_start; + while slot < slot_end { + emit.init_lexical(slot.into()); + slot.next(); + } + emit.pop(); + } + + /// Enter a lexical scope. + /// + /// A new LexicalEmitterScope based on scope_data_map is pushed to the + /// scope stack. Bytecode is emitted to mark the new lexical bindings as + /// uninitialized. + pub fn enter_lexical( + &mut self, + emit: &mut InstructionWriter, + scope_data_map: &mut ScopeDataMap, + scope_index: ScopeIndex, + ) { + let mut scope_data = scope_data_map.get_lexical_at_mut(scope_index); + + let parent_scope_note_index = self.innermost().scope_note_index(); + + let first_frame_slot = self.innermost().next_frame_slot(); + scope_data.first_frame_slot = first_frame_slot; + let mut lexical_scope = LexicalEmitterScope::new(scope_data, first_frame_slot); + let next_frame_slot = lexical_scope.next_frame_slot; + let index = emit.enter_lexical_scope( + scope_index, + parent_scope_note_index, + next_frame_slot, + lexical_scope.needs_environment_object, + ); + lexical_scope.scope_note_index = Some(index); + + let scope = EmitterScope::Lexical(lexical_scope); + self.scope_stack.push(scope); + + self.dead_zone_frame_slot_range(emit, first_frame_slot, next_frame_slot); + } + + /// Leave a lexical scope. + pub fn leave_lexical(&mut self, emit: &mut InstructionWriter) { + let lexical_scope = match self.scope_stack.pop() { + Some(EmitterScope::Lexical(scope)) => scope, + _ => panic!("unmatching scope"), + }; + emit.leave_lexical_scope( + lexical_scope + .scope_note_index + .expect("scope note index should be populated"), + lexical_scope.needs_environment_object, + ); + } + + /// Resolve a name by searching the current scope chain. + /// + /// Implements the parts of [ResolveBinding][1] that can be done at + /// emit time. + /// + /// [1]: https://tc39.es/ecma262/#sec-resolvebinding + pub fn lookup_name(&mut self, name: SourceAtomSetIndex) -> NameLocation { + let mut hops = EnvironmentHops::new(0); + + for scope in self.scope_stack.iter().rev() { + if let Some(loc) = scope.lookup_name(name) { + return match loc { + NameLocation::EnvironmentCoord(orig_hops, slot, kind) => { + debug_assert!(u8::from(orig_hops) == 0u8); + NameLocation::EnvironmentCoord(hops, slot, kind) + } + _ => loc, + }; + } + if scope.has_environment_object() { + hops.next(); + } + } + + NameLocation::Dynamic + } + + /// Just like lookup_name, but only in var scope. + pub fn lookup_name_in_var(&mut self, name: SourceAtomSetIndex) -> NameLocation { + let mut hops = EnvironmentHops::new(0); + + for scope in self.scope_stack.iter().rev() { + if scope.is_var_scope() { + if let Some(loc) = scope.lookup_name(name) { + return match loc { + NameLocation::EnvironmentCoord(orig_hops, slot, kind) => { + debug_assert!(u8::from(orig_hops) == 0u8); + NameLocation::EnvironmentCoord(hops, slot, kind) + } + _ => loc, + }; + } + } + + if scope.has_environment_object() { + hops.next(); + } + } + + NameLocation::Dynamic + } + + pub fn current_depth(&self) -> EmitterScopeDepth { + EmitterScopeDepth { + index: self.scope_stack.len() - 1, + } + } + + /// Walk the scope stack up to and including `to` depth. + /// See EmitterScopeWalker for the details. 
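+ ///
+ /// NonLocalExitControl uses this walk to open a scope-note hole for every
+ /// scope that a non-local exit (`break`/`continue`) jumps out of.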
+ pub fn walk_up_to_including<'a>(&'a self, to: EmitterScopeDepth) -> EmitterScopeWalker<'a> { + EmitterScopeWalker::new(self, to) + } + + pub fn get_current_scope_note_index(&self) -> Option<ScopeNoteIndex> { + self.innermost().scope_note_index() + } + + fn get<'a>(&'a self, index: EmitterScopeDepth) -> &'a EmitterScope { + self.scope_stack + .get(index.index) + .expect("scope should exist") + } +} + +/// Walk the scope stack up to `to`, and yields EmitterScopeWalkItem for +/// each scope. +/// +/// The first item is `{ outer: parent-of-innermost, inner: innermost }`, and +/// the last item is `{ outer: to, inner: child-of-to }`. +pub struct EmitterScopeWalker<'a> { + stack: &'a EmitterScopeStack, + to: EmitterScopeDepth, + current: EmitterScopeDepth, +} + +impl<'a> EmitterScopeWalker<'a> { + fn new(stack: &'a EmitterScopeStack, to: EmitterScopeDepth) -> Self { + let current = stack.current_depth(); + + Self { stack, to, current } + } +} + +pub struct EmitterScopeWalkItem<'a> { + pub outer: &'a EmitterScope, + pub inner: &'a EmitterScope, +} + +impl<'a> Iterator for EmitterScopeWalker<'a> { + type Item = EmitterScopeWalkItem<'a>; + + fn next(&mut self) -> Option<EmitterScopeWalkItem<'a>> { + if self.current == self.to { + return None; + } + + let outer_index = self.current.parent(); + let inner_index = self.current; + self.current = outer_index; + + Some(EmitterScopeWalkItem { + inner: self.stack.get(inner_index), + outer: self.stack.get(outer_index), + }) + } +} diff --git a/third_party/rust/jsparagus-emitter/src/expression_emitter.rs b/third_party/rust/jsparagus-emitter/src/expression_emitter.rs new file mode 100644 index 0000000000..45f0e10664 --- /dev/null +++ b/third_party/rust/jsparagus-emitter/src/expression_emitter.rs @@ -0,0 +1,31 @@ +use crate::ast_emitter::AstEmitter; +use crate::emitter::EmitError; + +pub struct ExpressionEmitter<F> +where + F: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub expr: F, +} + +impl<F> ExpressionEmitter<F> +where + F: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + // [stack] + + (self.expr)(emitter)?; + // [stack] VAL + + if emitter.options.no_script_rval { + emitter.emit.pop(); + // [stack] + } else { + emitter.emit.set_rval(); + // [stack] + } + + Ok(()) + } +} diff --git a/third_party/rust/jsparagus-emitter/src/function_declaration_emitter.rs b/third_party/rust/jsparagus-emitter/src/function_declaration_emitter.rs new file mode 100644 index 0000000000..5eefd5b215 --- /dev/null +++ b/third_party/rust/jsparagus-emitter/src/function_declaration_emitter.rs @@ -0,0 +1,77 @@ +use crate::ast_emitter::AstEmitter; +use crate::emitter::EmitError; +use crate::reference_op_emitter::{ + AssignmentEmitter, DeclarationEmitter, GetNameEmitter, NameReferenceEmitter, +}; +use ast::source_atom_set::SourceAtomSetIndex; +use stencil::gcthings::GCThingIndex; +use stencil::script::ScriptStencilIndex; + +pub struct LazyFunctionEmitter { + pub stencil_index: ScriptStencilIndex, +} + +impl LazyFunctionEmitter { + pub fn emit(self, emitter: &mut AstEmitter) -> GCThingIndex { + emitter + .compilation_info + .scripts + .get_mut(self.stencil_index) + .set_function_emitted(); + emitter.emit.get_function_gcthing_index(self.stencil_index) + } +} + +pub struct LexicalFunctionDeclarationEmitter { + pub name: SourceAtomSetIndex, + pub fun_index: GCThingIndex, +} + +impl LexicalFunctionDeclarationEmitter { + pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + DeclarationEmitter 
{ + lhs: |emitter| { + Ok(NameReferenceEmitter { name: self.name }.emit_for_declaration(emitter)) + }, + rhs: |emitter| { + emitter.emit.lambda(self.fun_index); + Ok(()) + }, + } + .emit(emitter)?; + + emitter.emit.pop(); + + Ok(()) + } +} + +pub struct AnnexBFunctionDeclarationEmitter { + pub name: SourceAtomSetIndex, + pub fun_index: GCThingIndex, +} + +impl AnnexBFunctionDeclarationEmitter { + pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + LexicalFunctionDeclarationEmitter { + name: self.name, + fun_index: self.fun_index, + } + .emit(emitter)?; + + AssignmentEmitter { + lhs: |emitter| { + Ok(NameReferenceEmitter { name: self.name }.emit_for_var_assignment(emitter)) + }, + rhs: |emitter| { + GetNameEmitter { name: self.name }.emit(emitter); + Ok(()) + }, + } + .emit(emitter)?; + + emitter.emit.pop(); + + Ok(()) + } +} diff --git a/third_party/rust/jsparagus-emitter/src/lib.rs b/third_party/rust/jsparagus-emitter/src/lib.rs new file mode 100644 index 0000000000..e4b5d49bc1 --- /dev/null +++ b/third_party/rust/jsparagus-emitter/src/lib.rs @@ -0,0 +1,177 @@ +mod array_emitter; +mod ast_emitter; +mod block_emitter; +mod compilation_info; +mod control_structures; +mod dis; +mod emitter; +mod emitter_scope; +mod expression_emitter; +mod function_declaration_emitter; +mod object_emitter; +mod reference_op_emitter; +mod script_emitter; + +extern crate jsparagus_ast as ast; +extern crate jsparagus_scope as scope; +extern crate jsparagus_stencil as stencil; + +pub use crate::emitter::{EmitError, EmitOptions}; +pub use dis::dis; + +use crate::compilation_info::CompilationInfo; + +use ast::source_atom_set::SourceAtomSet; +use ast::source_slice_list::SourceSliceList; +use scope::{ScopeBuildError, ScopePassResult}; +use stencil::result::EmitResult; + +pub fn emit<'alloc>( + ast: &'alloc ast::types::Program<'alloc>, + options: &EmitOptions, + atoms: SourceAtomSet<'alloc>, + slices: SourceSliceList<'alloc>, +) -> Result<EmitResult<'alloc>, EmitError> { + let ScopePassResult { + scope_data_map, + function_declarations, + function_stencil_indices, + function_declaration_properties, + scripts, + error, + } = scope::generate_scope_data(ast); + + // Error case for scope analysis will be removed once all syntax is + // supported. Use field instead of Result type here for simplicity. 
+ match error { + Some(ScopeBuildError::NotImplemented(s)) => { + return Err(EmitError::NotImplemented(s)); + } + None => {} + } + + let compilation_info = CompilationInfo::new( + atoms, + slices, + scope_data_map, + function_declarations, + function_stencil_indices, + function_declaration_properties, + scripts, + ); + ast_emitter::emit_program(ast, options, compilation_info) +} + +#[cfg(test)] +mod tests { + extern crate jsparagus_parser as parser; + + use super::{emit, EmitOptions}; + use crate::dis::*; + use ast::source_atom_set::SourceAtomSet; + use ast::source_slice_list::SourceSliceList; + use bumpalo::Bump; + use parser::{parse_script, ParseOptions}; + use std::cell::RefCell; + use std::convert::TryInto; + use std::rc::Rc; + use stencil::opcode::*; + use stencil::script::SourceExtent; + + fn bytecode(source: &str) -> Vec<u8> { + let alloc = &Bump::new(); + let parse_options = ParseOptions::new(); + let atoms = Rc::new(RefCell::new(SourceAtomSet::new())); + let slices = Rc::new(RefCell::new(SourceSliceList::new())); + let source_len = source.len(); + let parse_result = + parse_script(alloc, source, &parse_options, atoms.clone(), slices.clone()) + .expect("Failed to parse"); + // println!("{:?}", parse_result); + + let extent = SourceExtent::top_level_script(source_len.try_into().unwrap(), 1, 0); + let emit_options = EmitOptions::new(extent); + + let result = emit( + alloc.alloc(ast::types::Program::Script(parse_result.unbox())), + &emit_options, + atoms.replace(SourceAtomSet::new_uninitialized()), + slices.replace(SourceSliceList::new()), + ) + .expect("Should work!"); + + let script_data_index: usize = result.scripts[0] + .immutable_script_data + .expect("Top level script should have ImmutableScriptData") + .into(); + let script_data = &result.script_data_list[script_data_index]; + let bytecode = &script_data.bytecode; + + println!("{}", dis(&bytecode)); + bytecode.to_vec() + } + + #[test] + fn it_works() { + assert_eq!( + bytecode("2 + 2"), + vec![ + Opcode::Int8 as u8, + 2, + Opcode::Int8 as u8, + 2, + Opcode::Add as u8, + Opcode::SetRval as u8, + Opcode::RetRval as u8, + ] + ) + } + + #[test] + fn dis_call() { + assert_eq!( + bytecode("dis()"), + vec![ + Opcode::GetGName as u8, + 1, + 0, + 0, + 0, + Opcode::Undefined as u8, + Opcode::Call as u8, + 0, + 0, + Opcode::SetRval as u8, + Opcode::RetRval as u8, + ] + ) + } + + #[test] + fn literals() { + assert_eq!( + bytecode("true"), + vec![ + Opcode::True as u8, + Opcode::SetRval as u8, + Opcode::RetRval as u8, + ] + ); + assert_eq!( + bytecode("false"), + vec![ + Opcode::False as u8, + Opcode::SetRval as u8, + Opcode::RetRval as u8, + ] + ); + //assert_eq!( + // bytecode("'hello world'"), + // vec![ + // Opcode::String as u8, 0, 0, 0, 0, + // Opcode::SetRval as u8, + // Opcode::RetRval as u8, + // ] + //); + } +} diff --git a/third_party/rust/jsparagus-emitter/src/object_emitter.rs b/third_party/rust/jsparagus-emitter/src/object_emitter.rs new file mode 100644 index 0000000000..9f77382aa4 --- /dev/null +++ b/third_party/rust/jsparagus-emitter/src/object_emitter.rs @@ -0,0 +1,162 @@ +use crate::ast_emitter::AstEmitter; +use crate::emitter::EmitError; +use ast::source_atom_set::SourceAtomSetIndex; + +/// Struct for emitting bytecode for a property where its name is string. +/// +/// If the property name is a string representing a number, it is considered an +/// index. 
In this case, NamePropertyEmitter falls back internally to +/// IndexPropertyEmitter +pub struct NamePropertyEmitter<'a, F> +where + F: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub state: &'a mut ObjectEmitterState, + pub key: SourceAtomSetIndex, + pub value: F, +} + +impl<'a, F> NamePropertyEmitter<'a, F> +where + F: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + // [stack] OBJ + + match self.to_property_index(emitter, self.key) { + Some(value) => { + IndexPropertyEmitter { + state: self.state, + key: value as f64, + value: self.value, + } + .emit(emitter)?; + // [stack] OBJ + } + None => { + let name_index = emitter.emit.get_atom_gcthing_index(self.key); + + (self.value)(emitter)?; + // [stack] OBJ VALUE + + emitter.emit.init_prop(name_index); + // [stack] OBJ + } + } + Ok(()) + } + + fn to_property_index( + &self, + emitter: &mut AstEmitter, + index: SourceAtomSetIndex, + ) -> Option<u32> { + let s = emitter.compilation_info.atoms.get(index); + s.parse::<u32>().ok() + } +} + +/// Struct for emitting bytecode for a property where its name is number. +pub struct IndexPropertyEmitter<'a, F> +where + F: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub state: &'a mut ObjectEmitterState, + pub key: f64, + pub value: F, +} + +impl<'a, F> IndexPropertyEmitter<'a, F> +where + F: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + // [stack] OBJ + + emitter.emit.numeric(self.key); + // [stack] OBJ KEY + + (self.value)(emitter)?; + // [stack] OBJ KEY VALUE + + emitter.emit.init_elem(); + // [stack] OBJ + + Ok(()) + } +} + +/// Struct for emitting bytecode for a computed property. +pub struct ComputedPropertyEmitter<'a, F1, F2> +where + F1: Fn(&mut AstEmitter) -> Result<(), EmitError>, + F2: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub state: &'a mut ObjectEmitterState, + pub key: F1, + pub value: F2, +} + +impl<'a, F1, F2> ComputedPropertyEmitter<'a, F1, F2> +where + F1: Fn(&mut AstEmitter) -> Result<(), EmitError>, + F2: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + // [stack] OBJ + + (self.key)(emitter)?; + // [stack] OBJ KEY + + (self.value)(emitter)?; + // [stack] OBJ KEY VALUE + + emitter.emit.init_elem(); + // [stack] OBJ + + Ok(()) + } +} + +/// No state so far. +struct ObjectEmitterStateInternal {} + +/// Opaque struct that can be created only by ObjectEmitter. +/// This guarantees that *PropertyEmitter structs cannot be used outside +/// of ObjectEmitter callback. 
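+///
+/// The only constructor, `ObjectEmitterState::new`, is private to this module
+/// and is invoked from `ObjectEmitter::emit`.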
+pub struct ObjectEmitterState(ObjectEmitterStateInternal); + +impl ObjectEmitterState { + fn new() -> Self { + Self(ObjectEmitterStateInternal {}) + } +} + +pub struct ObjectEmitter<'a, PropT, PropF> +where + PropF: Fn(&mut AstEmitter, &mut ObjectEmitterState, &PropT) -> Result<(), EmitError>, +{ + pub properties: std::slice::Iter<'a, PropT>, + pub prop: PropF, +} + +impl<'a, PropT, PropF> ObjectEmitter<'a, PropT, PropF> +where + PropF: Fn(&mut AstEmitter, &mut ObjectEmitterState, &PropT) -> Result<(), EmitError>, +{ + pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + // [stack] + + emitter.emit.new_init(); + // [stack] OBJ + + let mut state = ObjectEmitterState::new(); + + for prop in self.properties { + (self.prop)(emitter, &mut state, prop)?; + // [stack] OBJ + } + + Ok(()) + } +} diff --git a/third_party/rust/jsparagus-emitter/src/reference_op_emitter.rs b/third_party/rust/jsparagus-emitter/src/reference_op_emitter.rs new file mode 100644 index 0000000000..4a784e5b04 --- /dev/null +++ b/third_party/rust/jsparagus-emitter/src/reference_op_emitter.rs @@ -0,0 +1,829 @@ +use crate::ast_emitter::AstEmitter; +use crate::emitter::EmitError; +use crate::emitter_scope::NameLocation; +use ast::source_atom_set::SourceAtomSetIndex; +use stencil::env_coord::{EnvironmentHops, EnvironmentSlot}; +use stencil::frame_slot::FrameSlot; +use stencil::gcthings::GCThingIndex; +use stencil::scope::BindingKind; + +#[derive(Debug, PartialEq)] +enum AssignmentReferenceKind { + GlobalVar(GCThingIndex), + GlobalLexical(GCThingIndex), + FrameSlotLexical(FrameSlot), + FrameSlotNonLexical(FrameSlot), + EnvironmentCoordLexical(EnvironmentHops, EnvironmentSlot), + EnvironmentCoordNonLexical(EnvironmentHops, EnvironmentSlot), + Dynamic(GCThingIndex), + #[allow(dead_code)] + Prop(GCThingIndex), + #[allow(dead_code)] + Elem, +} + +// See AssignmentReferenceEmitter. +// This uses struct to hide the details from the consumer. +#[derive(Debug)] +#[must_use] +pub struct AssignmentReference { + kind: AssignmentReferenceKind, +} +impl AssignmentReference { + fn new(kind: AssignmentReferenceKind) -> Self { + Self { kind } + } + + fn stack_slots(&self) -> usize { + match self.kind { + AssignmentReferenceKind::GlobalVar(_) => 1, + AssignmentReferenceKind::GlobalLexical(_) => 1, + AssignmentReferenceKind::FrameSlotLexical(_) => 0, + AssignmentReferenceKind::FrameSlotNonLexical(_) => 0, + AssignmentReferenceKind::EnvironmentCoordLexical(_, _) => 0, + AssignmentReferenceKind::EnvironmentCoordNonLexical(_, _) => 0, + AssignmentReferenceKind::Dynamic(_) => 1, + AssignmentReferenceKind::Prop(_) => 1, + AssignmentReferenceKind::Elem => 2, + } + } +} + +#[derive(Debug, PartialEq)] +enum DeclarationReferenceKind { + GlobalVar(GCThingIndex), + GlobalLexical(GCThingIndex), + FrameSlot(FrameSlot), + EnvironmentCoord(EnvironmentHops, EnvironmentSlot), +} + +// See DeclarationReferenceEmitter. +// This uses struct to hide the details from the consumer. +#[derive(Debug)] +#[must_use] +pub struct DeclarationReference { + kind: DeclarationReferenceKind, +} +impl DeclarationReference { + fn new(kind: DeclarationReferenceKind) -> Self { + Self { kind } + } +} + +#[derive(Debug, PartialEq)] +enum CallKind { + Normal, + // FIXME: Support eval, Function#call, Function#apply etc. 
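+    // Each of those call forms would get its own variant here, so that
+    // CallEmitter::emit can choose the matching call opcode when it matches
+    // on CallReference::kind.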
+} + +#[derive(Debug, PartialEq)] +enum ValueIsOnStack { + No, + Yes, +} + +fn check_frame_temporary_dead_zone( + emitter: &mut AstEmitter, + slot: FrameSlot, + is_on_stack: ValueIsOnStack, +) { + // FIXME: Use cache to avoid emitting check_lexical twice or more. + // FIXME: Support aliased lexical. + + // [stack] VAL? + + if is_on_stack == ValueIsOnStack::No { + emitter.emit.get_local(slot.into()); + // [stack] VAL + } + + emitter.emit.check_lexical(slot.into()); + // [stack] VAL + + if is_on_stack == ValueIsOnStack::No { + emitter.emit.pop(); + // [stack] + } + + // [stack] VAL? +} + +fn check_env_temporary_dead_zone( + emitter: &mut AstEmitter, + hops: EnvironmentHops, + slot: EnvironmentSlot, + is_on_stack: ValueIsOnStack, +) { + // FIXME: Use cache to avoid emitting check_lexical twice or more. + // FIXME: Support aliased lexical. + + // [stack] VAL? + + if is_on_stack == ValueIsOnStack::No { + emitter.emit.get_aliased_var(hops.into(), slot.into()); + // [stack] VAL + } + + emitter.emit.check_aliased_lexical(hops.into(), slot.into()); + // [stack] VAL + + if is_on_stack == ValueIsOnStack::No { + emitter.emit.pop(); + // [stack] + } + + // [stack] VAL? +} + +// See *ReferenceEmitter. +// This uses struct to hide the details from the consumer. +#[derive(Debug)] +#[must_use] +pub struct CallReference { + kind: CallKind, +} +impl CallReference { + fn new(kind: CallKind) -> Self { + Self { kind } + } +} + +// Struct for emitting bytecode for get `name` operation. +pub struct GetNameEmitter { + pub name: SourceAtomSetIndex, +} +impl GetNameEmitter { + pub fn emit(self, emitter: &mut AstEmitter) { + let name_index = emitter.emit.get_atom_gcthing_index(self.name); + let loc = emitter.lookup_name(self.name); + + // [stack] + + match loc { + NameLocation::Global(_kind) => { + emitter.emit.get_g_name(name_index); + // [stack] VAL + } + NameLocation::Dynamic => { + emitter.emit.get_name(name_index); + // [stack] VAL + } + NameLocation::FrameSlot(slot, kind) => { + emitter.emit.get_local(slot.into()); + // [stack] VAL + + if kind == BindingKind::Let || kind == BindingKind::Const { + check_frame_temporary_dead_zone(emitter, slot, ValueIsOnStack::Yes); + // [stack] VAL + } + } + NameLocation::EnvironmentCoord(hops, slot, kind) => { + emitter.emit.get_aliased_var(hops.into(), slot.into()); + + if kind == BindingKind::Let || kind == BindingKind::Const { + check_env_temporary_dead_zone(emitter, hops, slot, ValueIsOnStack::Yes); + // [stack] VAL + } + } + } + } +} + +// Struct for emitting bytecode for get `obj.key` operation. +pub struct GetPropEmitter<F> +where + F: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub obj: F, + pub key: SourceAtomSetIndex, +} +impl<F> GetPropEmitter<F> +where + F: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + let key_index = emitter.emit.get_atom_gcthing_index(self.key); + + // [stack] + + let depth = emitter.emit.stack_depth(); + (self.obj)(emitter)?; + debug_assert_eq!(emitter.emit.stack_depth(), depth + 1); + // [stack] OBJ + + emitter.emit.get_prop(key_index); + // [stack] VAL + + Ok(()) + } +} + +// Struct for emitting bytecode for get `super.key` operation. 
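+// The receiver (`this`) is pushed first; Callee and SuperBase then compute
+// the object to look the property up on, and GetPropSuper reads `key` from it
+// while using the original receiver as `this`.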
+pub struct GetSuperPropEmitter<F> +where + F: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub this: F, + pub key: SourceAtomSetIndex, +} +impl<F> GetSuperPropEmitter<F> +where + F: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + let key_index = emitter.emit.get_atom_gcthing_index(self.key); + + // [stack] + + let depth = emitter.emit.stack_depth(); + (self.this)(emitter)?; + debug_assert_eq!(emitter.emit.stack_depth(), depth + 1); + // [stack] THIS + + emitter.emit.callee(); + // [stack] THIS CALLEE + + emitter.emit.super_base(); + // [stack] THIS OBJ + + emitter.emit.get_prop_super(key_index); + // [stack] VAL + + Ok(()) + } +} + +// Struct for emitting bytecode for get `obj[key]` operation. +pub struct GetElemEmitter<F1, F2> +where + F1: Fn(&mut AstEmitter) -> Result<(), EmitError>, + F2: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub obj: F1, + pub key: F2, +} +impl<F1, F2> GetElemEmitter<F1, F2> +where + F1: Fn(&mut AstEmitter) -> Result<(), EmitError>, + F2: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + // [stack] + + let depth = emitter.emit.stack_depth(); + (self.obj)(emitter)?; + debug_assert_eq!(emitter.emit.stack_depth(), depth + 1); + // [stack] OBJ + + let depth = emitter.emit.stack_depth(); + (self.key)(emitter)?; + debug_assert_eq!(emitter.emit.stack_depth(), depth + 1); + // [stack] OBJ KEY + + emitter.emit.get_elem(); + // [stack] VAL + + Ok(()) + } +} + +// Struct for emitting bytecode for get `super[key]` operation. +pub struct GetSuperElemEmitter<F1, F2> +where + F1: Fn(&mut AstEmitter) -> Result<(), EmitError>, + F2: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub this: F1, + pub key: F2, +} +impl<F1, F2> GetSuperElemEmitter<F1, F2> +where + F1: Fn(&mut AstEmitter) -> Result<(), EmitError>, + F2: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + // [stack] + + let depth = emitter.emit.stack_depth(); + (self.this)(emitter)?; + debug_assert_eq!(emitter.emit.stack_depth(), depth + 1); + // [stack] THIS + + let depth = emitter.emit.stack_depth(); + (self.key)(emitter)?; + debug_assert_eq!(emitter.emit.stack_depth(), depth + 1); + // [stack] THIS KEY + + emitter.emit.callee(); + // [stack] THIS KEY CALLEE + + emitter.emit.super_base(); + // [stack] THIS KEY OBJ + + emitter.emit.get_elem_super(); + // [stack] VAL + + Ok(()) + } +} + +// Struct for emitting bytecode for `name` reference. +pub struct NameReferenceEmitter { + pub name: SourceAtomSetIndex, +} +impl NameReferenceEmitter { + pub fn emit_for_call(self, emitter: &mut AstEmitter) -> CallReference { + let name_index = emitter.emit.get_atom_gcthing_index(self.name); + let loc = emitter.lookup_name(self.name); + + // [stack] + + match loc { + NameLocation::Global(_kind) => { + emitter.emit.get_g_name(name_index); + // [stack] CALLEE + + // NOTE: We don't support non-syntactic scope. + // See NameOpEmitter::emitGet in SpiderMonkey for omitted + // cases. + + emitter.emit.undefined(); + // [stack] CALLEE THIS + } + NameLocation::Dynamic => { + emitter.emit.get_name(name_index); + // [stack] CALLEE + + // NOTE: We don't support non-syntactic scope or with statement. + // See NameOpEmitter::emitGet in SpiderMonkey for omitted + // cases. 
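+                // Without those, `undefined` can always be used as the
+                // implicit `this` value for the call.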
+ + emitter.emit.undefined(); + // [stack] CALLEE THIS + } + NameLocation::FrameSlot(slot, kind) => { + emitter.emit.get_local(slot.into()); + // [stack] CALLEE + + if kind == BindingKind::Let || kind == BindingKind::Const { + check_frame_temporary_dead_zone(emitter, slot, ValueIsOnStack::Yes); + // [stack] CALLEE + } + + emitter.emit.undefined(); + // [stack] CALLEE THIS + } + NameLocation::EnvironmentCoord(hops, slot, kind) => { + emitter.emit.get_aliased_var(hops.into(), slot.into()); + // [stack] CALLEE + + if kind == BindingKind::Let || kind == BindingKind::Const { + check_env_temporary_dead_zone(emitter, hops, slot, ValueIsOnStack::Yes); + // [stack] CALLEE + } + + emitter.emit.undefined(); + // [stack] CALLEE THIS + } + } + + CallReference::new(CallKind::Normal) + } + + pub fn emit_for_assignment_with_loc( + self, + emitter: &mut AstEmitter, + loc: NameLocation, + ) -> AssignmentReference { + let name_index = emitter.emit.get_atom_gcthing_index(self.name); + + // [stack] + + match loc { + NameLocation::Global(kind) => match kind { + BindingKind::Var => { + emitter.emit.bind_g_name(name_index); + // [stack] GLOBAL + AssignmentReference::new(AssignmentReferenceKind::GlobalVar(name_index)) + } + BindingKind::Let | BindingKind::Const => { + emitter.emit.bind_g_name(name_index); + // [stack] GLOBAL + AssignmentReference::new(AssignmentReferenceKind::GlobalLexical(name_index)) + } + }, + NameLocation::Dynamic => { + emitter.emit.bind_name(name_index); + // [stack] ENV + + AssignmentReference::new(AssignmentReferenceKind::Dynamic(name_index)) + } + NameLocation::FrameSlot(slot, kind) => { + if kind == BindingKind::Let || kind == BindingKind::Const { + AssignmentReference::new(AssignmentReferenceKind::FrameSlotLexical(slot)) + } else { + AssignmentReference::new(AssignmentReferenceKind::FrameSlotNonLexical(slot)) + } + } + NameLocation::EnvironmentCoord(hops, slot, kind) => { + if kind == BindingKind::Let || kind == BindingKind::Const { + AssignmentReference::new(AssignmentReferenceKind::EnvironmentCoordLexical( + hops, slot, + )) + } else { + AssignmentReference::new(AssignmentReferenceKind::EnvironmentCoordNonLexical( + hops, slot, + )) + } + } + } + } + + pub fn emit_for_assignment(self, emitter: &mut AstEmitter) -> AssignmentReference { + let loc = emitter.lookup_name(self.name); + self.emit_for_assignment_with_loc(emitter, loc) + } + + /// Ignore any lexical scope and assign to var scope. + /// Used by Annex B function. + pub fn emit_for_var_assignment(self, emitter: &mut AstEmitter) -> AssignmentReference { + let loc = emitter.lookup_name_in_var(self.name); + self.emit_for_assignment_with_loc(emitter, loc) + } + + pub fn emit_for_declaration(self, emitter: &mut AstEmitter) -> DeclarationReference { + let name_index = emitter.emit.get_atom_gcthing_index(self.name); + let loc = emitter.lookup_name(self.name); + + // [stack] + + match loc { + NameLocation::Global(kind) => match kind { + BindingKind::Var => { + emitter.emit.bind_g_name(name_index); + // [stack] GLOBAL + DeclarationReference::new(DeclarationReferenceKind::GlobalVar(name_index)) + } + BindingKind::Let | BindingKind::Const => { + DeclarationReference::new(DeclarationReferenceKind::GlobalLexical(name_index)) + } + }, + NameLocation::Dynamic => { + panic!("declaration should have non-dynamic location"); + } + NameLocation::FrameSlot(slot, _kind) => { + DeclarationReference::new(DeclarationReferenceKind::FrameSlot(slot)) + } + NameLocation::EnvironmentCoord(hops, slot, _kind) => { + // FIXME: does this happen???? 
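+                // If it does (e.g. a lexical binding that ends up aliased),
+                // the coordinate is recorded as-is and DeclarationEmitter
+                // emits InitAliasedLexical for it.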
+ DeclarationReference::new(DeclarationReferenceKind::EnvironmentCoord(hops, slot)) + } + } + } +} + +// Struct for emitting bytecode for `obj.key` reference. +pub struct PropReferenceEmitter<F> +where + F: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub obj: F, + pub key: SourceAtomSetIndex, +} +impl<F> PropReferenceEmitter<F> +where + F: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub fn emit_for_call(self, emitter: &mut AstEmitter) -> Result<CallReference, EmitError> { + let key_index = emitter.emit.get_atom_gcthing_index(self.key); + + // [stack] + + let depth = emitter.emit.stack_depth(); + (self.obj)(emitter)?; + debug_assert_eq!(emitter.emit.stack_depth(), depth + 1); + // [stack] THIS + + emitter.emit.dup(); + // [stack] THIS THIS + + // FIXME: Support super. + emitter.emit.get_prop(key_index); + // [stack] THIS CALLEE + + emitter.emit.swap(); + // [stack] CALLEE THIS + + Ok(CallReference::new(CallKind::Normal)) + } + + #[allow(dead_code)] + pub fn emit_for_assignment( + self, + emitter: &mut AstEmitter, + ) -> Result<AssignmentReference, EmitError> { + let key_index = emitter.emit.get_atom_gcthing_index(self.key); + + // [stack] + + let depth = emitter.emit.stack_depth(); + (self.obj)(emitter)?; + debug_assert_eq!(emitter.emit.stack_depth(), depth + 1); + // [stack] OBJ + + Ok(AssignmentReference::new(AssignmentReferenceKind::Prop( + key_index, + ))) + } +} + +// Struct for emitting bytecode for `obj[key]` reference. +pub struct ElemReferenceEmitter<F1, F2> +where + F1: Fn(&mut AstEmitter) -> Result<(), EmitError>, + F2: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub obj: F1, + pub key: F2, +} +impl<F1, F2> ElemReferenceEmitter<F1, F2> +where + F1: Fn(&mut AstEmitter) -> Result<(), EmitError>, + F2: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub fn emit_for_call(self, emitter: &mut AstEmitter) -> Result<CallReference, EmitError> { + // [stack] + + let depth = emitter.emit.stack_depth(); + (self.obj)(emitter)?; + debug_assert_eq!(emitter.emit.stack_depth(), depth + 1); + // [stack] THIS + + emitter.emit.dup(); + // [stack] THIS THIS + + let depth = emitter.emit.stack_depth(); + (self.key)(emitter)?; + debug_assert_eq!(emitter.emit.stack_depth(), depth + 1); + // [stack] THIS THIS KEY + + // FIXME: Support super. + emitter.emit.get_elem(); + // [stack] THIS CALLEE + + emitter.emit.swap(); + // [stack] CALLEE THIS + + Ok(CallReference::new(CallKind::Normal)) + } + + #[allow(dead_code)] + pub fn emit_for_assignment( + self, + emitter: &mut AstEmitter, + ) -> Result<AssignmentReference, EmitError> { + // [stack] + + let depth = emitter.emit.stack_depth(); + (self.obj)(emitter)?; + debug_assert_eq!(emitter.emit.stack_depth(), depth + 1); + // [stack] OBJ + + let depth = emitter.emit.stack_depth(); + (self.key)(emitter)?; + debug_assert_eq!(emitter.emit.stack_depth(), depth + 1); + // [stack] OBJ KEY + + Ok(AssignmentReference::new(AssignmentReferenceKind::Elem)) + } +} + +// Struct for emitting bytecode for call `callee(arguments)` operation. 
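+// The `callee` closure must leave CALLEE and THIS on the stack (the
+// emit_for_call methods above do exactly that), and the `arguments` closure
+// returns the number of values it pushed; the call opcode then consumes all
+// of them and pushes the result.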
+pub struct CallEmitter<F1, F2> +where + F1: Fn(&mut AstEmitter) -> Result<CallReference, EmitError>, + F2: Fn(&mut AstEmitter) -> Result<usize, EmitError>, +{ + pub callee: F1, + pub arguments: F2, +} +impl<F1, F2> CallEmitter<F1, F2> +where + F1: Fn(&mut AstEmitter) -> Result<CallReference, EmitError>, + F2: Fn(&mut AstEmitter) -> Result<usize, EmitError>, +{ + pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + // [stack] + + let depth = emitter.emit.stack_depth(); + let reference = (self.callee)(emitter)?; + debug_assert_eq!(emitter.emit.stack_depth(), depth + 2); + // [stack] CALLEE THIS + + // FIXME: Support spread. + let depth = emitter.emit.stack_depth(); + let len = (self.arguments)(emitter)?; + debug_assert_eq!(emitter.emit.stack_depth(), depth + len); + // [stack] CALLEE THIS ARGS... + + match reference.kind { + CallKind::Normal => { + emitter.emit.call(len as u16); + // [stack] VAL + } + } + + Ok(()) + } +} + +// Struct for emitting bytecode for `new callee(arguments)` operation. +pub struct NewEmitter<F1, F2> +where + F1: Fn(&mut AstEmitter) -> Result<(), EmitError>, + F2: Fn(&mut AstEmitter) -> Result<usize, EmitError>, +{ + pub callee: F1, + pub arguments: F2, +} +impl<F1, F2> NewEmitter<F1, F2> +where + F1: Fn(&mut AstEmitter) -> Result<(), EmitError>, + F2: Fn(&mut AstEmitter) -> Result<usize, EmitError>, +{ + pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + // [stack] + + let depth = emitter.emit.stack_depth(); + (self.callee)(emitter)?; + debug_assert_eq!(emitter.emit.stack_depth(), depth + 1); + // [stack] CALLEE + + emitter.emit.is_constructing(); + // [stack] CALLEE JS_IS_CONSTRUCTING + + // FIXME: Support spread. + let depth = emitter.emit.stack_depth(); + let len = (self.arguments)(emitter)?; + debug_assert_eq!(emitter.emit.stack_depth(), depth + len); + // [stack] CALLEE JS_IS_CONSTRUCTING ARGS... + + emitter.emit.dup_at(len as u32 + 1); + // [stack] CALLEE JS_IS_CONSTRUCTING ARGS... CALLEE + + emitter.emit.new_(len as u16); + // [stack] VAL + + Ok(()) + } +} + +// Struct for emitting bytecode for assignment `lhs = rhs` operation. +pub struct AssignmentEmitter<F1, F2> +where + F1: Fn(&mut AstEmitter) -> Result<AssignmentReference, EmitError>, + F2: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub lhs: F1, + pub rhs: F2, +} +impl<F1, F2> AssignmentEmitter<F1, F2> +where + F1: Fn(&mut AstEmitter) -> Result<AssignmentReference, EmitError>, + F2: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + // [stack] + + let depth = emitter.emit.stack_depth(); + let reference = (self.lhs)(emitter)?; + debug_assert_eq!(emitter.emit.stack_depth(), depth + reference.stack_slots()); + // [stack] REF... + + let depth = emitter.emit.stack_depth(); + (self.rhs)(emitter)?; + debug_assert_eq!(emitter.emit.stack_depth(), depth + 1); + // [stack] REF... 
VAL + + match reference.kind { + AssignmentReferenceKind::GlobalVar(name_index) => { + // [stack] GLOBAL VAL + + emitter.emit.set_g_name(name_index); + // [stack] VAL + } + AssignmentReferenceKind::GlobalLexical(name_index) => { + // [stack] VAL + + emitter.emit.set_g_name(name_index); + // [stack] VAL + } + AssignmentReferenceKind::Dynamic(name_index) => { + // [stack] ENV VAL + + emitter.emit.set_name(name_index); + // [stack] VAL + } + AssignmentReferenceKind::FrameSlotLexical(slot) => { + // [stack] VAL + + check_frame_temporary_dead_zone(emitter, slot, ValueIsOnStack::No); + // [stack] VAL + + emitter.emit.set_local(slot.into()); + // [stack] VAL + } + AssignmentReferenceKind::FrameSlotNonLexical(slot) => { + // [stack] VAL + + emitter.emit.set_local(slot.into()); + // [stack] VAL + } + AssignmentReferenceKind::EnvironmentCoordLexical(hops, slot) => { + // [stack] VAL + + check_env_temporary_dead_zone(emitter, hops, slot, ValueIsOnStack::No); + // [stack] VAL + + emitter.emit.set_aliased_var(hops.into(), slot.into()); + // [stack] VAL + } + AssignmentReferenceKind::EnvironmentCoordNonLexical(hops, slot) => { + // [stack] VAL + + emitter.emit.set_aliased_var(hops.into(), slot.into()); + // [stack] VAL + } + AssignmentReferenceKind::Prop(key_index) => { + // [stack] OBJ VAL + + // FIXME: Support strict mode and super. + emitter.emit.set_prop(key_index); + // [stack] VAL + } + AssignmentReferenceKind::Elem => { + // [stack] OBJ KEY VAL + + // FIXME: Support strict mode and super. + emitter.emit.set_elem(); + // [stack] VAL + } + } + + Ok(()) + } + + // FIXME: Support compound assignment +} + +// Struct for emitting bytecode for declaration `lhs = rhs` operation. +pub struct DeclarationEmitter<F1, F2> +where + F1: Fn(&mut AstEmitter) -> Result<DeclarationReference, EmitError>, + F2: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub lhs: F1, + pub rhs: F2, +} +impl<F1, F2> DeclarationEmitter<F1, F2> +where + F1: Fn(&mut AstEmitter) -> Result<DeclarationReference, EmitError>, + F2: Fn(&mut AstEmitter) -> Result<(), EmitError>, +{ + pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + let reference = (self.lhs)(emitter)?; + + (self.rhs)(emitter)?; + + match reference.kind { + DeclarationReferenceKind::GlobalVar(name_index) => { + // [stack] GLOBAL VAL + + emitter.emit.set_g_name(name_index); + // [stack] VAL + } + DeclarationReferenceKind::GlobalLexical(name_index) => { + // [stack] VAL + + emitter.emit.init_g_lexical(name_index); + // [stack] VAL + } + DeclarationReferenceKind::FrameSlot(slot) => { + // [stack] VAL + + emitter.emit.init_lexical(slot.into()); + // [stack] VAL + } + DeclarationReferenceKind::EnvironmentCoord(hops, slot) => { + // [stack] VAL + + emitter.emit.init_aliased_lexical(hops.into(), slot.into()); + // [stack] VAL + } + } + + Ok(()) + } +} + +// FIXME: Add increment diff --git a/third_party/rust/jsparagus-emitter/src/script_emitter.rs b/third_party/rust/jsparagus-emitter/src/script_emitter.rs new file mode 100644 index 0000000000..36c24ca594 --- /dev/null +++ b/third_party/rust/jsparagus-emitter/src/script_emitter.rs @@ -0,0 +1,43 @@ +use crate::ast_emitter::AstEmitter; +use crate::emitter::EmitError; + +pub struct ScriptEmitter<'a, FuncT, FuncF, StmtT, StmtF> +where + FuncF: Fn(&mut AstEmitter, &FuncT) -> Result<(), EmitError>, + StmtF: Fn(&mut AstEmitter, &StmtT) -> Result<(), EmitError>, +{ + pub top_level_functions: std::slice::Iter<'a, FuncT>, + pub top_level_function: FuncF, + pub statements: std::slice::Iter<'a, StmtT>, + pub statement: 
StmtF, +} + +impl<'a, FuncT, FuncF, StmtT, StmtF> ScriptEmitter<'a, FuncT, FuncF, StmtT, StmtF> +where + FuncF: Fn(&mut AstEmitter, &FuncT) -> Result<(), EmitError>, + StmtF: Fn(&mut AstEmitter, &StmtT) -> Result<(), EmitError>, +{ + pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> { + let scope_data_map = &emitter.compilation_info.scope_data_map; + + emitter.scope_stack.enter_global( + &mut emitter.emit, + scope_data_map, + self.top_level_functions.len() as u32, + ); + + for fun in self.top_level_functions { + (self.top_level_function)(emitter, fun)?; + } + + for statement in self.statements { + (self.statement)(emitter, statement)?; + } + + emitter.emit.ret_rval(); + + emitter.scope_stack.leave_global(&mut emitter.emit); + + Ok(()) + } +} |
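For orientation, here is a minimal sketch, not taken from the vendored sources, of how these emitters are meant to compose when lowering a call to a global name such as `dis()`. The module paths in the `use` lines and the assumption that ast_emitter.rs drives the emitters this way are mine; the opcode sequence noted at the end matches the dis_call test in lib.rs above.

// Sketch only: assumes the crate's modules are reachable under these paths.
use crate::ast_emitter::AstEmitter;
use crate::emitter::EmitError;
use crate::reference_op_emitter::{CallEmitter, NameReferenceEmitter};
use ast::source_atom_set::SourceAtomSetIndex;

fn emit_global_call(
    emitter: &mut AstEmitter,
    name: SourceAtomSetIndex,
) -> Result<(), EmitError> {
    CallEmitter {
        // Leaves CALLEE and THIS on the stack: GetGName + Undefined for a
        // global name.
        callee: |emitter| Ok(NameReferenceEmitter { name }.emit_for_call(emitter)),
        // `dis()` has no arguments, so nothing is pushed.
        arguments: |_emitter| Ok(0),
    }
    .emit(emitter)
    // For a global `dis` this yields: GetGName "dis"; Undefined; Call 0
    // (compare the dis_call test in lib.rs).
}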