Bug 1655961 - Update opcode for bug 1653567. r=nbp

Differential Revision: https://phabricator.services.mozilla.com/D85289
Tooru Fujisawa 2020-07-29 17:05:25 +00:00
Parent 1680b0465b
Commit 46f4638f20
31 changed files: 47345 additions and 57606 deletions


@ -30,7 +30,7 @@ rev = "61dcc364ac0d6d0816ab88a494bbf20d824b009b"
[source."https://github.com/mozilla-spidermonkey/jsparagus"]
git = "https://github.com/mozilla-spidermonkey/jsparagus"
replace-with = "vendored-sources"
rev = "b427056af05f0d30192a5b3acccb83306912e22b"
rev = "fb2a93267697c33d54e71be4bb5e8e768760c27c"
[source."https://github.com/kvark/spirv_cross"]
branch = "wgpu3"

Cargo.lock (generated)

@ -2399,7 +2399,7 @@ dependencies = [
[[package]]
name = "jsparagus"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=b427056af05f0d30192a5b3acccb83306912e22b#b427056af05f0d30192a5b3acccb83306912e22b"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=fb2a93267697c33d54e71be4bb5e8e768760c27c#fb2a93267697c33d54e71be4bb5e8e768760c27c"
dependencies = [
"jsparagus-ast",
"jsparagus-emitter",
@ -2413,7 +2413,7 @@ dependencies = [
[[package]]
name = "jsparagus-ast"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=b427056af05f0d30192a5b3acccb83306912e22b#b427056af05f0d30192a5b3acccb83306912e22b"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=fb2a93267697c33d54e71be4bb5e8e768760c27c#fb2a93267697c33d54e71be4bb5e8e768760c27c"
dependencies = [
"bumpalo",
"indexmap",
@ -2422,7 +2422,7 @@ dependencies = [
[[package]]
name = "jsparagus-emitter"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=b427056af05f0d30192a5b3acccb83306912e22b#b427056af05f0d30192a5b3acccb83306912e22b"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=fb2a93267697c33d54e71be4bb5e8e768760c27c#fb2a93267697c33d54e71be4bb5e8e768760c27c"
dependencies = [
"bumpalo",
"byteorder",
@ -2435,7 +2435,7 @@ dependencies = [
[[package]]
name = "jsparagus-generated-parser"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=b427056af05f0d30192a5b3acccb83306912e22b#b427056af05f0d30192a5b3acccb83306912e22b"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=fb2a93267697c33d54e71be4bb5e8e768760c27c#fb2a93267697c33d54e71be4bb5e8e768760c27c"
dependencies = [
"bumpalo",
"jsparagus-ast",
@ -2445,12 +2445,12 @@ dependencies = [
[[package]]
name = "jsparagus-json-log"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=b427056af05f0d30192a5b3acccb83306912e22b#b427056af05f0d30192a5b3acccb83306912e22b"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=fb2a93267697c33d54e71be4bb5e8e768760c27c#fb2a93267697c33d54e71be4bb5e8e768760c27c"
[[package]]
name = "jsparagus-parser"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=b427056af05f0d30192a5b3acccb83306912e22b#b427056af05f0d30192a5b3acccb83306912e22b"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=fb2a93267697c33d54e71be4bb5e8e768760c27c#fb2a93267697c33d54e71be4bb5e8e768760c27c"
dependencies = [
"arrayvec",
"bumpalo",
@ -2462,7 +2462,7 @@ dependencies = [
[[package]]
name = "jsparagus-scope"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=b427056af05f0d30192a5b3acccb83306912e22b#b427056af05f0d30192a5b3acccb83306912e22b"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=fb2a93267697c33d54e71be4bb5e8e768760c27c#fb2a93267697c33d54e71be4bb5e8e768760c27c"
dependencies = [
"indexmap",
"jsparagus-ast",
@ -2472,7 +2472,7 @@ dependencies = [
[[package]]
name = "jsparagus-stencil"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=b427056af05f0d30192a5b3acccb83306912e22b#b427056af05f0d30192a5b3acccb83306912e22b"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=fb2a93267697c33d54e71be4bb5e8e768760c27c#fb2a93267697c33d54e71be4bb5e8e768760c27c"
dependencies = [
"jsparagus-ast",
]


@ -12,12 +12,12 @@ log = "0.4"
# Disable regex feature for code size.
env_logger = {version = "0.6", default-features = false}
# For non-jsparagus developers.
jsparagus = { git = "https://github.com/mozilla-spidermonkey/jsparagus", rev = "b427056af05f0d30192a5b3acccb83306912e22b" }
jsparagus = { git = "https://github.com/mozilla-spidermonkey/jsparagus", rev = "fb2a93267697c33d54e71be4bb5e8e768760c27c" }
# For local development, replace above with
# jsparagus = { path = "{path to jsparagus}" }
[build-dependencies]
# For non-jsparagus developers.
jsparagus = { git = "https://github.com/mozilla-spidermonkey/jsparagus", rev = "b427056af05f0d30192a5b3acccb83306912e22b" }
jsparagus = { git = "https://github.com/mozilla-spidermonkey/jsparagus", rev = "fb2a93267697c33d54e71be4bb5e8e768760c27c" }
# For local development, replace above with
# jsparagus = { path = "{path to jsparagus}" }


@ -1 +1 @@
{"files":{"Cargo.toml":"00c740f44b5681e3277e519d2e143e6edfbff186ca31a07ddce2eb46b803ddd7","src/array_emitter.rs":"bbc6528321f1d11d7c86c4f2bfdcfc9dced8f0b8b1c30c9f0a5355f300d196b6","src/ast_emitter.rs":"c60daa1da57cd09003005e07061476cdb67bc00ca0ad0e29dffdb2c9a31123b2","src/block_emitter.rs":"78965260d87a66c5324d6f3bdfea0f1938f8037f70adde148dbb2db599d1b2c0","src/compilation_info.rs":"32ca7cdae514501de0d0d667ff2b27a3abf736ae207a23009c42eacbdffbd5b3","src/control_structures.rs":"bdb186e98c14fa4e769b23b3dee4376683e6c6530af0856d55c055aff4398b84","src/dis.rs":"4a335d813fa965482ca0f20a7b9295a55ce7625b577d42bd8b33b156b81c6306","src/emitter.rs":"ae22bd50afcf09500e39811d7ab493c32e3f43a1a65a0bd34f584f0bf568dbaf","src/emitter_scope.rs":"07a904b6973bd2dbc1f9395022c15f11f6befc9b0fb4f2f251ccfc3f09aff380","src/expression_emitter.rs":"f8e02785dffb179bbe9fe58e45bbfccc08adc3ad0a071a0073bed0feedc8ed9a","src/function_declaration_emitter.rs":"e69acd58c8db9eb77875000cce92240a38ad84482a5db7338c3bda75ce6430c2","src/lib.rs":"2389ff32700e35a98ca94198e1ab5fcaa22bc84745b074e20b04baef940247cf","src/object_emitter.rs":"998423b3d6ef8797fadef6763803627df72fde292b1b34d6a41b2e66a331a181","src/reference_op_emitter.rs":"4ead96ef4424e3937c3f73e22b4e103f97cee522b9926345caeddcf4643ff843","src/script_emitter.rs":"44a6be5ecdcde3c32d78d100a205d38be2591c7c2cc109967579af7393e09fe8"},"package":null}
{"files":{"Cargo.toml":"00c740f44b5681e3277e519d2e143e6edfbff186ca31a07ddce2eb46b803ddd7","src/array_emitter.rs":"bbc6528321f1d11d7c86c4f2bfdcfc9dced8f0b8b1c30c9f0a5355f300d196b6","src/ast_emitter.rs":"21f666ac7baa953f606d41a26d48cef8cf2c674d9b5953cf45bf539a9630a80b","src/block_emitter.rs":"78965260d87a66c5324d6f3bdfea0f1938f8037f70adde148dbb2db599d1b2c0","src/compilation_info.rs":"32ca7cdae514501de0d0d667ff2b27a3abf736ae207a23009c42eacbdffbd5b3","src/control_structures.rs":"bdb186e98c14fa4e769b23b3dee4376683e6c6530af0856d55c055aff4398b84","src/dis.rs":"4a335d813fa965482ca0f20a7b9295a55ce7625b577d42bd8b33b156b81c6306","src/emitter.rs":"14c06d1cf277a9017ad0feb440e598e9735b0d7c7a272a05bb7c604e34e8b8aa","src/emitter_scope.rs":"ba924ef541742a5c7be39d1b683bf3107241cf3ff5b8ff7f93987abc9f52e9d2","src/expression_emitter.rs":"f8e02785dffb179bbe9fe58e45bbfccc08adc3ad0a071a0073bed0feedc8ed9a","src/function_declaration_emitter.rs":"d76570732fd2d706f7861bf8be559ce998b25c8e028342831b759b17c54f7c13","src/lib.rs":"43285b5ddf164de2d90fc989ac25211e3e716751e6218df45f651ea75137d0f5","src/object_emitter.rs":"998423b3d6ef8797fadef6763803627df72fde292b1b34d6a41b2e66a331a181","src/reference_op_emitter.rs":"87c7e05934718921d72977746b93513850eab69465d33e190003cb86241f62b4","src/script_emitter.rs":"44a6be5ecdcde3c32d78d100a205d38be2591c7c2cc109967579af7393e09fe8"},"package":null}


@ -83,6 +83,10 @@ impl<'alloc, 'opt> AstEmitter<'alloc, 'opt> {
self.scope_stack.lookup_name(name)
}
pub fn lookup_name_in_var(&mut self, name: SourceAtomSetIndex) -> NameLocation {
self.scope_stack.lookup_name_in_var(name)
}
fn emit_script(mut self, ast: &Script) -> Result<ScriptStencil, EmitError> {
let scope_data_map = &self.compilation_info.scope_data_map;
let function_declarations = &self.compilation_info.function_declarations;


@ -62,6 +62,16 @@ pub enum ThrowMsgKind {
AssignToCall = 0,
IteratorNoThrow = 1,
CantDeleteSuper = 2,
PrivateDoubleInit = 3,
MissingPrivateOnGet = 4,
MissingPrivateOnSet = 5,
}
#[derive(Debug, Clone, Copy)]
pub enum ThrowCondition {
ThrowHas = 0,
ThrowHasNot = 1,
NoThrow = 2,
}
#[derive(Debug, Clone, Copy)]
@ -557,10 +567,6 @@ impl InstructionWriter {
self.emit_op(Opcode::InitHiddenElem);
}
pub fn init_private_elem(&mut self) {
self.emit_op(Opcode::InitPrivateElem);
}
pub fn init_prop_getter(&mut self, name_index: GCThingIndex) {
self.emit_op(Opcode::InitPropGetter);
self.write_g_c_thing_index(name_index);
@ -615,10 +621,6 @@ impl InstructionWriter {
self.emit_op(Opcode::CallElem);
}
pub fn get_private_elem(&mut self) {
self.emit_op(Opcode::GetPrivateElem);
}
pub fn length(&mut self, name_index: GCThingIndex) {
self.emit_op(Opcode::Length);
self.write_g_c_thing_index(name_index);
@ -642,10 +644,6 @@ impl InstructionWriter {
self.emit_op(Opcode::StrictSetElem);
}
pub fn set_private_elem(&mut self) {
self.emit_op(Opcode::SetPrivateElem);
}
pub fn del_prop(&mut self, name_index: GCThingIndex) {
self.emit_op(Opcode::DelProp);
self.write_g_c_thing_index(name_index);
@ -668,6 +666,12 @@ impl InstructionWriter {
self.emit_op(Opcode::HasOwn);
}
pub fn check_private_field(&mut self, throw_condition: ThrowCondition, msg_kind: ThrowMsgKind) {
self.emit_op(Opcode::CheckPrivateField);
self.write_u8(throw_condition as u8);
self.write_u8(msg_kind as u8);
}
pub fn super_base(&mut self) {
self.emit_op(Opcode::SuperBase);
}

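The new check_private_field helper pairs a ThrowCondition with a ThrowMsgKind and writes both as uint8 operands. A minimal usage sketch against the InstructionWriter API shown above (the surrounding emitter code and the `writer` variable are hypothetical, not part of this patch):

    // Guard a private-field read: throw the "get" flavour of the
    // missing-private-name error if the private name is absent on the object.
    writer.check_private_field(ThrowCondition::ThrowHasNot, ThrowMsgKind::MissingPrivateOnGet);
    // A write would use ThrowMsgKind::MissingPrivateOnSet, and a field
    // initializer would use ThrowCondition::ThrowHas with
    // ThrowMsgKind::PrivateDoubleInit to reject double initialization.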

@ -188,6 +188,13 @@ impl EmitterScope {
EmitterScope::Lexical(scope) => scope.has_environment_object(),
}
}
fn is_var_scope(&self) -> bool {
match self {
EmitterScope::Global(_) => true,
EmitterScope::Lexical(_) => false,
}
}
}
/// Stack that tracks the current scope chain while emitting bytecode.
@ -367,6 +374,31 @@ impl EmitterScopeStack {
NameLocation::Dynamic
}
/// Just like lookup_name, but only in var scope.
pub fn lookup_name_in_var(&mut self, name: SourceAtomSetIndex) -> NameLocation {
let mut hops = EnvironmentHops::new(0);
for scope in self.scope_stack.iter().rev() {
if scope.is_var_scope() {
if let Some(loc) = scope.lookup_name(name) {
return match loc {
NameLocation::EnvironmentCoord(orig_hops, slot, kind) => {
debug_assert!(u8::from(orig_hops) == 0u8);
NameLocation::EnvironmentCoord(hops, slot, kind)
}
_ => loc,
};
}
}
if scope.has_environment_object() {
hops.next();
}
}
NameLocation::Dynamic
}
pub fn current_depth(&self) -> EmitterScopeDepth {
EmitterScopeDepth {
index: self.scope_stack.len() - 1,

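The key detail in lookup_name_in_var above is that bindings are accepted only from var scopes, while an environment hop is still counted for every skipped scope that has an environment object. A self-contained sketch of that rule using hypothetical stand-in types (not the jsparagus API):

    struct Scope {
        is_var: bool,             // global or function var scope vs. lexical scope
        has_env: bool,            // does this scope get a runtime environment object?
        names: Vec<&'static str>, // bindings declared in this scope
    }

    // Number of environment hops to the var-scope binding of `name`,
    // or None when the emitter must fall back to a dynamic lookup.
    fn hops_to_var_binding(scope_stack: &[Scope], name: &str) -> Option<u8> {
        let mut hops = 0u8;
        for scope in scope_stack.iter().rev() {
            if scope.is_var && scope.names.iter().any(|n| *n == name) {
                return Some(hops);
            }
            if scope.has_env {
                hops += 1;
            }
        }
        None
    }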

@ -1,6 +1,8 @@
use crate::ast_emitter::AstEmitter;
use crate::emitter::EmitError;
use crate::reference_op_emitter::{AssignmentEmitter, DeclarationEmitter, NameReferenceEmitter};
use crate::reference_op_emitter::{
AssignmentEmitter, DeclarationEmitter, GetNameEmitter, NameReferenceEmitter,
};
use ast::source_atom_set::SourceAtomSetIndex;
use stencil::gcthings::GCThingIndex;
use stencil::script::ScriptStencilIndex;
@ -47,7 +49,11 @@ impl LexicalFunctionDeclarationEmitter {
Ok(())
},
}
.emit(emitter)
.emit(emitter)?;
emitter.emit.pop();
Ok(())
}
}
@ -58,15 +64,25 @@ pub struct AnnexBFunctionDeclarationEmitter {
impl AnnexBFunctionDeclarationEmitter {
pub fn emit(self, emitter: &mut AstEmitter) -> Result<(), EmitError> {
LexicalFunctionDeclarationEmitter {
name: self.name,
fun_index: self.fun_index,
}
.emit(emitter)?;
AssignmentEmitter {
lhs: |emitter| {
Ok(NameReferenceEmitter { name: self.name }.emit_for_assignment(emitter))
Ok(NameReferenceEmitter { name: self.name }.emit_for_var_assignment(emitter))
},
rhs: |emitter| {
emitter.emit.lambda(self.fun_index);
GetNameEmitter { name: self.name }.emit(emitter);
Ok(())
},
}
.emit(emitter)
.emit(emitter)?;
emitter.emit.pop();
Ok(())
}
}

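The Annex B path above no longer re-emits the lambda for the var-scope copy; it declares the lexical binding, reads it back with GetNameEmitter, assigns that value through emit_for_var_assignment, and pops the assignment's value. A self-contained model of that order using plain maps as stand-in scopes (hypothetical, not the emitter API):

    use std::collections::HashMap;

    // `lexical` models the block's lexical scope, `vars` the enclosing var scope.
    fn annex_b_function_declaration(
        lexical: &mut HashMap<String, u32>,
        vars: &mut HashMap<String, u32>,
        name: &str,
        closure: u32,
    ) {
        // 1. LexicalFunctionDeclarationEmitter: bind the closure lexically.
        lexical.insert(name.to_string(), closure);
        // 2. GetNameEmitter: read the value back through ordinary name lookup.
        let value = lexical[name];
        // 3. emit_for_var_assignment: write it to the var scope, ignoring any
        //    intervening lexical binding of the same name.
        vars.insert(name.to_string(), value);
        // 4. The real emitter then pops the assignment's value off the stack.
    }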
third_party/rust/jsparagus-emitter/src/lib.rs (vendored)

@ -23,7 +23,7 @@ use crate::compilation_info::CompilationInfo;
use ast::source_atom_set::SourceAtomSet;
use ast::source_slice_list::SourceSliceList;
use scope::ScopePassResult;
use scope::{ScopeBuildError, ScopePassResult};
use stencil::result::EmitResult;
pub fn emit<'alloc>(
@ -38,7 +38,18 @@ pub fn emit<'alloc>(
function_stencil_indices,
function_declaration_properties,
functions,
error,
} = scope::generate_scope_data(ast);
// Error case for scope analysis will be removed once all syntax is
// supported. Use field instead of Result type here for simplicity.
match error {
Some(ScopeBuildError::NotImplemented(s)) => {
return Err(EmitError::NotImplemented(s));
}
None => {}
}
let compilation_info = CompilationInfo::new(
atoms,
slices,

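As the comment above says, the scope pass reports unsupported syntax through a field rather than a Result, and the emitter converts it afterwards. A minimal sketch of that pattern with hypothetical stand-in types (the real ones are ScopeBuildError, ScopePassResult and EmitError::NotImplemented):

    #[derive(Debug, Clone)]
    enum BuildError {
        NotImplemented(&'static str),
    }

    #[derive(Default)]
    struct PassResult {
        error: Option<BuildError>,
        // ...the rest of the analysis output would live here as well
    }

    // The caller turns the recorded error into its own error type afterwards,
    // mirroring the match on `error` in emit() above.
    fn into_emit_result(result: PassResult) -> Result<PassResult, String> {
        if let Some(BuildError::NotImplemented(feature)) = &result.error {
            return Err(format!("not implemented: {}", feature));
        }
        Ok(result)
    }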

@ -382,9 +382,12 @@ impl NameReferenceEmitter {
CallReference::new(CallKind::Normal)
}
pub fn emit_for_assignment(self, emitter: &mut AstEmitter) -> AssignmentReference {
pub fn emit_for_assignment_with_loc(
self,
emitter: &mut AstEmitter,
loc: NameLocation,
) -> AssignmentReference {
let name_index = emitter.emit.get_atom_gcthing_index(self.name);
let loc = emitter.lookup_name(self.name);
// [stack]
@ -428,6 +431,18 @@ impl NameReferenceEmitter {
}
}
pub fn emit_for_assignment(self, emitter: &mut AstEmitter) -> AssignmentReference {
let loc = emitter.lookup_name(self.name);
self.emit_for_assignment_with_loc(emitter, loc)
}
/// Ignore any lexical scope and assign to var scope.
/// Used by Annex B function.
pub fn emit_for_var_assignment(self, emitter: &mut AstEmitter) -> AssignmentReference {
let loc = emitter.lookup_name_in_var(self.name);
self.emit_for_assignment_with_loc(emitter, loc)
}
pub fn emit_for_declaration(self, emitter: &mut AstEmitter) -> DeclarationReference {
let name_index = emitter.emit.get_atom_gcthing_index(self.name);
let loc = emitter.lookup_name(self.name);


@ -1 +1 @@
{"files":{"Cargo.toml":"553be3c198fe555913bbeb7473b24e0e1fff12e48890a2e399b311df8a97c814","src/ast_builder.rs":"ff3cd595b70e8359b4ce9ce78b066890a78572fee923fcd8cadd029b8fbc4d7a","src/context_stack.rs":"29331d03cd4c8ee9283cb426ebe893b7ba6ad6d8a69016399c4d92a81cb1363b","src/declaration_kind.rs":"fdfda2fe408cce1c637d17fee0813160619450472c6de9befc36ebeed892cc3c","src/early_error_checker.rs":"150a106a8f0901b72ae40581f0c12f785983514cbc9042404ed6cf4315693d60","src/early_errors.rs":"8674454af7ac5efe51eb6a8e2abe088aad5560e0a0bd88a3eae66c90f1527149","src/error.rs":"507e4dd9c66720f3da2db135c3024392d8aaac5ccdb90c7f7463ccb2eff7efa8","src/lib.rs":"b74105a84c4a141b880439f9ec724f7dc08224342be08a73490ac2c01410af08","src/parser_tables_generated.rs":"d89a8f271e14aea5fedfe96bd69a22f37d1b99be74e65897fa02bd3d9d821f0a","src/stack_value_generated.rs":"ce8567634ff2bb818593f56c0589b4ba2d508704db943eb0778d79dfd19cce36","src/token.rs":"479f4cb97d2e6bc654a70634f3809817cc73eaf749c845643beb3556b9ead383","src/traits/mod.rs":"ba74c71f7218027f8188247bc64df243117613fbc9893d40799402ef1e6dbf59"},"package":null}
{"files":{"Cargo.toml":"553be3c198fe555913bbeb7473b24e0e1fff12e48890a2e399b311df8a97c814","src/ast_builder.rs":"ff3cd595b70e8359b4ce9ce78b066890a78572fee923fcd8cadd029b8fbc4d7a","src/context_stack.rs":"29331d03cd4c8ee9283cb426ebe893b7ba6ad6d8a69016399c4d92a81cb1363b","src/declaration_kind.rs":"fdfda2fe408cce1c637d17fee0813160619450472c6de9befc36ebeed892cc3c","src/early_error_checker.rs":"150a106a8f0901b72ae40581f0c12f785983514cbc9042404ed6cf4315693d60","src/early_errors.rs":"8674454af7ac5efe51eb6a8e2abe088aad5560e0a0bd88a3eae66c90f1527149","src/error.rs":"507e4dd9c66720f3da2db135c3024392d8aaac5ccdb90c7f7463ccb2eff7efa8","src/lib.rs":"b74105a84c4a141b880439f9ec724f7dc08224342be08a73490ac2c01410af08","src/parser_tables_generated.rs":"ead73abf96c1ff968faa25b92d1e77802530c4fcce68e4529242fe1d4e359d10","src/stack_value_generated.rs":"ce8567634ff2bb818593f56c0589b4ba2d508704db943eb0778d79dfd19cce36","src/token.rs":"479f4cb97d2e6bc654a70634f3809817cc73eaf749c845643beb3556b9ead383","src/traits/mod.rs":"ba74c71f7218027f8188247bc64df243117613fbc9893d40799402ef1e6dbf59"},"package":null}

Diff for this file not shown because of its large size.


@ -1 +1 @@
{"files":{"Cargo.toml":"500dc18629fd32dd7019a7967535d6fc53bf94dc7e6c305be46f4040d47cac9e","src/builder.rs":"261ec973cf160f328deaab879ff7fb83f72c9a47f04735f92e5ff9cf87d175d3","src/data.rs":"a3cf1e7b1a96a619bcb8bd87f39bbd44dda0fc554c70a5d4d45b70eb03e69401","src/free_name_tracker.rs":"322228be4262d79d148f954a656b4f09fe953b324393fcc5925675c0e7777828","src/lib.rs":"16b1fe6659f977a547df15922b2864b81c4d452ad198409db4374e88c6df68bc","src/pass.rs":"fc419b742aba9572dbf0617c58031342e7739bfc8ed582212686773e4b581f72"},"package":null}
{"files":{"Cargo.toml":"500dc18629fd32dd7019a7967535d6fc53bf94dc7e6c305be46f4040d47cac9e","src/builder.rs":"e3573ad61b57b4b62ac6d4715271b696ac1774504a0645e69451148a05a31010","src/data.rs":"a3cf1e7b1a96a619bcb8bd87f39bbd44dda0fc554c70a5d4d45b70eb03e69401","src/free_name_tracker.rs":"322228be4262d79d148f954a656b4f09fe953b324393fcc5925675c0e7777828","src/lib.rs":"529f7598a3034b347a20307a752b467091d820df6be67ebc4a3bd8e02568511b","src/pass.rs":"c282c99354b1f4f92d211e32c560a87307725332b8e6a7447b31c92515dd0b77"},"package":null}


@ -288,8 +288,14 @@ enum ScopeKind {
/// LexicallyScopedDeclarations::LexicalDeclarationWithConst
Const,
/// Pushed when entering function, to catch function name.
FunctionName,
/// Pushed when entering function parameter, to disable FunctionName's
/// effect.
/// Equivalent to the case where there's no kind on the stack.
FunctionParametersAndBody,
FormalParameter,
#[allow(dead_code)]
@ -394,21 +400,9 @@ impl PossiblyAnnexBFunctionList {
self.functions.remove(&name);
}
fn mark_annex_b(
&self,
scopes: &mut ScopeDataList,
function_declaration_properties: &mut FunctionDeclarationPropertyMap,
) {
fn mark_annex_b(&self, function_declaration_properties: &mut FunctionDeclarationPropertyMap) {
for functions in &mut self.functions.values() {
for fun in functions {
let scope = scopes.get_mut(fun.owner_scope_index);
match scope {
ScopeData::Lexical(data) => {
data.mark_annex_b_function(fun.name, fun.binding_index.into());
}
_ => panic!("unexpected scope pointed by Annex B function"),
}
function_declaration_properties.mark_annex_b(fun.script_index);
}
}
@ -580,7 +574,6 @@ impl GlobalScopeBuilder {
fn perform_annex_b(
&mut self,
scopes: &mut ScopeDataList,
function_declaration_properties: &mut FunctionDeclarationPropertyMap,
possibly_annex_b_functions: &mut PossiblyAnnexBFunctionList,
) {
@ -662,12 +655,11 @@ impl GlobalScopeBuilder {
// Step 2.d.ii.1.b.iii.4. Perform
// ? genv.SetMutableBinding(F, fobj, false).
// Step 2.d.ii.1.b.iii.5. Return NormalCompletion(empty).
possibly_annex_b_functions.mark_annex_b(scopes, function_declaration_properties);
possibly_annex_b_functions.mark_annex_b(function_declaration_properties);
}
fn into_scope_data(
mut self,
scopes: &mut ScopeDataList,
function_declaration_properties: &mut FunctionDeclarationPropertyMap,
possibly_annex_b_functions: &mut PossiblyAnnexBFunctionList,
) -> ScopeData {
@ -680,11 +672,7 @@ impl GlobalScopeBuilder {
//
// NOTE: Reordered here to reflect the change to
// self.declared_var_names.
self.perform_annex_b(
scopes,
function_declaration_properties,
possibly_annex_b_functions,
);
self.perform_annex_b(function_declaration_properties, possibly_annex_b_functions);
// Step 12.a.i.i If vn is not an element of declaredFunctionNames, then
self.remove_function_names_from_var_names();
@ -1020,10 +1008,6 @@ struct FunctionParametersScopeBuilder {
strict: bool,
/// Step 5. Let parameterNames be the BoundNames of formals.
///
/// NOTE: This is used only for checking duplication.
/// The actual list of parameters is stored in
/// positional_parameter_names and non_positional_parameter_names.
parameter_names: HashSet<SourceAtomSetIndex>,
/// Step 17. Else if "arguments" is an element of parameterNames, then
@ -1244,7 +1228,6 @@ impl FunctionParametersScopeBuilder {
fn perform_annex_b(
&mut self,
scopes: &mut ScopeDataList,
function_declaration_properties: &mut FunctionDeclarationPropertyMap,
possibly_annex_b_functions: &mut PossiblyAnnexBFunctionList,
body_scope_builder: &mut FunctionBodyScopeBuilder,
@ -1281,6 +1264,9 @@ impl FunctionParametersScopeBuilder {
for n in &body_scope_builder.const_names {
possibly_annex_b_functions.remove_if_exists(*n);
}
for n in &self.parameter_names {
possibly_annex_b_functions.remove_if_exists(*n);
}
// Step 1.a.ii.1. NOTE: A var binding for F is only instantiated here
// if it is neither a VarDeclaredName, the name of a
@ -1310,12 +1296,11 @@ impl FunctionParametersScopeBuilder {
// Step 1.a.ii.3.c. Let fobj be ! benv.GetBindingValue(F, false).
// Step 1.a.ii.3.d. Perform ! fenv.SetMutableBinding(F, fobj, false).
// Step 1.a.ii.3.e. Return NormalCompletion(empty).
possibly_annex_b_functions.mark_annex_b(scopes, function_declaration_properties);
possibly_annex_b_functions.mark_annex_b(function_declaration_properties);
}
fn into_scope_data_set(
mut self,
scopes: &mut ScopeDataList,
function_declaration_properties: &mut FunctionDeclarationPropertyMap,
possibly_annex_b_functions: &mut PossiblyAnnexBFunctionList,
enclosing: ScopeIndex,
@ -1382,7 +1367,6 @@ impl FunctionParametersScopeBuilder {
// NOTE: Reordered here to reflect the change to
// body_scope_builder.var_names.
self.perform_annex_b(
scopes,
function_declaration_properties,
possibly_annex_b_functions,
&mut body_scope_builder,
@ -1391,8 +1375,7 @@ impl FunctionParametersScopeBuilder {
let has_extra_body_var_scope = self.has_parameter_expressions;
// NOTE: Names in `body_scope_builder.var_names` is skipped if
// parameter has the same name, or it's `arguments`,
// at step 27.c.i.
// it's `arguments`, at step 27.c.i.
// The count here isn't the exact number of var bindings, but
// it's fine given FunctionScopeData::new doesn't require the
// exact number, but just maximum number.
@ -1489,7 +1472,6 @@ impl FunctionParametersScopeBuilder {
// Step 27.b. Let instantiatedVarNames be a copy of the List
// parameterBindings.
// (implicit)
// Step 27.c. For each n in varNames, do
for n in &body_scope_builder.var_names {
@ -1498,7 +1480,7 @@ impl FunctionParametersScopeBuilder {
// Step 27.c.i.1. Append n to instantiatedVarNames.
//
// NOTE: var_names is already unique.
// Check against parameters here.
// Check against parameters and `arguments` here.
if self.parameter_names.contains(n)
|| (arguments_object_needed && *n == CommonSourceAtomSetIndices::arguments())
{
@ -1535,8 +1517,7 @@ impl FunctionParametersScopeBuilder {
// declarations in the function body.
// Step 28.b. Let varEnv be NewDeclarativeEnvironment(env).
// Step 28.c. Let varEnvRec be varEnv's EnvironmentRecord.
// Step 28.d. Set the VariableEnvironment of calleeContext to
// Step 28.c. Set the VariableEnvironment of calleeContext to
// varEnv.
let mut data = VarScopeData::new(
body_scope_builder.var_names.len(),
@ -1544,32 +1525,32 @@ impl FunctionParametersScopeBuilder {
/* enclosing= */ self.scope_index,
);
// Step 28.e. Let instantiatedVarNames be a new empty List.
// NOTE: var_names is already unique. Nothing to check here.
// Step 28.d. Let instantiatedVarNames be a new empty List.
// Step 28.f. For each n in varNames, do
// Step 28.e. For each n in varNames, do
for n in &body_scope_builder.var_names {
// Step 28.f.i. If n is not an element of instantiatedVarNames, then
// Step 28.f.i.1. Append n to instantiatedVarNames.
// (implicit)
// Step 28.e.i. If n is not an element of instantiatedVarNames, then
// Step 28.e.i.1. Append n to instantiatedVarNames.
//
// NOTE: var_names is already unique.
// Step 28.f.i.2. Perform
// ! varEnvRec.CreateMutableBinding(n, false).
// Step 28.e.i.2. Perform
// ! varEnv.CreateMutableBinding(n, false).
let is_closed_over = body_scope_builder.base.name_tracker.is_closed_over_def(n);
data.base
.bindings
.push(BindingName::new(*n, is_closed_over));
// Step 28.f.i.3. If n is not an element of parameterBindings or if
// Step 28.e.i.3. If n is not an element of parameterBindings or if
// n is an element of functionNames, let
// initialValue be undefined.
// Step 28.f.i.4. Else,
// Step 28.f.i.4.a. Let initialValue be
// ! envRec.GetBindingValue(n, false).
// Step 28.f.i.5. Call varEnvRec.InitializeBinding(n, initialValue).
// Step 28.e.i.4. Else,
// Step 28.e.i.4.a. Let initialValue be
// ! env.GetBindingValue(n, false).
// Step 28.e.i.5. Call varEnv.InitializeBinding(n, initialValue).
// (done in emitter)
// Step 28.f.i.6. NOTE: A var with the same name as a formal
// Step 28.e.i.6. NOTE: A var with the same name as a formal
// parameter initially has the same value as the
// corresponding initialized parameter.
}
@ -1730,6 +1711,9 @@ impl FunctionBodyScopeBuilder {
// FunctionDeclarationInstantiation ( func, argumentsList )
// https://tc39.es/ecma262/#sec-functiondeclarationinstantiation
//
// Step 9. Let varNames be the VarDeclaredNames of code.
self.var_names.insert(name);
// Step 14. For each d in varDeclarations, in reverse list order, do
// Step 14.a. If d is neither a VariableDeclaration nor a ForBinding
// nor a BindingIdentifier , then
@ -1981,7 +1965,10 @@ impl ScopeBuilderStack {
fn pop_block(&mut self) -> BlockScopeBuilder {
match self.pop() {
ScopeBuilder::Block(builder) => builder,
ScopeBuilder::Block(builder) => {
self.update_closed_over_bindings_for_lazy(&builder.base);
builder
}
_ => panic!("unmatching scope builder"),
}
}
@ -1992,7 +1979,10 @@ impl ScopeBuilderStack {
fn pop_function_expression(&mut self) -> FunctionExpressionScopeBuilder {
match self.pop() {
ScopeBuilder::FunctionExpression(builder) => builder,
ScopeBuilder::FunctionExpression(builder) => {
self.update_closed_over_bindings_for_lazy(&builder.base);
builder
}
_ => panic!("unmatching scope builder"),
}
}
@ -2001,11 +1991,32 @@ impl ScopeBuilderStack {
self.stack.push(ScopeBuilder::FunctionParameters(builder))
}
fn pop_function_parameters(&mut self) -> FunctionParametersScopeBuilder {
match self.pop() {
fn pop_function_parameters_and_body(
&mut self,
) -> (FunctionParametersScopeBuilder, FunctionBodyScopeBuilder) {
let body_scope_builder = match self.pop() {
ScopeBuilder::FunctionBody(builder) => builder,
_ => panic!("unmatching scope builder"),
};
let parameter_scope_builder = match self.pop() {
ScopeBuilder::FunctionParameters(builder) => builder,
_ => panic!("unmatching scope builder"),
};
let has_extra_body_var_scope = parameter_scope_builder.has_parameter_expressions;
if has_extra_body_var_scope {
self.update_closed_over_bindings_for_lazy(&body_scope_builder.base);
self.update_closed_over_bindings_for_lazy(&parameter_scope_builder.base);
} else {
self.update_closed_over_bindings_for_lazy_with_parameters_and_body(
&parameter_scope_builder.base,
&body_scope_builder.base,
);
}
(parameter_scope_builder, body_scope_builder)
}
fn get_function_parameters<'a>(&'a mut self) -> &'a mut FunctionParametersScopeBuilder {
@ -2019,10 +2030,41 @@ impl ScopeBuilderStack {
self.stack.push(ScopeBuilder::FunctionBody(builder))
}
fn pop_function_body(&mut self) -> FunctionBodyScopeBuilder {
match self.pop() {
ScopeBuilder::FunctionBody(builder) => builder,
_ => panic!("unmatching scope builder"),
fn update_closed_over_bindings_for_lazy(&mut self, builder: &BaseScopeBuilder) {
match self.closed_over_bindings_for_lazy.last_mut() {
Some(bindings) => {
for name in builder.name_tracker.defined_and_closed_over_vars() {
bindings.push(Some(*name));
}
bindings.push(None);
}
None => {
// We're leaving lexical scope in top-level script.
}
}
}
// Just like update_closed_over_bindings_for_lazy, but merge
// 2 builders for parameters and body, in case the function doesn't have
// extra body scope.
fn update_closed_over_bindings_for_lazy_with_parameters_and_body(
&mut self,
builder1: &BaseScopeBuilder,
builder2: &BaseScopeBuilder,
) {
match self.closed_over_bindings_for_lazy.last_mut() {
Some(bindings) => {
for name in builder1.name_tracker.defined_and_closed_over_vars() {
bindings.push(Some(*name));
}
for name in builder2.name_tracker.defined_and_closed_over_vars() {
bindings.push(Some(*name));
}
bindings.push(None);
}
None => {
// We're leaving lexical scope in top-level script.
}
}
}
@ -2034,18 +2076,6 @@ impl ScopeBuilderStack {
let inner_base = inner.base();
let outer_base = outer.base_mut();
match self.closed_over_bindings_for_lazy.last_mut() {
Some(bindings) => {
for name in inner_base.name_tracker.defined_and_closed_over_vars() {
bindings.push(Some(*name));
}
bindings.push(None);
}
None => {
// We're leaving lexical scope in top-level script.
}
}
// When construct such as `eval`, `with` and `delete` access
// name dynamically in inner scopes, we have to propagate this
// flag to the outer scope such that we prevent optimizations.
@ -2284,6 +2314,13 @@ impl FunctionScriptStencilBuilder {
}
}
/// Scope builder shouldn't raise any error except not-implemented.
/// This struct should eventually be removed.
#[derive(Clone, Debug)]
pub enum ScopeBuildError {
NotImplemented(&'static str),
}
/// Receives method calls telling about a JS script and builds a
/// `ScopeDataMap`.
///
@ -2309,6 +2346,8 @@ pub struct ScopeDataMapBuilder {
function_declaration_properties: FunctionDeclarationPropertyMap,
possibly_annex_b_functions: PossiblyAnnexBFunctionList,
error: Option<ScopeBuildError>,
}
impl ScopeDataMapBuilder {
@ -2322,6 +2361,13 @@ impl ScopeDataMapBuilder {
function_stencil_builder: FunctionScriptStencilBuilder::new(),
function_declaration_properties: FunctionDeclarationPropertyMap::new(),
possibly_annex_b_functions: PossiblyAnnexBFunctionList::new(),
error: None,
}
}
fn set_error(&mut self, e: ScopeBuildError) {
if self.error.is_none() {
self.error = Some(e);
}
}
@ -2373,7 +2419,6 @@ impl ScopeDataMapBuilder {
// Steps 12-18.
let scope_index = builder.scope_index;
let scope = builder.into_scope_data(
&mut self.scopes,
&mut self.function_declaration_properties,
&mut self.possibly_annex_b_functions,
);
@ -2461,7 +2506,9 @@ impl ScopeDataMapBuilder {
if self.scope_kind_stack.is_empty() {
// FIXME
// Do nothing for unsupported case.
// Emitter will return NotImplemented anyway.
self.set_error(ScopeBuildError::NotImplemented(
"Unsupported binding identifier",
));
return;
}
@ -2473,6 +2520,14 @@ impl ScopeDataMapBuilder {
self.builder_stack.innermost().set_function_name(name);
self.function_stencil_builder.set_function_name(name);
}
ScopeKind::FunctionParametersAndBody => {
// FIXME
// Do nothing for unsupported case.
self.set_error(ScopeBuildError::NotImplemented(
"Unsupported binding identifier",
));
return;
}
ScopeKind::FormalParameter => self.builder_stack.innermost().declare_param(name),
_ => panic!("Not implemented"),
}
@ -2496,6 +2551,13 @@ impl ScopeDataMapBuilder {
where
T: SourceLocationAccessor + NodeTypeIdAccessor,
{
if is_generator || is_async {
// FIXME: Generator and async should mark all bindings closed over.
self.set_error(ScopeBuildError::NotImplemented(
"Generator or async function",
));
}
let fun_index = self.function_stencil_builder.enter(
fun,
FunctionSyntaxKind::function_declaration(is_generator, is_async),
@ -2529,6 +2591,11 @@ impl ScopeDataMapBuilder {
where
T: SourceLocationAccessor + NodeTypeIdAccessor,
{
// FIXME: Anonymous function expression needs inferred name.
self.set_error(ScopeBuildError::NotImplemented(
"Function expression (name analysis)",
));
let index = self.scopes.allocate();
let builder = FunctionExpressionScopeBuilder::new(index);
self.non_global.insert(fun, index);
@ -2563,6 +2630,9 @@ impl ScopeDataMapBuilder {
where
T: SourceLocationAccessor + NodeTypeIdAccessor,
{
// FIXME: Support PropertyName as function name.
self.set_error(ScopeBuildError::NotImplemented("Method (name calculation)"));
self.function_stencil_builder.enter(
fun,
FunctionSyntaxKind::method(is_generator, is_async),
@ -2581,6 +2651,9 @@ impl ScopeDataMapBuilder {
where
T: SourceLocationAccessor + NodeTypeIdAccessor,
{
// FIXME: Support PropertyName as function name.
self.set_error(ScopeBuildError::NotImplemented("Getter (name calculation)"));
self.function_stencil_builder.enter(
fun,
FunctionSyntaxKind::getter(),
@ -2607,6 +2680,9 @@ impl ScopeDataMapBuilder {
where
T: SourceLocationAccessor + NodeTypeIdAccessor,
{
// FIXME: Support PropertyName as function name.
self.set_error(ScopeBuildError::NotImplemented("Setter (name calculation)"));
self.function_stencil_builder.enter(
fun,
FunctionSyntaxKind::setter(),
@ -2637,6 +2713,12 @@ impl ScopeDataMapBuilder {
where
T: SourceLocationAccessor + NodeTypeIdAccessor,
{
// FIXME: Arrow function needs to access enclosing scope's
// `this` and `arguments`.
self.set_error(ScopeBuildError::NotImplemented(
"Arrow function (special name handling)",
));
self.function_stencil_builder.enter(
params,
FunctionSyntaxKind::arrow(is_async),
@ -2655,6 +2737,9 @@ impl ScopeDataMapBuilder {
where
T: SourceLocationAccessor + NodeTypeIdAccessor,
{
self.scope_kind_stack
.push(ScopeKind::FunctionParametersAndBody);
self.builder_stack
.closed_over_bindings_for_lazy
.push(Vec::new());
@ -2736,10 +2821,12 @@ impl ScopeDataMapBuilder {
}
pub fn after_function_body(&mut self) {
let body_scope_builder = self.builder_stack.pop_function_body();
let parameter_scope_builder = self.builder_stack.pop_function_parameters();
let (parameter_scope_builder, body_scope_builder) =
self.builder_stack.pop_function_parameters_and_body();
let enclosing = self.builder_stack.current_scope_index();
let has_extra_body_var_scope = parameter_scope_builder.has_parameter_expressions;
self.function_stencil_builder.add_closed_over_bindings(
self.builder_stack
.closed_over_bindings_for_lazy
@ -2793,7 +2880,6 @@ impl ScopeDataMapBuilder {
// Step 1. Perform ? FunctionDeclarationInstantiation(functionObject,
// argumentsList).
let scope_data_set = parameter_scope_builder.into_scope_data_set(
&mut self.scopes,
&mut self.function_declaration_properties,
&mut self.possibly_annex_b_functions,
enclosing,
@ -2801,9 +2887,13 @@ impl ScopeDataMapBuilder {
);
self.possibly_annex_b_functions.clear();
let has_extra_body_var = match &scope_data_set.extra_body_var {
ScopeData::Var(_) => true,
_ => false,
match &scope_data_set.extra_body_var {
ScopeData::Var(_) => {
debug_assert!(has_extra_body_var_scope);
}
_ => {
debug_assert!(!has_extra_body_var_scope);
}
};
let fun_stencil = self.function_stencil_builder.current_mut();
@ -2816,8 +2906,14 @@ impl ScopeDataMapBuilder {
panic!("Unexpected scope data for function");
}
if has_extra_body_var {
fun_stencil.set_function_has_extra_body_var_scope();
if has_extra_body_var_scope {
let extra_body_var_scope = match &scope_data_set.extra_body_var {
ScopeData::Var(scope) => scope,
_ => panic!(""),
};
if extra_body_var_scope.base.bindings.len() > 0 {
fun_stencil.set_function_has_extra_body_var_scope();
}
}
if has_mapped_arguments {
@ -2851,7 +2947,7 @@ impl ScopeDataMapBuilder {
// NOTE: This is an implementation-specific optimization, and has
// no corresponding steps in the spec.
if var_names_has_arguments {
if has_extra_body_var {
if has_extra_body_var_scope {
try_declare_arguments = true;
} else if !parameter_has_arguments {
uses_arguments = true;
@ -2861,8 +2957,8 @@ impl ScopeDataMapBuilder {
if try_declare_arguments {
// if extra body var scope exists, the existence of `arguments`
// binding in function body doesn't affect.
let declare_arguments =
!parameter_has_arguments && (has_extra_body_var || !body_has_defined_arguments);
let declare_arguments = !parameter_has_arguments
&& (has_extra_body_var_scope || !body_has_defined_arguments);
if declare_arguments {
fun_stencil.set_should_declare_arguments();
@ -2879,6 +2975,15 @@ impl ScopeDataMapBuilder {
if bindings_accessed_dynamically {
fun_stencil.set_always_needs_args_obj();
}
if has_used_this {
// FIXME
// IsLikelyConstructorWrapper should be set if
// `.apply()` is used and `return` isn't used.
self.set_error(ScopeBuildError::NotImplemented(
"IsLikelyConstructorWrapper condition",
));
}
}
}
@ -2888,10 +2993,22 @@ impl ScopeDataMapBuilder {
.populate(var_scope_index, scope_data_set.extra_body_var);
self.scopes
.populate(lexical_scope_index, scope_data_set.lexical);
self.scope_kind_stack
.pop(ScopeKind::FunctionParametersAndBody);
}
pub fn before_catch_clause(&mut self) {
// FIXME: NewDeclarativeEnvironment for catch parameter.
self.set_error(ScopeBuildError::NotImplemented("try-catch"));
}
#[allow(dead_code)]
pub fn on_direct_eval(&mut self) {
// FIXME: Propagate to script flags.
self.set_error(ScopeBuildError::NotImplemented(
"direct eval (script flags)",
));
if let Some(parameter_scope_builder) =
self.builder_stack.maybe_innermost_function_parameters()
{
@ -2903,6 +3020,31 @@ impl ScopeDataMapBuilder {
.base_mut()
.bindings_accessed_dynamically = true;
}
pub fn on_class(&mut self) {
// FIXME: NewDeclarativeEnvironment for class tail.
self.set_error(ScopeBuildError::NotImplemented("class"));
}
pub fn on_with(&mut self) {
// FIXME: Propagate to script flags.
self.set_error(ScopeBuildError::NotImplemented("with statement"));
}
pub fn on_delete(&mut self) {
// FIXME: Propagate to script flags.
self.set_error(ScopeBuildError::NotImplemented("delete operator"));
}
pub fn on_lexical_for(&mut self) {
// FIXME: NewDeclarativeEnvironment in for statement
self.set_error(ScopeBuildError::NotImplemented("lexical for"));
}
pub fn on_switch(&mut self) {
// FIXME: NewDeclarativeEnvironment in for case block
self.set_error(ScopeBuildError::NotImplemented("switch"));
}
}
pub struct ScopeDataMapAndScriptStencilList {
@ -2910,6 +3052,7 @@ pub struct ScopeDataMapAndScriptStencilList {
pub function_stencil_indices: AssociatedData<ScriptStencilIndex>,
pub function_declaration_properties: FunctionDeclarationPropertyMap,
pub functions: ScriptStencilList,
pub error: Option<ScopeBuildError>,
}
impl From<ScopeDataMapBuilder> for ScopeDataMapAndScriptStencilList {
@ -2923,6 +3066,7 @@ impl From<ScopeDataMapBuilder> for ScopeDataMapAndScriptStencilList {
function_stencil_indices: builder.function_stencil_builder.function_stencil_indices,
function_declaration_properties: builder.function_declaration_properties,
functions: builder.function_stencil_builder.functions,
error: builder.error,
}
}
}

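One behavioural detail of the after_function_body change above: function_has_extra_body_var_scope is now set only when the extra body var scope both exists (the parameter list contains expressions) and actually holds bindings. A one-function sketch of the new condition (hypothetical helper, not the builder API):

    // `has_parameter_expressions`: the parameter list contains expressions
    // (e.g. default values), so body `var`s get their own scope.
    // `body_var_bindings`: number of bindings collected for that extra scope.
    fn should_set_extra_body_var_scope_flag(
        has_parameter_expressions: bool,
        body_var_bindings: usize,
    ) -> bool {
        has_parameter_expressions && body_var_bindings > 0
    }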
third_party/rust/jsparagus-scope/src/lib.rs (vendored)

@ -19,6 +19,7 @@ extern crate jsparagus_stencil as stencil;
use ast::visit::Pass;
pub use builder::ScopeBuildError;
pub use pass::ScopePassResult;
/// Visit all nodes in the AST, and create a scope data.

third_party/rust/jsparagus-scope/src/pass.rs (vendored)

@ -7,7 +7,7 @@
//! but the goal is to do this analysis as part of the parse phase, even when
//! no AST is built. So we try to keep AST use separate from the analysis code.
use crate::builder::{ScopeDataMapAndScriptStencilList, ScopeDataMapBuilder};
use crate::builder::{ScopeBuildError, ScopeDataMapAndScriptStencilList, ScopeDataMapBuilder};
use crate::data::FunctionDeclarationPropertyMap;
use ast::arena;
use ast::associated_data::AssociatedData;
@ -24,6 +24,7 @@ pub struct ScopePassResult<'alloc> {
pub function_stencil_indices: AssociatedData<ScriptStencilIndex>,
pub function_declaration_properties: FunctionDeclarationPropertyMap,
pub functions: ScriptStencilList,
pub error: Option<ScopeBuildError>,
}
/// The top-level struct responsible for extracting the necessary information
@ -53,6 +54,7 @@ impl<'alloc> From<ScopePass<'alloc>> for ScopePassResult<'alloc> {
function_stencil_indices,
function_declaration_properties,
functions,
error,
} = pass.builder.into();
ScopePassResult {
scope_data_map,
@ -60,6 +62,7 @@ impl<'alloc> From<ScopePass<'alloc>> for ScopePassResult<'alloc> {
function_stencil_indices,
function_declaration_properties,
functions,
error,
}
}
}
@ -274,4 +277,85 @@ impl<'alloc> Pass<'alloc> for ScopePass<'alloc> {
) {
self.builder.after_function_body();
}
fn enter_catch_clause(&mut self, _ast: &'alloc CatchClause<'alloc>) {
self.builder.before_catch_clause();
}
fn enter_call_expression(&mut self, ast: &'alloc CallExpression<'alloc>) {
match &ast.callee {
ExpressionOrSuper::Expression(expr) => match &**expr {
Expression::IdentifierExpression(IdentifierExpression { name, .. }) => {
if name.value == CommonSourceAtomSetIndices::eval() {
self.builder.on_direct_eval();
}
}
_ => {}
},
_ => {}
}
}
fn enter_class_declaration(&mut self, _ast: &'alloc ClassDeclaration<'alloc>) {
self.builder.on_class();
}
fn enter_class_expression(&mut self, _ast: &'alloc ClassExpression<'alloc>) {
self.builder.on_class();
}
fn enter_enum_statement_variant_with_statement(
&mut self,
_object: &'alloc arena::Box<'alloc, Expression<'alloc>>,
_body: &'alloc arena::Box<'alloc, Statement<'alloc>>,
) {
self.builder.on_with();
}
fn visit_enum_unary_operator_variant_delete(&mut self) {
self.builder.on_delete();
}
fn enter_enum_statement_variant_for_statement(
&mut self,
init: &'alloc Option<VariableDeclarationOrExpression<'alloc>>,
_test: &'alloc Option<arena::Box<'alloc, Expression<'alloc>>>,
_update: &'alloc Option<arena::Box<'alloc, Expression<'alloc>>>,
_block: &'alloc arena::Box<'alloc, Statement<'alloc>>,
) {
match init {
Some(VariableDeclarationOrExpression::VariableDeclaration(decl)) => match decl.kind {
VariableDeclarationKind::Let { .. } | VariableDeclarationKind::Const { .. } => {
self.builder.on_lexical_for();
}
_ => {}
},
_ => {}
}
}
fn enter_enum_statement_variant_for_in_statement(
&mut self,
left: &'alloc VariableDeclarationOrAssignmentTarget<'alloc>,
_right: &'alloc arena::Box<'alloc, Expression<'alloc>>,
_block: &'alloc arena::Box<'alloc, Statement<'alloc>>,
) {
match left {
VariableDeclarationOrAssignmentTarget::VariableDeclaration(decl) => match decl.kind {
VariableDeclarationKind::Let { .. } | VariableDeclarationKind::Const { .. } => {
self.builder.on_lexical_for();
}
_ => {}
},
_ => {}
}
}
fn enter_enum_statement_variant_switch_statement(
&mut self,
_discriminant: &'alloc arena::Box<'alloc, Expression<'alloc>>,
_cases: &'alloc arena::Vec<'alloc, SwitchCase<'alloc>>,
) {
self.builder.on_switch();
}
}


@ -1 +1 @@
{"files":{"Cargo.toml":"f5439990185662ab31de12c07ef0c842438e3207bdfecc4fa6a9e4d4bea8a0d3","src/bytecode_offset.rs":"2aa7ba8c3cfbbd832092e65b599ab1c5a28d784ccc65d9e351bba656421b9a69","src/copy/AsyncFunctionResolveKind.h":"3851ecbb4728257595dd6e900749d1d8e02558574c00424a7ff0e3ca007fa6ec","src/copy/BytecodeFormatFlags.h":"f495a25e113a92071514d17483fbd821c9e9b24b164cae1c6b5ad2dd7491a945","src/copy/CheckIsObjectKind.h":"8f0e112396d966c9221a743d353f62671e04cdace7dd49a59898d94ba0f621b7","src/copy/FunctionFlags.h":"e2578c5747f298d03d81fa2b248e4b36800ac8d42d9d6ef801ebb1bc13bc3960","src/copy/FunctionPrefixKind.h":"f540a5c646a519b2d61aa27e4be865e08a31438def00ad5ba4ba2982ad1f2275","src/copy/GeneratorAndAsyncKind.h":"301668ce705970a51abfa94f89fd5db29ef5f129525110860e9e9bf7586ef187","src/copy/GeneratorResumeKind.h":"9e3cd9dc9c7f50937c6c45d73ec092dbfd92c4b56818ae6d1504bcd77078d0a6","src/copy/Opcodes.h":"6663f2ae6251b341352c0f75bea2a2c27babd555768e74727ecf524b1e83563e","src/copy/SourceNotes.h":"1e467f4e63d6f40a428e257fecd210bd14664516adf75a45cb17ab02ccd65fd8","src/copy/StencilEnums.h":"e5a1db4af868fd65591ed97594f7aa9a4cde79194da0cabd62b34e950b3b10b4","src/copy/Symbol.h":"603985e8c92c94e021baf3a0114dd384035eda60827016d310f1507c8396a45e","src/copy/ThrowMsgKind.h":"da805756961d81a2b50aeb391a02fd59a0aa39a9e3eb6aae21b423b15875ab30","src/env_coord.rs":"0be36a1bd307f5586affe0f3046d8b2ab2f5382b41b7b7bfb364b97d16a7c410","src/frame_slot.rs":"b20c81d67c572f20d06d493b211cd3eaa0432a8294541583643b82df3af2f813","src/function.rs":"b841ba6f7ecee3a38a136ef9902fd1d4a3f6b0aa96d1e8d8340e7f26dead75d9","src/gcthings.rs":"baadc7284c01961a4aa44d464a6f5a0d0be427b6d099c949d4411846738d9a45","src/lib.rs":"b003e085344277d2987ef492dc513048e8ec83217850a22ba7ca06ac01bc9b5c","src/opcode.rs":"aabbeae9df11564d3275414497ff99499f3d297f6d062351180f77cb23e588a0","src/opcode_info.rs":"a27c6d5602f5ecdcc882a0167614bc7a7754d958124941b4c1c0cdc2b0a894f1","src/regexp.rs":"7436cf545b990bec7dcc51ff28d67deaca9d4ce894468fdad0dd44b25c571cf2","src/result.rs":"58a64e0619c4ba4c6b7d8834208698a8f1639ab1771f7ae22272f81fe3611d63","src/scope.rs":"57560a57ff8b07fff99185b894a950815069789a8e7f0c0a87c11bf56d15df8e","src/scope_notes.rs":"9947ba5aba3097321c76adcb5648a478e4a67e088fdc1e01511e51c4ad41a9f3","src/script.rs":"2921d7f445b20a95eac3c30abab7d99adb2bc31e7dcbc786a2da96db3f40c58c"},"package":null}
{"files":{"Cargo.toml":"f5439990185662ab31de12c07ef0c842438e3207bdfecc4fa6a9e4d4bea8a0d3","src/bytecode_offset.rs":"2aa7ba8c3cfbbd832092e65b599ab1c5a28d784ccc65d9e351bba656421b9a69","src/copy/AsyncFunctionResolveKind.h":"3851ecbb4728257595dd6e900749d1d8e02558574c00424a7ff0e3ca007fa6ec","src/copy/BytecodeFormatFlags.h":"a805958bfb8c97c87122722eb6343eacdf7bd5558ad8c79b324c7d55c1bda60e","src/copy/CheckIsObjectKind.h":"8f0e112396d966c9221a743d353f62671e04cdace7dd49a59898d94ba0f621b7","src/copy/FunctionFlags.h":"e2578c5747f298d03d81fa2b248e4b36800ac8d42d9d6ef801ebb1bc13bc3960","src/copy/FunctionPrefixKind.h":"f540a5c646a519b2d61aa27e4be865e08a31438def00ad5ba4ba2982ad1f2275","src/copy/GeneratorAndAsyncKind.h":"301668ce705970a51abfa94f89fd5db29ef5f129525110860e9e9bf7586ef187","src/copy/GeneratorResumeKind.h":"9e3cd9dc9c7f50937c6c45d73ec092dbfd92c4b56818ae6d1504bcd77078d0a6","src/copy/Opcodes.h":"9f0f80c4431668f6576459bebb45763603ac6bd52d405c1ac8ca668b84e4bf32","src/copy/SourceNotes.h":"1e467f4e63d6f40a428e257fecd210bd14664516adf75a45cb17ab02ccd65fd8","src/copy/StencilEnums.h":"e5a1db4af868fd65591ed97594f7aa9a4cde79194da0cabd62b34e950b3b10b4","src/copy/Symbol.h":"603985e8c92c94e021baf3a0114dd384035eda60827016d310f1507c8396a45e","src/copy/ThrowMsgKind.h":"5a06fa6cda81638b325af201a21396cfb460d993001a7725c858202b60237d04","src/env_coord.rs":"0be36a1bd307f5586affe0f3046d8b2ab2f5382b41b7b7bfb364b97d16a7c410","src/frame_slot.rs":"b20c81d67c572f20d06d493b211cd3eaa0432a8294541583643b82df3af2f813","src/function.rs":"b841ba6f7ecee3a38a136ef9902fd1d4a3f6b0aa96d1e8d8340e7f26dead75d9","src/gcthings.rs":"baadc7284c01961a4aa44d464a6f5a0d0be427b6d099c949d4411846738d9a45","src/lib.rs":"b003e085344277d2987ef492dc513048e8ec83217850a22ba7ca06ac01bc9b5c","src/opcode.rs":"43ed28674d7caa15fa149b904794826d63a72611270ca3e838addb9fddd4f6a3","src/opcode_info.rs":"a27c6d5602f5ecdcc882a0167614bc7a7754d958124941b4c1c0cdc2b0a894f1","src/regexp.rs":"7436cf545b990bec7dcc51ff28d67deaca9d4ce894468fdad0dd44b25c571cf2","src/result.rs":"58a64e0619c4ba4c6b7d8834208698a8f1639ab1771f7ae22272f81fe3611d63","src/scope.rs":"3d2269a06d3e55d24f697338fedde95d9c653faec02d4694d0b63c79979e7c5a","src/scope_notes.rs":"9947ba5aba3097321c76adcb5648a478e4a67e088fdc1e01511e51c4ad41a9f3","src/script.rs":"2921d7f445b20a95eac3c30abab7d99adb2bc31e7dcbc786a2da96db3f40c58c"},"package":null}


@ -34,7 +34,7 @@ enum {
JOF_LOOPHEAD = 20, /* JSOp::LoopHead, combines JOF_ICINDEX and JOF_UINT8 */
JOF_BIGINT = 21, /* uint32_t index for BigInt value */
JOF_CLASS_CTOR = 22, /* uint32_t atom index, sourceStart, sourceEnd */
// (23 is unused.)
JOF_TWO_UINT8 = 23, /* A pair of unspecified uint8_t arguments */
JOF_TYPEMASK = 0x001f, /* mask for above immediate types */
JOF_NAME = 1 << 5, /* name operation */

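JOF_TWO_UINT8 describes an operand layout of one opcode byte followed by two uint8 immediates, which is why CheckPrivateField is declared with a total length of 3 in Opcodes.h below. A hypothetical decoder sketch (not SpiderMonkey code):

    // `pc` points at the opcode byte; the two immediates follow it directly.
    fn decode_two_uint8(bytecode: &[u8], pc: usize) -> (u8, u8, u8) {
        (bytecode[pc], bytecode[pc + 1], bytecode[pc + 2])
    }
    // For CheckPrivateField the two immediates are the ThrowCondition and the
    // ThrowMsgKind written by the emitter's check_private_field helper.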

@ -934,18 +934,6 @@
*/ \
MACRO(InitElem, init_elem, NULL, 1, 3, 1, JOF_BYTE|JOF_ELEM|JOF_PROPINIT|JOF_IC) \
MACRO(InitHiddenElem, init_hidden_elem, NULL, 1, 3, 1, JOF_BYTE|JOF_ELEM|JOF_PROPINIT|JOF_IC) \
/*
* Define a private field on `obj` with property key `id` and value `val`.
*
* `obj` must be an object,
* `id` must be a private name.
*
* Category: Objects
* Type: Defining properties
* Operands:
* Stack: obj, id, val => obj
*/ \
MACRO(InitPrivateElem, init_private_elem, NULL, 1, 3, 1, JOF_BYTE|JOF_ELEM|JOF_PROPINIT|JOF_IC) \
/*
* Define an accessor property on `obj` with the given `getter`.
* `nameIndex` gives the property name.
@ -1050,17 +1038,6 @@
*/ \
MACRO(GetElem, get_elem, NULL, 1, 2, 1, JOF_BYTE|JOF_ELEM|JOF_TYPESET|JOF_IC) \
MACRO(CallElem, call_elem, NULL, 1, 2, 1, JOF_BYTE|JOF_ELEM|JOF_TYPESET|JOF_IC) \
/*
* Get the value of the private field `obj.#key`.
*
* Throws a TypeError if #key isn't on obj.
*
* Category: Objects
* Type: Accessing properties
* Operands:
* Stack: obj, key => obj[key]
*/ \
MACRO(GetPrivateElem, get_private_elem, NULL, 1, 2, 1, JOF_BYTE|JOF_ELEM|JOF_TYPESET|JOF_IC) \
/*
* Push the value of `obj.length`.
*
@ -1125,14 +1102,6 @@
* Stack: obj, key, val => val
*/ \
MACRO(StrictSetElem, strict_set_elem, NULL, 1, 3, 1, JOF_BYTE|JOF_ELEM|JOF_PROPSET|JOF_CHECKSTRICT|JOF_IC) \
/*
* Like `JSOp::StrictSetElem`, but for private names. Throws a TypeError if the private name doesn't exist.
* Category: Objects
* Type: Accessing properties
* Operands:
* Stack: obj, key, val => val
*/ \
MACRO(SetPrivateElem, set_private_elem, NULL, 1, 3, 1, JOF_BYTE|JOF_ELEM|JOF_PROPSET|JOF_CHECKSTRICT|JOF_IC) \
/*
* Delete a property from `obj`. Push true on success, false if the
* property existed but could not be deleted. This implements `delete
@ -1202,6 +1171,24 @@
* Stack: id, obj => (obj.hasOwnProperty(id))
*/ \
MACRO(HasOwn, has_own, NULL, 1, 2, 1, JOF_BYTE|JOF_IC) \
/*
* Push a bool representing the presence of private field id on obj.
* May throw, depending on the ThrowCondition.
*
* Two arguments:
* - throwCondition: One of the ThrowConditions defined in
* ThrowMsgKind.h. Determines why (or if) this op will throw.
* - msgKind: One of the ThrowMsgKinds defined in ThrowMsgKind.h, which
* maps to one of the messages in js.msg. Note: It's not possible to
* pass arguments to the message at the moment.
*
* Category: Control flow
* Category: Objects
* Type: Accessing properties
* Operands: ThrowCondition throwCondition, ThrowMsgKind msgKind
* Stack: obj, key => obj, key, (obj.hasOwnProperty(id))
*/ \
MACRO(CheckPrivateField, check_private_field, NULL, 3, 2, 3, JOF_TWO_UINT8|JOF_CHECKSTRICT) \
/*
* Push the SuperBase of the method `callee`. The SuperBase is
* `callee.[[HomeObject]].[[GetPrototypeOf]]()`, the object where `super`
@ -2565,9 +2552,9 @@
* example, `delete super.prop;` is allowed in methods, but always throws a
* ReferenceError.
*
* `msgNumber` must be one of the error codes listed in js/src/js.msg; it
* determines the `.message` and [[Prototype]] of the new Error object. The
* number of arguments in the error message must be 0.
* `msgNumber` determines the `.message` and [[Prototype]] of the new Error
* object. It must be an error number in js/public/friend/ErrorNumbers.msg.
* The number of arguments in the error message must be 0.
*
* Category: Control flow
* Type: Exceptions
@ -3687,6 +3674,8 @@
* a power of two. Use this macro to do so.
*/
#define FOR_EACH_TRAILING_UNUSED_OPCODE(MACRO) \
MACRO(238) \
MACRO(239) \
MACRO(240) \
MACRO(241) \
MACRO(242) \


@ -16,11 +16,18 @@ namespace js {
enum class ThrowMsgKind : uint8_t {
AssignToCall,
IteratorNoThrow,
CantDeleteSuper
CantDeleteSuper,
// Private Fields:
PrivateDoubleInit,
MissingPrivateOnGet,
MissingPrivateOnSet,
};
JSErrNum ThrowMsgKindToErrNum(ThrowMsgKind kind);
// Used for CheckPrivateField
enum class ThrowCondition : uint8_t { ThrowHas, ThrowHasNot, NoThrow };
} // namespace js
#endif /* vm_ThrowMsgKind_h */

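A small model of how the ThrowCondition operand drives CheckPrivateField, matching the opcode documentation above (a hypothetical sketch, not the VM implementation):

    #[derive(Clone, Copy)]
    enum ThrowCondition {
        ThrowHas,    // throw if the private field is already present (double init)
        ThrowHasNot, // throw if the private field is missing (get/set)
        NoThrow,     // never throw, just push the result
    }

    // Returns the boolean the op pushes, or an error when the condition fires.
    fn check_private_field(condition: ThrowCondition, has_field: bool) -> Result<bool, &'static str> {
        match condition {
            ThrowCondition::ThrowHas if has_field => Err("private field already initialized"),
            ThrowCondition::ThrowHasNot if !has_field => Err("missing private field"),
            _ => Ok(has_field),
        }
    }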

@ -71,7 +71,6 @@ macro_rules! using_opcode_database {
(InitLockedProp, init_locked_prop, NULL, 5, 2, 1, JOF_ATOM|JOF_PROP|JOF_PROPINIT|JOF_IC),
(InitElem, init_elem, NULL, 1, 3, 1, JOF_BYTE|JOF_ELEM|JOF_PROPINIT|JOF_IC),
(InitHiddenElem, init_hidden_elem, NULL, 1, 3, 1, JOF_BYTE|JOF_ELEM|JOF_PROPINIT|JOF_IC),
(InitPrivateElem, init_private_elem, NULL, 1, 3, 1, JOF_BYTE|JOF_ELEM|JOF_PROPINIT|JOF_IC),
(InitPropGetter, init_prop_getter, NULL, 5, 2, 1, JOF_ATOM|JOF_PROP|JOF_PROPINIT),
(InitHiddenPropGetter, init_hidden_prop_getter, NULL, 5, 2, 1, JOF_ATOM|JOF_PROP|JOF_PROPINIT),
(InitElemGetter, init_elem_getter, NULL, 1, 3, 1, JOF_BYTE|JOF_ELEM|JOF_PROPINIT),
@ -84,18 +83,17 @@ macro_rules! using_opcode_database {
(CallProp, call_prop, NULL, 5, 1, 1, JOF_ATOM|JOF_PROP|JOF_TYPESET|JOF_IC),
(GetElem, get_elem, NULL, 1, 2, 1, JOF_BYTE|JOF_ELEM|JOF_TYPESET|JOF_IC),
(CallElem, call_elem, NULL, 1, 2, 1, JOF_BYTE|JOF_ELEM|JOF_TYPESET|JOF_IC),
(GetPrivateElem, get_private_elem, NULL, 1, 2, 1, JOF_BYTE|JOF_ELEM|JOF_TYPESET|JOF_IC),
(Length, length, NULL, 5, 1, 1, JOF_ATOM|JOF_PROP|JOF_TYPESET|JOF_IC),
(SetProp, set_prop, NULL, 5, 2, 1, JOF_ATOM|JOF_PROP|JOF_PROPSET|JOF_CHECKSLOPPY|JOF_IC),
(StrictSetProp, strict_set_prop, NULL, 5, 2, 1, JOF_ATOM|JOF_PROP|JOF_PROPSET|JOF_CHECKSTRICT|JOF_IC),
(SetElem, set_elem, NULL, 1, 3, 1, JOF_BYTE|JOF_ELEM|JOF_PROPSET|JOF_CHECKSLOPPY|JOF_IC),
(StrictSetElem, strict_set_elem, NULL, 1, 3, 1, JOF_BYTE|JOF_ELEM|JOF_PROPSET|JOF_CHECKSTRICT|JOF_IC),
(SetPrivateElem, set_private_elem, NULL, 1, 3, 1, JOF_BYTE|JOF_ELEM|JOF_PROPSET|JOF_CHECKSTRICT|JOF_IC),
(DelProp, del_prop, NULL, 5, 1, 1, JOF_ATOM|JOF_PROP|JOF_CHECKSLOPPY),
(StrictDelProp, strict_del_prop, NULL, 5, 1, 1, JOF_ATOM|JOF_PROP|JOF_CHECKSTRICT),
(DelElem, del_elem, NULL, 1, 2, 1, JOF_BYTE|JOF_ELEM|JOF_CHECKSLOPPY),
(StrictDelElem, strict_del_elem, NULL, 1, 2, 1, JOF_BYTE|JOF_ELEM|JOF_CHECKSTRICT),
(HasOwn, has_own, NULL, 1, 2, 1, JOF_BYTE|JOF_IC),
(CheckPrivateField, check_private_field, NULL, 3, 2, 3, JOF_TWO_UINT8|JOF_CHECKSTRICT),
(SuperBase, super_base, NULL, 1, 1, 1, JOF_BYTE),
(GetPropSuper, get_prop_super, NULL, 5, 2, 1, JOF_ATOM|JOF_PROP|JOF_TYPESET|JOF_IC),
(GetElemSuper, get_elem_super, NULL, 1, 3, 1, JOF_BYTE|JOF_ELEM|JOF_TYPESET|JOF_IC),
@ -353,6 +351,9 @@ const JOF_BIGINT: u32 = 21;
/// uint32_t atom index, sourceStart, sourceEnd
const JOF_CLASS_CTOR: u32 = 22;
/// A pair of unspecified uint8_t arguments
const JOF_TWO_UINT8: u32 = 23;
/// mask for above immediate types
const JOF_TYPEMASK: u32 = 0x001f;


@ -350,49 +350,6 @@ impl LexicalScopeData {
pub fn iter<'a>(&'a self) -> LexicalBindingIter<'a> {
LexicalBindingIter::new(self)
}
/// Mark a `name` binding originally at `original_binding_index` as an
/// Annex B function.
///
/// The binding at `original_binding_index` can point to a different binding
/// if any binding before it has already been removed.
pub fn mark_annex_b_function(
&mut self,
name: SourceAtomSetIndex,
original_binding_index: usize,
) {
let binding_index = self.find_binding(name, original_binding_index);
// Lexical function becomes mutable binding.
debug_assert!(binding_index < self.const_start);
self.const_start -= 1;
self.base.bindings.remove(binding_index);
}
/// Find the binding `name`, originally stored at `original_binding_index`.
/// If the binding at `original_binding_index` isn't `name`, look for
/// `name`.
/// Panics if the binding with given `name` doesn't exist.
fn find_binding(&self, name: SourceAtomSetIndex, original_binding_index: usize) -> usize {
if original_binding_index < self.base.bindings.len() {
let binding = &self.base.bindings[original_binding_index];
if binding.name == name {
return original_binding_index;
}
}
// TODO: Search from `original_binding_index` to 0,
// instead of iterating all items.
for (i, binding) in self.base.bindings.iter().enumerate() {
if binding.name == name {
return i;
}
}
panic!("The binding should exist");
}
}
/// Corresponds to the iteration part of js::BindingIter

File diff suppressed because one or more lines are too long.


@ -20,7 +20,7 @@ for commit in $(cat $topdir/tmp/commit-list)
do
git checkout $commit
# python script pulls from env variables, export those
export total_count=$(find $topdir/rust -iname '*.rs' -type f -exec cat {} + | grep -c -E "(Emit|Parse)Error::NotImplemented")
export total_count=$(find $topdir/rust -iname '*.rs' -type f -exec cat {} + | grep -c -E "(Emit|Parse|ScopeBuild)Error::NotImplemented")
export current_commit=$commit
python not_implemented_count.py
python not_implemented_badge.py


@ -12,6 +12,7 @@ from . import types, grammar
if typing.TYPE_CHECKING:
from .parse_table import StateId
@dataclasses.dataclass(frozen=True)
class StackDiff:
"""StackDiff represent stack mutations which have to be performed when executing an action.
@ -269,7 +270,6 @@ class Unwind(Action):
return StackDiff(self.pop, self.nt, self.replay)
def unshift_action(self, num: int) -> Unwind:
assert self.replay >= num
return Unwind(self.nt, self.pop, replay=self.replay - num)
def shifted_action(self, shifted_term: Element) -> Unwind:
@ -281,6 +281,8 @@ class Reduce(Action):
table execution to resume shifting or replaying terms."""
__slots__ = ['unwind']
unwind: Unwind
def __init__(self, unwind: Unwind) -> None:
nt_name = unwind.nt.name
if isinstance(nt_name, InitNt):
@ -577,7 +579,6 @@ class FunCall(Action):
])))
def unshift_action(self, num: int) -> FunCall:
assert self.offset >= num
return FunCall(self.method, self.args,
trait=self.trait,
fallible=self.fallible,
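For context (not part of the patch): the actions in this file describe their stack mutation through a StackDiff, and unshift_action hands part of the replay count back to the caller. A runnable Python sketch with simplified stand-ins for the real Action classes (names ending in `Sketch` are hypothetical):

# Minimal standalone sketch (simplified stand-ins for the real Action
# classes) of how an Unwind-like action exposes its stack mutation.
from dataclasses import dataclass
from typing import Optional

@dataclass(frozen=True)
class StackDiffSketch:
    pop: int                 # terms removed from the stack
    nt: Optional[str]        # non-terminal pushed back, if any
    replay: int              # terms to replay after the mutation

@dataclass(frozen=True)
class UnwindSketch:
    nt: str
    pop: int
    replay: int = 0

    def update_stack_with(self) -> StackDiffSketch:
        return StackDiffSketch(self.pop, self.nt, self.replay)

    def unshift_action(self, num: int) -> "UnwindSketch":
        # Hand `num` replayed terms back to the caller, as in the patch.
        assert self.replay >= num
        return UnwindSketch(self.nt, self.pop, self.replay - num)

u = UnwindSketch("Expr", pop=3, replay=2)
assert u.unshift_action(2).update_stack_with().replay == 0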

third_party/rust/jsparagus/jsparagus/aps.py (vendored)

@ -4,7 +4,7 @@ from __future__ import annotations
import typing
from dataclasses import dataclass
from .lr0 import ShiftedTerm, Term
from .actions import Action, FilterStates
from .actions import Action, FilterStates, Replay
# Avoid circular reference between this module and parse_table.py
if typing.TYPE_CHECKING:
@ -55,6 +55,7 @@ def reduce_path(pt: ParseTable, shifted: Path) -> typing.Iterator[Path]:
nt = stack_diff.nt
assert nt is not None
depth = stack_diff.pop + stack_diff.replay
assert depth >= 0
if depth > 0:
# We are reducing at least one element from the stack.
stacked = [i for i, e in enumerate(shifted) if pt.term_is_stacked(e.term)]
@ -240,10 +241,12 @@ class APS:
# TODO: Add support for Lookahead and flag manipulation rules, as
# both of these would invalidate potential reduce paths.
if a.update_stack():
new_rp: typing.List[ShiftedTerm]
stack_diff = a.update_stack_with()
if stack_diff.replay < 0:
if isinstance(a, Replay):
assert stack_diff.pop == 0
assert stack_diff.nt is None
assert stack_diff.replay < 0
num_replay = -stack_diff.replay
assert len(self.replay) >= num_replay
new_rp = self.replay[:]
@ -267,10 +270,10 @@ class APS:
# we might loop on Optional rules, which would not match
# the expected behaviour of the parser.
continue
reducing = not a.follow_edge()
assert stack_diff.pop >= 0
assert stack_diff.nt is not None
assert stack_diff.replay >= 0
for path in reduce_path(pt, prev_sh):
# path contains the chains of state shifted, including
# epsilon transitions. The head of the path should be able
@ -314,21 +317,31 @@ class APS:
new_sh = prev_sh[:-len(path)] + [Edge(path[0].src, None)]
assert pt.is_valid_path(new_sh)
# When reducing, we replay terms which got previously
# pushed on the stack as our lookahead. These terms are
# computed here such that we can traverse the graph from
# `to` state, using the replayed terms.
# Update the replay list of the new APS, starting with the
# reduced non-terminal and followed by the lookahead terms
# which have to be replayed and/or the truncated replay
# list, if any are consumed while reducing.
replay = stack_diff.replay
nt = stack_diff.nt
assert nt is not None
new_rp = [nt]
if replay > 0:
# Move previously shifted terms to the replay list, as
# they would have to be replayed after reducing the
# non-terminal.
stacked_terms = [
typing.cast(ShiftedTerm, edge.term)
for edge in path if pt.term_is_stacked(edge.term)
]
new_rp = new_rp + stacked_terms[-replay:]
new_rp = new_rp + rp
new_rp = new_rp + stacked_terms[-replay:] + rp
elif replay == 0:
new_rp = new_rp + rp
elif replay < 0:
# Remove the replayed tokens from the front of the
# replay list as they are consumed by this Unwind
# action.
assert len(rp) >= -replay
new_rp = new_rp + rp[-replay:]
new_la = la[:max(len(la) - replay, 0)]
# If we are reducing, this implies that we are not
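For context (not part of the patch): the replay-list update above has three cases depending on the sign of stack_diff.replay. A standalone sketch of that branch as a hypothetical helper, where `rp` is the current replay list and `stacked_terms` are the terms shifted along the reduce path:

# Standalone Python sketch (hypothetical helper) of the replay-list update.
from typing import List

def updated_replay_list(nt: str, rp: List[str], stacked_terms: List[str],
                        replay: int) -> List[str]:
    new_rp = [nt]
    if replay > 0:
        # Previously shifted terms must be replayed after reducing.
        new_rp += stacked_terms[-replay:] + rp
    elif replay == 0:
        new_rp += rp
    else:
        # The Unwind action consumed -replay tokens from the replay list.
        assert len(rp) >= -replay
        new_rp += rp[-replay:]
    return new_rp

assert updated_replay_list("Expr", ["a", "b"], ["x", "y"], 1) == ["Expr", "y", "a", "b"]
assert updated_replay_list("Expr", ["a", "b"], ["x", "y"], -1) == ["Expr", "b"]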


@ -29,17 +29,24 @@ def write_python_parse_table(out: io.TextIOBase, parse_table: ParseTable) -> Non
" ShiftError, ShiftAccept)\n")
out.write("\n")
methods: OrderedSet[FunCall] = OrderedSet()
methods: OrderedSet[typing.Tuple[str, int]] = OrderedSet()
def write_epsilon_transition(indent: str, dest: StateId):
if parse_table.states[dest].epsilon != []:
def write_epsilon_transition(indent: str, dest_idx: StateId):
dest = parse_table.states[dest_idx]
if dest.epsilon != []:
assert dest.index < len(parse_table.states)
# This is a transition to an action.
out.write("{}state_{}_actions(parser, lexer)\n".format(indent, dest))
args = ""
for i in range(dest.arguments):
out.write("{}r{} = parser.replay.pop()\n".format(indent, i))
args += ", r{}".format(i)
out.write("{}state_{}_actions(parser, lexer{})\n".format(indent, dest.index, args))
else:
# This is a transition to a shift.
assert dest.arguments == 0
out.write("{}top = parser.stack.pop()\n".format(indent))
out.write("{}top = StateTermValue({}, top.term, top.value, top.new_line)\n"
.format(indent, dest))
.format(indent, dest.index))
out.write("{}parser.stack.append(top)\n".format(indent))
def write_action(act: Action, indent: str = "") -> typing.Tuple[str, bool]:
@ -50,13 +57,16 @@ def write_python_parse_table(out: io.TextIOBase, parse_table: ParseTable) -> Non
return indent, True
if isinstance(act, (Unwind, Reduce)):
stack_diff = act.update_stack_with()
out.write("{}replay = [StateTermValue(0, {}, value, False)]\n"
replay = stack_diff.replay
out.write("{}replay = []\n".format(indent))
while replay > 0:
replay -= 1
out.write("{}replay.append(parser.stack.pop())\n".format(indent))
out.write("{}replay.append(StateTermValue(0, {}, value, False))\n"
.format(indent, repr(stack_diff.nt)))
if stack_diff.replay > 0:
out.write("{}replay = replay + parser.stack[-{}:]\n".format(indent, stack_diff.replay))
if stack_diff.replay + stack_diff.pop > 0:
out.write("{}del parser.stack[-{}:]\n".format(indent, stack_diff.replay + stack_diff.pop))
out.write("{}parser.shift_list(replay, lexer)\n".format(indent))
if stack_diff.pop > 0:
out.write("{}del parser.stack[-{}:]\n".format(indent, stack_diff.pop))
out.write("{}parser.replay.extend(replay)\n".format(indent))
return indent, act.follow_edge()
if isinstance(act, Accept):
out.write("{}raise ShiftAccept()\n".format(indent))
@ -81,6 +91,13 @@ def write_python_parse_table(out: io.TextIOBase, parse_table: ParseTable) -> Non
return indent, True
if isinstance(act, FunCall):
enclosing_call_offset = act.offset
if enclosing_call_offset < 0:
# When replayed terms are given as function arguments, they are
# not part of the stack. However, we cheat the system by
# replaying all terms necessary to pop them uniformly. Thus, the
# naming of variables for negative offsets will always match the
# naming used when the offset is 0.
enclosing_call_offset = 0
def map_with_offset(args: typing.Iterable[OutputExpr]) -> typing.Iterator[str]:
get_value = "parser.stack[{}].value"
@ -101,7 +118,7 @@ def write_python_parse_table(out: io.TextIOBase, parse_table: ParseTable) -> Non
assert len(act.args) == 1
out.write("{}{} = {}\n".format(indent, act.set_to, next(map_with_offset(act.args))))
else:
methods.add(act)
methods.add((act.method, len(act.args)))
out.write("{}{} = parser.methods.{}({})\n".format(
indent, act.set_to, method_name_to_python(act.method),
", ".join(map_with_offset(act.args))
@ -118,7 +135,23 @@ def write_python_parse_table(out: io.TextIOBase, parse_table: ParseTable) -> Non
assert i == state.index
if state.epsilon == []:
continue
out.write("def state_{}_actions(parser, lexer):\n".format(i))
args = []
for j in range(state.arguments):
args.append("a{}".format(j))
out.write("def state_{}_actions(parser, lexer{}):\n".format(
i, "".join(map(lambda s: ", " + s, args))))
if state.arguments > 0:
out.write(" parser.replay.extend([{}])\n".format(", ".join(reversed(args))))
term, dest = next(iter(state.epsilon))
if term.update_stack():
# If we Unwind, make sure all elements are replayed on the stack before starting.
out.write(" # {}\n".format(term))
stack_diff = term.update_stack_with()
replay = stack_diff.replay
if stack_diff.pop + replay >= 0:
while replay < 0:
replay += 1
out.write(" parser.stack.append(parser.replay.pop())\n")
out.write("{}\n".format(parse_table.debug_context(i, "\n", " # ")))
out.write(" value = None\n")
for action, dest in state.edges():
@ -178,12 +211,11 @@ def write_python_parse_table(out: io.TextIOBase, parse_table: ParseTable) -> Non
# Class used to provide default methods when not defined by the caller.
out.write("class DefaultMethods:\n")
for act in methods:
assert isinstance(act, FunCall)
args = ", ".join("x{}".format(i) for i in range(len(act.args)))
name = method_name_to_python(act.method)
out.write(" def {}(self, {}):\n".format(name, args))
out.write(" return ({}, {})\n".format(repr(name), args))
for method, arglen in methods:
act_args = ", ".join("x{}".format(i) for i in range(arglen))
name = method_name_to_python(method)
out.write(" def {}(self, {}):\n".format(name, act_args))
out.write(" return ({}, {})\n".format(repr(name), act_args))
if not methods:
out.write(" pass\n")
out.write("\n")


@ -118,12 +118,14 @@ class RustActionWriter:
ast_builder = types.Type("AstBuilderDelegate", (types.Lifetime("alloc"),))
def __init__(self, writer, mode, traits, indent):
self.states = writer.states
self.writer = writer
self.mode = mode
self.traits = traits
self.indent = indent
self.has_ast_builder = self.ast_builder in traits
self.used_variables = set()
self.replay_args = []
def implement_trait(self, funcall):
"Returns True if this function call should be encoded"
@ -146,10 +148,14 @@ class RustActionWriter:
if isinstance(act, (Reduce, Unwind)):
yield "value"
elif isinstance(act, FunCall):
arg_offset = act.offset
if arg_offset < 0:
# See write_funcall.
arg_offset = 0
def map_with_offset(args):
for a in args:
if isinstance(a, int):
yield a + act.offset
yield a + arg_offset
if isinstance(a, str):
yield a
elif isinstance(a, Some):
@ -167,9 +173,10 @@ class RustActionWriter:
"Delegate to the RustParserWriter.write function"
self.writer.write(self.indent, string, *format_args)
def write_state_transitions(self, state):
def write_state_transitions(self, state, replay_args):
"Given a state, generate the code corresponding to all outgoing epsilon edges."
try:
self.replay_args = replay_args
assert not state.is_inconsistent()
assert len(list(state.shifted_edges())) == 0
for ctx in self.writer.parse_table.debug_context(state.index, None):
@ -189,12 +196,24 @@ class RustActionWriter:
print(self.writer.parse_table.debug_context(state.index, "\n", "# "))
raise exc
def write_replay_args(self, n):
rp_args = self.replay_args[:n]
rp_stck = self.replay_args[n:]
for tv in rp_stck:
self.write("parser.replay({});", tv)
return rp_args
def write_epsilon_transition(self, dest):
self.write("// --> {}", dest)
if dest >= self.writer.shift_count:
self.write("{}_{}(parser)", self.mode, dest)
# Replay arguments which are not accepted as input of the next state.
dest = self.states[dest]
rp_args = self.write_replay_args(dest.arguments)
self.write("// --> {}", dest.index)
if dest.index >= self.writer.shift_count:
self.write("{}_{}(parser{})", self.mode, dest.index, "".join(map(lambda v: ", " + v, rp_args)))
else:
self.write("parser.epsilon({});", dest)
assert dest.arguments == 0
self.write("parser.epsilon({});", dest.index)
self.write("Ok(false)")
def write_condition(self, state, first_act):
@ -213,6 +232,7 @@ class RustActionWriter:
# to make this backtracking visible through APS.
assert len(list(state.edges())) == 1
act, dest = next(state.edges())
assert len(self.replay_args) == 0
assert -act.offset > 0
self.write("// {}", str(act))
self.write("if !parser.check_not_on_new_line({})? {{", -act.offset)
@ -274,15 +294,44 @@ class RustActionWriter:
stack_diff = act.update_stack_with()
start = 0
depth = stack_diff.pop
if stack_diff.replay > 0:
self.write("parser.rewind({});", stack_diff.replay)
start = stack_diff.replay
args = len(self.replay_args)
replay = stack_diff.replay
if replay < 0:
# At the moment, we do not handle having more arguments than
# what is being popped and replayed, thus write back the extra
# arguments and continue.
if stack_diff.pop + replay < 0:
self.replay_args = self.write_replay_args(replay)
replay = 0
if replay + stack_diff.pop - args > 0:
assert (replay >= 0 and args == 0) or \
(replay == 0 and args >= 0)
if replay > 0:
# At the moment, assume that arguments are only added once we have
# consumed all replayed terms. Thus the replay_args can only be
# non-empty once replay is 0. Otherwise some of the replay_args
# would have to be replayed.
assert args == 0
self.write("parser.rewind({});", replay)
start = replay
depth += start
inputs = []
for i in range(start, depth):
name = 's'
name = 's{}'.format(i + 1)
if i + 1 not in self.used_variables:
name = '_s'
self.write("let {}{} = parser.pop();", name, i + 1)
name = '_' + name
inputs.append(name)
if stack_diff.pop > 0:
args_pop = min(len(self.replay_args), stack_diff.pop)
# Pop by moving arguments of the action function.
for i, name in enumerate(inputs[:args_pop]):
self.write("let {} = {};", name, self.replay_args[-i - 1])
# Pop by removing elements from the parser stack.
for name in inputs[args_pop:]:
self.write("let {} = parser.pop();", name)
if args_pop > 0:
del self.replay_args[-args_pop:]
if isinstance(act, Seq):
for a in act.actions:
@ -316,6 +365,7 @@ class RustActionWriter:
raise ValueError("Unexpected action type")
def write_replay(self, act):
assert len(self.replay_args) == 0
for shift_state in act.replay_steps:
self.write("parser.shift_replayed({});", shift_state)
@ -343,14 +393,23 @@ class RustActionWriter:
self.writer.nonterminal_to_camel(stack_diff.nt))
if value != "value":
self.write("let value = {};", value)
self.write("parser.replay(TermValue { term, value });")
if not act.follow_edge():
self.write("return Ok(false)")
self.write("let reduced = TermValue { term, value };")
self.replay_args.append("reduced")
def write_accept(self):
self.write("return Ok(true);")
def write_funcall(self, act, is_packed):
arg_offset = act.offset
if arg_offset < 0:
# NOTE: When replacing replayed stack elements by arguments, the
# offset is reduced by -1, and can become negative for cases where
# we read the value associated with an argument instead of the
# value read from the stack. However, write_action shifts everything
# as if we had replayed all the necessary terms, and therefore
# variables are named as if the offset were 0.
arg_offset = 0
def no_unpack(val):
return val
@ -367,7 +426,7 @@ class RustActionWriter:
get_value = "s{}"
for a in args:
if isinstance(a, int):
yield unpack(get_value.format(a + act.offset))
yield unpack(get_value.format(a + arg_offset))
elif isinstance(a, str):
yield unpack(a)
elif isinstance(a, Some):
@ -743,6 +802,11 @@ class RustParserWriter:
traits_text = ' + '.join(map(self.type_to_rust, traits))
table_holder_name = self.to_camel_case(mode)
table_holder_type = table_holder_name + "<'alloc, Handler>"
# As we do not have default associated types yet in Rust
# (rust-lang#29661), we have to peek at the parameter of the
# ParserTrait.
assert list(traits)[0].name == "ParserTrait"
arg_type = "TermValue<" + self.type_to_rust(list(traits)[0].args[1]) + ">"
self.write(0, "struct {} {{", table_holder_type)
self.write(1, "fns: [fn(&mut Handler) -> Result<'alloc, bool>; {}]",
self.action_from_shift_count)
@ -754,6 +818,7 @@ class RustParserWriter:
self.write(1, "const TABLE : {} = {} {{", table_holder_type, table_holder_name)
self.write(2, "fns: [")
for state in self.states[start_at:end_at]:
assert state.arguments == 0
self.write(3, "{}_{},", mode, state.index)
self.write(2, "],")
self.write(1, "};")
@ -771,16 +836,20 @@ class RustParserWriter:
self.write(0, "}")
self.write(0, "")
for state in self.states[self.shift_count:]:
state_args = ""
for i in range(state.arguments):
state_args += ", v{}: {}".format(i, arg_type)
replay_args = ["v{}".format(i) for i in range(state.arguments)]
self.write(0, "#[inline]")
self.write(0, "#[allow(unused)]")
self.write(0,
"pub fn {}_{}<'alloc, Handler>(parser: &mut Handler) "
"pub fn {}_{}<'alloc, Handler>(parser: &mut Handler{}) "
"-> Result<'alloc, bool>",
mode, state.index)
mode, state.index, state_args)
self.write(0, "where")
self.write(1, "Handler: {}", ' + '.join(map(self.type_to_rust, traits)))
self.write(0, "{")
action_writer.write_state_transitions(state)
action_writer.write_state_transitions(state, replay_args)
self.write(0, "}")
def entry(self):
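For context (not part of the patch): when an Unwind pops terms and some of them are already available as the state function's arguments, the generated Rust binds those values from the arguments (innermost first) and only pops the remainder from the parser stack. A standalone Python sketch of that split, using a hypothetical helper:

# Standalone Python sketch (hypothetical helper) of the split performed
# above: of the `pop` terms an Unwind needs, up to len(replay_args) come
# from the state function's arguments and the rest are popped from the
# parser stack.
from typing import List, Tuple

def split_pops(replay_args: List[str], pop: int) -> Tuple[List[str], int]:
    args_pop = min(len(replay_args), pop)
    from_args = list(reversed(replay_args[-args_pop:])) if args_pop else []
    from_stack = pop - args_pop
    return from_args, from_stack

# With two arguments and pop == 3, two values come from the arguments and
# one parser.pop() call remains.
assert split_pops(["v0", "v1"], 3) == (["v1", "v0"], 1)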

(Diff not shown because of its large size.)


@ -33,7 +33,7 @@ class StateAndTransitions:
"""
__slots__ = ["index", "locations", "terminals", "nonterminals", "errors",
"epsilon", "delayed_actions", "backedges", "_hash",
"epsilon", "delayed_actions", "arguments", "backedges", "_hash",
"stable_hash"]
# Numerical index of this state.
@ -48,6 +48,13 @@ class StateAndTransitions:
# conflict.
delayed_actions: OrderedFrozenSet[DelayedAction]
# Number of arguments of an action state.
#
# Instead of having action states with a non-empty replay list of terms, we
# have a non-empty list of arguments whose size is described by this
# variable.
arguments: int
# Outgoing edges taken when shifting terminals.
terminals: typing.Dict[str, StateId]
@ -76,7 +83,8 @@ class StateAndTransitions:
self,
index: StateId,
locations: OrderedFrozenSet[str],
delayed_actions: OrderedFrozenSet[DelayedAction] = OrderedFrozenSet()
delayed_actions: OrderedFrozenSet[DelayedAction] = OrderedFrozenSet(),
arguments: int = 0
) -> None:
assert isinstance(locations, OrderedFrozenSet)
assert isinstance(delayed_actions, OrderedFrozenSet)
@ -87,6 +95,7 @@ class StateAndTransitions:
self.epsilon = []
self.locations = locations
self.delayed_actions = delayed_actions
self.arguments = arguments
self.backedges = OrderedSet()
# NOTE: The hash of a state depends on its location in the LR0
@ -98,6 +107,8 @@ class StateAndTransitions:
yield "delayed_actions"
for action in self.delayed_actions:
yield hash(action)
yield "arguments"
yield arguments
self._hash = hash(tuple(hashed_content()))
h = hashlib.md5()
@ -251,7 +262,8 @@ class StateAndTransitions:
def __eq__(self, other: object) -> bool:
return (isinstance(other, StateAndTransitions)
and sorted(self.locations) == sorted(other.locations)
and sorted(self.delayed_actions) == sorted(other.delayed_actions))
and sorted(self.delayed_actions) == sorted(other.delayed_actions)
and self.arguments == other.arguments)
def __hash__(self) -> int:
return self._hash
@ -439,13 +451,14 @@ class ParseTable:
def new_state(
self,
locations: OrderedFrozenSet[str],
delayed_actions: OrderedFrozenSet[DelayedAction] = OrderedFrozenSet()
delayed_actions: OrderedFrozenSet[DelayedAction] = OrderedFrozenSet(),
arguments: int = 0
) -> typing.Tuple[bool, StateAndTransitions]:
"""Get or create state with an LR0 location and delayed actions. Returns a tuple
where the first element is whether the element is newly created, and
the second element is the State object."""
index = len(self.states)
state = StateAndTransitions(index, locations, delayed_actions)
state = StateAndTransitions(index, locations, delayed_actions, arguments)
try:
return False, self.state_cache[state]
except KeyError:
@ -456,11 +469,12 @@ class ParseTable:
def get_state(
self,
locations: OrderedFrozenSet[str],
delayed_actions: OrderedFrozenSet[DelayedAction] = OrderedFrozenSet()
delayed_actions: OrderedFrozenSet[DelayedAction] = OrderedFrozenSet(),
arguments: int = 0
) -> StateAndTransitions:
"""Like new_state(), but only returns the state without returning whether it is
newly created or not."""
_, state = self.new_state(locations, delayed_actions)
_, state = self.new_state(locations, delayed_actions, arguments)
return state
def remove_state(self, s: StateId, maybe_unreachable_set: OrderedSet[StateId]) -> None:
@ -1468,7 +1482,8 @@ class ParseTable:
# print("After:\n")
locations = reduce_state.locations
delayed: OrderedFrozenSet[DelayedAction] = OrderedFrozenSet(filter_by_replay_term.items())
is_new, filter_state = self.new_state(locations, delayed)
replay_size = 1 # Replay the unwound non-terminal
is_new, filter_state = self.new_state(locations, delayed, replay_size)
self.add_edge(reduce_state, unwind_term, filter_state.index)
if not is_new:
# The destination state already exists. Assert that all
@ -1500,7 +1515,7 @@ class ParseTable:
# Add FilterStates action from the filter_state to the replay_state.
locations = dest.locations
delayed = OrderedFrozenSet(itertools.chain(dest.delayed_actions, [replay_term]))
is_new, replay_state = self.new_state(locations, delayed)
is_new, replay_state = self.new_state(locations, delayed, replay_size)
self.add_edge(filter_state, filter_term, replay_state.index)
assert (not is_new) == (replay_term in replay_state)
@ -1541,13 +1556,20 @@ class ParseTable:
stack_diff = unwind_term.update_stack_with()
if not stack_diff.reduce_stack():
return False
if stack_diff.replay <= 0:
if stack_diff.pop + stack_diff.replay <= 0:
return False
# Remove replayed terms from the Unwind action.
replayed = replay_term.replay_steps
unshifted = min(stack_diff.replay, len(replayed))
unshifted = min(stack_diff.replay + min(s.arguments, stack_diff.pop), len(replayed))
if unshifted < len(replayed):
# We do not have all replayed terms as arguments, thus do not
# consume arguments
unshifted = min(stack_diff.replay, len(replayed))
if unshifted == 0:
return False
new_unwind_term = unwind_term.unshift_action(unshifted)
new_replay = new_unwind_term.update_stack_with().replay
# Replace the replay_term and unwind_term by terms which are
# avoiding extra replay actions.
@ -1555,11 +1577,12 @@ class ParseTable:
if len(replayed) == unshifted:
# The Unwind action replays more terms than we originally
# had. The replay term is replaced by an Unwind edge instead.
assert s.arguments >= -new_replay
self.add_edge(s, new_unwind_term, unwind_dest_idx)
else:
# The Unwind action replays fewer terms than we originally
# had. The replay terms are shortened and a new state is created
# to accommodate the new Unwind action.
# The Unwind action replays and pops fewer terms than we
# originally had. Thus the replay action is shortened and a new
# state is created to accommodate the new Unwind action.
assert unshifted >= 1
new_replay_term = Replay(replayed[:-unshifted])
implicit_replay_term = Replay(replayed[-unshifted:])
@ -1573,6 +1596,7 @@ class ParseTable:
# Add new Replay and new Unwind actions.
self.add_edge(s, new_replay_term, unwind_state.index)
if is_new:
assert unwind_state.arguments >= -new_replay
self.add_edge(unwind_state, new_unwind_term, unwind_dest_idx)
assert not unwind_state.is_inconsistent()
assert not s.is_inconsistent()
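For context (not part of the patch): the `unshifted` computation above bounds how many of a Replay action's terms can be folded into the following Unwind, optionally turning popped terms into state arguments; if not all replayed terms can become arguments, the argument contribution is dropped. A standalone sketch with hypothetical numbers:

# Standalone Python sketch (hypothetical helper and numbers) of the
# `unshifted` computation above.
def unshifted_terms(replay: int, pop: int, state_arguments: int,
                    replayed_len: int) -> int:
    unshifted = min(replay + min(state_arguments, pop), replayed_len)
    if unshifted < replayed_len:
        # Not all replayed terms can become arguments, so do not consume
        # arguments at all.
        unshifted = min(replay, replayed_len)
    return unshifted

# Unwind replays 1 term and pops 3; the state accepts 2 arguments and the
# Replay action replays 3 terms: all 3 fold away (1 + min(2, 3) == 3).
assert unshifted_terms(replay=1, pop=3, state_arguments=2, replayed_len=3) == 3
# If the Replay action replays 4 terms, only the plain replay count folds.
assert unshifted_terms(replay=1, pop=3, state_arguments=2, replayed_len=4) == 1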


@ -343,6 +343,7 @@ def extract_types(paths):
extract_enum(types, paths, 'FunctionPrefixKind')
extract_enum(types, paths, 'GeneratorResumeKind')
extract_enum(types, paths, 'ThrowMsgKind')
extract_enum(types, paths, 'ThrowCondition', 'ThrowMsgKind.h')
extract_enum(types, paths, 'TryNoteKind', 'StencilEnums.h')
extract_symbols()
@ -438,6 +439,7 @@ def parse_operands(opcode):
'FunctionPrefixKind',
'GeneratorResumeKind',
'ThrowMsgKind',
'ThrowCondition',
]
for operand in opcode.operands_array:
@ -452,7 +454,7 @@ def parse_operands(opcode):
elif ty in copied_types:
pass
else:
print(f'Unspported operand type {ty}', file=sys.stderr)
print(f'Unsupported operand type {ty}', file=sys.stderr)
sys.exit(1)
if 'JOF_ATOM' in opcode.format_: