Bug 1623172 - Part 1: Receive str and allocate String in run_smoosh. r=nbp

Differential Revision: https://phabricator.services.mozilla.com/D67227

--HG--
extra : moz-landing-system : lando
Tooru Fujisawa 2020-03-19 05:32:32 +00:00
Parent: b213e509fc
Commit: 4b8433e27b
23 changed files with 726 additions and 719 deletions

View file

@ -20,7 +20,7 @@ tag = "v0.2.2"
[source."https://github.com/mozilla-spidermonkey/jsparagus"]
git = "https://github.com/mozilla-spidermonkey/jsparagus"
replace-with = "vendored-sources"
rev = "2ce030d69313c8d4203d6cbf8f2011e68da43e87"
rev = "1e747c70bd5b2e959880a57f57ed435660aaafe6"
[source."https://github.com/kvark/spirv_cross"]
branch = "wgpu"

Cargo.lock (generated)
View file

@ -2059,7 +2059,7 @@ dependencies = [
[[package]]
name = "jsparagus"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=2ce030d69313c8d4203d6cbf8f2011e68da43e87#2ce030d69313c8d4203d6cbf8f2011e68da43e87"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=1e747c70bd5b2e959880a57f57ed435660aaafe6#1e747c70bd5b2e959880a57f57ed435660aaafe6"
dependencies = [
"jsparagus-ast",
"jsparagus-emitter",
@ -2070,7 +2070,7 @@ dependencies = [
[[package]]
name = "jsparagus-ast"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=2ce030d69313c8d4203d6cbf8f2011e68da43e87#2ce030d69313c8d4203d6cbf8f2011e68da43e87"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=1e747c70bd5b2e959880a57f57ed435660aaafe6#1e747c70bd5b2e959880a57f57ed435660aaafe6"
dependencies = [
"bumpalo",
"indexmap",
@ -2081,7 +2081,7 @@ dependencies = [
[[package]]
name = "jsparagus-emitter"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=2ce030d69313c8d4203d6cbf8f2011e68da43e87#2ce030d69313c8d4203d6cbf8f2011e68da43e87"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=1e747c70bd5b2e959880a57f57ed435660aaafe6#1e747c70bd5b2e959880a57f57ed435660aaafe6"
dependencies = [
"bumpalo",
"byteorder",
@ -2092,7 +2092,7 @@ dependencies = [
[[package]]
name = "jsparagus-generated-parser"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=2ce030d69313c8d4203d6cbf8f2011e68da43e87#2ce030d69313c8d4203d6cbf8f2011e68da43e87"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=1e747c70bd5b2e959880a57f57ed435660aaafe6#1e747c70bd5b2e959880a57f57ed435660aaafe6"
dependencies = [
"bumpalo",
"jsparagus-ast",
@ -2101,7 +2101,7 @@ dependencies = [
[[package]]
name = "jsparagus-parser"
version = "0.1.0"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=2ce030d69313c8d4203d6cbf8f2011e68da43e87#2ce030d69313c8d4203d6cbf8f2011e68da43e87"
source = "git+https://github.com/mozilla-spidermonkey/jsparagus?rev=1e747c70bd5b2e959880a57f57ed435660aaafe6#1e747c70bd5b2e959880a57f57ed435660aaafe6"
dependencies = [
"bumpalo",
"jsparagus-ast",

View file

@ -7,6 +7,6 @@ license = "MIT/Apache-2.0"
[dependencies]
bumpalo = "2.6.0"
jsparagus = { git = "https://github.com/mozilla-spidermonkey/jsparagus", rev = "2ce030d69313c8d4203d6cbf8f2011e68da43e87" }
jsparagus = { git = "https://github.com/mozilla-spidermonkey/jsparagus", rev = "1e747c70bd5b2e959880a57f57ed435660aaafe6" }
# For local development, replace above with
# jsparagus = { path = "{path to jsparagus}" }

View file

@ -268,7 +268,8 @@ pub unsafe extern "C" fn run_smoosh(
options: &SmooshCompileOptions,
) -> SmooshResult {
let text = str::from_utf8(slice::from_raw_parts(text, text_len)).expect("Invalid UTF8");
match smoosh(text, options) {
let allocator = bumpalo::Bump::new();
match smoosh(&allocator, text, options) {
Ok(mut result) => SmooshResult {
unimplemented: false,
error: CVec::empty(),
@ -278,7 +279,11 @@ pub unsafe extern "C" fn run_smoosh(
result
.all_atoms
.drain(..)
.map(|a| CVec::from(a.into_bytes()))
// FIXME: Instead of allocating an extra buffer,
// pass a raw pointer to the str (either held by
// `Bump`, or static), and release the non-static str
// in a later step.
.map(|a| CVec::from(a.to_string().into_bytes()))
.collect(),
),
gcthings: CVec::from(result.gcthings.drain(..).map(|x| x.into()).collect()),
@ -391,8 +396,11 @@ pub unsafe extern "C" fn free_smoosh(result: SmooshResult) {
//Vec::from_raw_parts(bytecode.data, bytecode.len, bytecode.capacity);
}
fn smoosh(text: &str, options: &SmooshCompileOptions) -> Result<EmitResult, SmooshError> {
let allocator = bumpalo::Bump::new();
fn smoosh<'alloc>(
allocator: &'alloc bumpalo::Bump,
text: &'alloc str,
options: &SmooshCompileOptions,
) -> Result<EmitResult<'alloc>, SmooshError> {
let parse_options = ParseOptions::new();
let atoms = Rc::new(RefCell::new(SourceAtomSet::new()));
let parse_result = match parse_script(&allocator, text, &parse_options, atoms.clone()) {
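For orientation, a minimal standalone sketch of the ownership pattern this hunk introduces, assuming the bumpalo crate (which the patch itself uses) and hypothetical trimmed-down stand-ins (a `compile` function and a one-field `EmitResult`) rather than the real jsparagus types: the FFI entry point owns the `Bump` arena, and the compile function borrows both the arena and the source text for the same `'alloc` lifetime, so the result can hold `&'alloc str` slices instead of owned Strings.

use bumpalo::Bump;

// Hypothetical simplified stand-in for the real EmitResult<'alloc>.
struct EmitResult<'alloc> {
    all_atoms: Vec<&'alloc str>,
}

// Mirrors the shape of the new `smoosh` signature: the arena and the source
// text share 'alloc, so the result may borrow from either of them.
fn compile<'alloc>(allocator: &'alloc Bump, text: &'alloc str) -> EmitResult<'alloc> {
    // An atom can point into the source itself or into an arena-held copy.
    let arena_copy: &'alloc str = allocator.alloc_str(text);
    EmitResult {
        all_atoms: vec![text, arena_copy],
    }
}

fn main() {
    // The caller (run_smoosh in the real code) owns the arena and keeps it
    // alive while the borrowed result is in use.
    let allocator = Bump::new();
    let source = String::from("let answer = 42;");
    let result = compile(&allocator, &source);

    // Crossing the FFI boundary still needs owned bytes, which is what the
    // FIXME above is about: `a.to_string().into_bytes()` copies each atom.
    let owned: Vec<Vec<u8>> = result
        .all_atoms
        .iter()
        .map(|a| a.to_string().into_bytes())
        .collect();
    assert_eq!(owned.len(), 2);
}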

View file

@ -1 +1 @@
{"files":{"Cargo.toml":"2f0c8ec9fd1c91e102de4204892131106741e99885a7418c60eb894ed1a51b82","ast.json":"33403bd4069c59ce599ccf72b7ca150428266942b199a1a24160478cb9140a21","generate_ast.py":"0a281a361ea94b79bcec1553db88b5473a04e7ec097bf264241df5e1ac83ee15","src/arena.rs":"659079c9cc2222fa2ea426f69fc1a639dafb2f7ca52ff7fd58d077a5ac26b64c","src/associated_data.rs":"c5e40ca2d435d151e2032d224a97a8bc38b5f19acb7c5e9cc667219b0857476a","src/dump_generated.rs":"a7f3bb4ebaac6f04cf4922606ad92c42072a06fc1f8b0f0f65ddaebf7c0ced38","src/json.rs":"ccc437c27f3fdaabb3d92b28eeb6bdee083bdf8014639ce09e7204a6529661a9","src/lib.rs":"a3bab6a72127984521d9f2a66a2736bbe651adb72fba62b5fbf64f0f6ab656de","src/source_atom_set.rs":"90618a621d8c92eeb91270f0f33e65a67663fdf19a92b3b97090d996f58ebeb0","src/source_location.rs":"3832440ecec6de726262837072810410bddb45c075288386509511c153f6afd9","src/source_location_accessor_generated.rs":"589fb97932d612837fdb1384b169830270ce5b6833bcec18e9dc5d5904f0e82e","src/type_id_generated.rs":"c70cbdc0f84c2fd7e84fa6ef1614be22f63e52ffcbf2f93714c189dce3dad557","src/types_generated.rs":"0b8bbd6d331736119d765ed1fc74998e8b8b0967c923bfec21425733007082fb","src/visit_generated.rs":"e2a75c3dfd5a5a6ba4150515c932596979f3e7ed68e601c2b964cde5271d9b69"},"package":null}
{"files":{"Cargo.toml":"2f0c8ec9fd1c91e102de4204892131106741e99885a7418c60eb894ed1a51b82","ast.json":"33403bd4069c59ce599ccf72b7ca150428266942b199a1a24160478cb9140a21","generate_ast.py":"0a281a361ea94b79bcec1553db88b5473a04e7ec097bf264241df5e1ac83ee15","src/arena.rs":"659079c9cc2222fa2ea426f69fc1a639dafb2f7ca52ff7fd58d077a5ac26b64c","src/associated_data.rs":"c5e40ca2d435d151e2032d224a97a8bc38b5f19acb7c5e9cc667219b0857476a","src/dump_generated.rs":"a7f3bb4ebaac6f04cf4922606ad92c42072a06fc1f8b0f0f65ddaebf7c0ced38","src/json.rs":"ccc437c27f3fdaabb3d92b28eeb6bdee083bdf8014639ce09e7204a6529661a9","src/lib.rs":"a3bab6a72127984521d9f2a66a2736bbe651adb72fba62b5fbf64f0f6ab656de","src/source_atom_set.rs":"6f4116ae6b5ae2838cea4b3bd8fa30003efe37080c6b4039f279f4c21fc0026b","src/source_location.rs":"3832440ecec6de726262837072810410bddb45c075288386509511c153f6afd9","src/source_location_accessor_generated.rs":"589fb97932d612837fdb1384b169830270ce5b6833bcec18e9dc5d5904f0e82e","src/type_id_generated.rs":"c70cbdc0f84c2fd7e84fa6ef1614be22f63e52ffcbf2f93714c189dce3dad557","src/types_generated.rs":"0b8bbd6d331736119d765ed1fc74998e8b8b0967c923bfec21425733007082fb","src/visit_generated.rs":"e2a75c3dfd5a5a6ba4150515c932596979f3e7ed68e601c2b964cde5271d9b69"},"package":null}

View file

@ -1,4 +1,4 @@
use std::collections::HashMap;
use indexmap::set::IndexSet;
/// Index into SourceAtomSet.atoms.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
@ -160,18 +160,14 @@ for_all_common_atoms!(define_struct);
/// WARNING: This set itself does *NOT* map to JSScript::atoms().
#[derive(Debug)]
pub struct SourceAtomSet<'alloc> {
atoms: Vec<String>,
/// Cache for the case the same string is inserted multiple times.
atom_indices: HashMap<&'alloc str, SourceAtomSetIndex>,
atoms: IndexSet<&'alloc str>,
}
impl<'alloc> SourceAtomSet<'alloc> {
// Create a set, with all common atoms inserted.
pub fn new() -> Self {
let mut result = Self {
atoms: Vec::new(),
atom_indices: HashMap::new(),
atoms: IndexSet::new(),
};
result.insert_common_atoms();
result
@ -183,10 +179,7 @@ impl<'alloc> SourceAtomSet<'alloc> {
($self: ident,
$(($s:tt, $method:ident, $variant:ident),)*) => {
$(
$self.atoms.push($s.to_string());
$self
.atom_indices
.insert($s, CommonSourceAtomSetIndices::$method());
$self.atoms.insert($s);
)*
};
}
@ -199,31 +192,22 @@ impl<'alloc> SourceAtomSet<'alloc> {
// it with the result of this method.
pub fn new_uninitialized() -> Self {
Self {
atoms: Vec::new(),
atom_indices: HashMap::new(),
atoms: IndexSet::new(),
}
}
pub fn insert(&mut self, s: &'alloc str) -> SourceAtomSetIndex {
match self.atom_indices.get(s) {
Some(index) => return *index,
_ => {}
}
let index = self.atoms.len();
self.atoms.push(s.to_string());
let result = SourceAtomSetIndex::new(index);
self.atom_indices.insert(s, result);
result
let (index, _) = self.atoms.insert_full(s);
SourceAtomSetIndex::new(index)
}
pub fn get(&self, index: SourceAtomSetIndex) -> String {
self.atoms[usize::from(index)].clone()
pub fn get(&self, index: SourceAtomSetIndex) -> &'alloc str {
self.atoms.get_index(usize::from(index)).unwrap()
}
}
impl<'alloc> From<SourceAtomSet<'alloc>> for Vec<String> {
fn from(set: SourceAtomSet<'alloc>) -> Vec<String> {
set.atoms
impl<'alloc> From<SourceAtomSet<'alloc>> for Vec<&'alloc str> {
fn from(set: SourceAtomSet<'alloc>) -> Vec<&'alloc str> {
set.atoms.into_iter().collect()
}
}
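The rewrite above leans on indexmap's IndexSet, which keeps insertion order and deduplicates in a single structure, so the previous Vec<String> plus HashMap cache collapse into one field. A small self-contained illustration (using plain string literals rather than arena-allocated slices) of the two calls the new code relies on, insert_full and get_index:

use indexmap::IndexSet;

fn main() {
    let mut atoms: IndexSet<&str> = IndexSet::new();

    // insert_full returns the entry's index plus whether it was newly added,
    // so inserting the same &str twice hands back the same index.
    let (first, newly_added) = atoms.insert_full("undefined");
    assert!(newly_added);
    let (again, newly_added) = atoms.insert_full("undefined");
    assert!(!newly_added);
    assert_eq!(first, again);

    // get_index recovers the &str by position, which is what
    // SourceAtomSet::get now does with a SourceAtomSetIndex.
    assert_eq!(atoms.get_index(first), Some(&"undefined"));

    // Iteration preserves insertion order, so the From<SourceAtomSet>
    // impl can simply collect into a Vec.
    let all: Vec<&str> = atoms.into_iter().collect();
    assert_eq!(all, vec!["undefined"]);
}

That insert_full returns the existing index for a duplicate is what lets SourceAtomSet::insert drop the separate atom_indices lookup in the hunk above.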

View file

@ -1 +1 @@
{"files":{"Cargo.toml":"3576f06eae3affbb94042108d3b7be9d79625e78a4bfd169ec5e502eeb8f8ba5","scripts/update_opcodes.py":"3201c128598663ffe6c6336c96b90dc288a4923fe522ab4dbcf5960eac1042c1","src/ast_emitter.rs":"120d208664f0c83e044c4c2d5f76e30b31502853fa3dde41e6d30dcff739e7c9","src/compilation_info.rs":"80aac7042fc242c9062bacef3462b338e1808117f4ec9f2ddb6a4cb6967186ea","src/copy/BytecodeUtil.h":"de32bfbf21c6345e312f60c8523b80bf74c05083e67a58fab8a3fa54a64b7530","src/copy/Opcodes.h":"5fe588f351eccd027b854fe8e6fb9cf52a9c7de3784af8234d15b9f90b114d54","src/dis.rs":"945acee8fce373f160bdd53f538bcc97037eaade0029996c9c9cbda980c65205","src/emitter.rs":"08bd18fb0fbaae3e8af68bc762484bb0e84e9addf1e2b4376aa2b449185f53fe","src/emitter_scope.rs":"c7109ae1fa69cbaa465229ed49ecc11aaa11fedc20ddd9a23aec0b719ee549ef","src/forward_jump_emitter.rs":"c7fb8bbb3f75f166fbc91a62d7eca15cec081cd7afe71890ba9b5cd44de233f6","src/frame_slot.rs":"b20c81d67c572f20d06d493b211cd3eaa0432a8294541583643b82df3af2f813","src/gcthings.rs":"1113f59dc7e08eca6afd0d3dfae3f035c6f79e66f8146582de05c4b717a6b6c1","src/lib.rs":"aa7a9236a7c2b29d449a85177bab89ec5206f8bef6d2bb81bc1c27664841a9ed","src/opcode.rs":"6c2695e14552ab1f13c10f14f920c76b9bd70e4c70e250a317edbf9fe8478cb4","src/opcode_info.rs":"2a6f368f8df71cc34a39879d39d95d0a007e5f7b3928830ec90e6cd9305ff288","src/reference_op_emitter.rs":"4e88368d405eeedd8590be8757d1f1adf7cbb5b04c54d09c6bd2abac7a57b118","src/scope.rs":"daa020242e0ef38d359d2ddf70f9f7ce8bb9ce5f217af73476da14968f20fe47","src/scope_notes.rs":"5b46276cac144171d88e144d637d6db0210393a64ce98fbdb01959afd2b19692","src/scope_pass.rs":"f8f98df3d2f29dcfedf4815a3641f6ad529f1b4b219f282e3a4f11daab329f08","src/script_atom_set.rs":"8a567a786c6cc15192ca629deb3544546ec204efd1d05d16ff9ccdbf42a94e96"},"package":null}
{"files":{"Cargo.toml":"3576f06eae3affbb94042108d3b7be9d79625e78a4bfd169ec5e502eeb8f8ba5","scripts/update_opcodes.py":"3201c128598663ffe6c6336c96b90dc288a4923fe522ab4dbcf5960eac1042c1","src/ast_emitter.rs":"e43866c6fd30afe81f5945551662636c02c0c569c8c84626601e4a98be1af119","src/compilation_info.rs":"80aac7042fc242c9062bacef3462b338e1808117f4ec9f2ddb6a4cb6967186ea","src/copy/BytecodeUtil.h":"de32bfbf21c6345e312f60c8523b80bf74c05083e67a58fab8a3fa54a64b7530","src/copy/Opcodes.h":"5fe588f351eccd027b854fe8e6fb9cf52a9c7de3784af8234d15b9f90b114d54","src/dis.rs":"945acee8fce373f160bdd53f538bcc97037eaade0029996c9c9cbda980c65205","src/emitter.rs":"99a0441bcb8ba35ccd0b96162c4adc8458415fc00ab523bcfbced2498883e081","src/emitter_scope.rs":"c7109ae1fa69cbaa465229ed49ecc11aaa11fedc20ddd9a23aec0b719ee549ef","src/forward_jump_emitter.rs":"c7fb8bbb3f75f166fbc91a62d7eca15cec081cd7afe71890ba9b5cd44de233f6","src/frame_slot.rs":"b20c81d67c572f20d06d493b211cd3eaa0432a8294541583643b82df3af2f813","src/gcthings.rs":"1113f59dc7e08eca6afd0d3dfae3f035c6f79e66f8146582de05c4b717a6b6c1","src/lib.rs":"4bc440b68f2ed75357099d15e727eb6efb25dbd2cee3be5f6200ac5fb4f1d05a","src/opcode.rs":"6c2695e14552ab1f13c10f14f920c76b9bd70e4c70e250a317edbf9fe8478cb4","src/opcode_info.rs":"2a6f368f8df71cc34a39879d39d95d0a007e5f7b3928830ec90e6cd9305ff288","src/reference_op_emitter.rs":"4e88368d405eeedd8590be8757d1f1adf7cbb5b04c54d09c6bd2abac7a57b118","src/scope.rs":"daa020242e0ef38d359d2ddf70f9f7ce8bb9ce5f217af73476da14968f20fe47","src/scope_notes.rs":"5b46276cac144171d88e144d637d6db0210393a64ce98fbdb01959afd2b19692","src/scope_pass.rs":"f8f98df3d2f29dcfedf4815a3641f6ad529f1b4b219f282e3a4f11daab329f08","src/script_atom_set.rs":"8a567a786c6cc15192ca629deb3544546ec204efd1d05d16ff9ccdbf42a94e96"},"package":null}

View file

@ -23,7 +23,7 @@ pub fn emit_program<'alloc>(
options: &EmitOptions,
atoms: SourceAtomSet<'alloc>,
scope_data_map: ScopeDataMap,
) -> Result<EmitResult, EmitError> {
) -> Result<EmitResult<'alloc>, EmitError> {
let mut emitter = AstEmitter::new(options, atoms, scope_data_map);
match ast {

View file

@ -94,10 +94,10 @@ impl EmitOptions {
/// The output of bytecode-compiling a script or module.
#[derive(Debug)]
pub struct EmitResult {
pub struct EmitResult<'alloc> {
pub bytecode: Vec<u8>,
pub atoms: Vec<SourceAtomSetIndex>,
pub all_atoms: Vec<String>,
pub all_atoms: Vec<&'alloc str>,
pub gcthings: Vec<GCThing>,
pub scopes: Vec<ScopeData>,
pub scope_notes: Vec<ScopeNote>,
@ -155,7 +155,10 @@ impl InstructionWriter {
}
}
pub fn into_emit_result(self, compilation_info: CompilationInfo) -> EmitResult {
pub fn into_emit_result<'alloc>(
self,
compilation_info: CompilationInfo<'alloc>,
) -> EmitResult<'alloc> {
EmitResult {
bytecode: self.bytecode,
atoms: self.atoms.into(),

View file

@ -28,7 +28,7 @@ pub fn emit<'alloc>(
ast: &mut ast::types::Program,
options: &EmitOptions,
atoms: SourceAtomSet<'alloc>,
) -> Result<EmitResult, EmitError> {
) -> Result<EmitResult<'alloc>, EmitError> {
let scope_data_map = scope_pass::generate_scope_data(ast);
ast_emitter::emit_program(ast, options, atoms, scope_data_map)
}

View file

@ -1 +1 @@
{"files":{"Cargo.toml":"0d55dda35a65ecfc48d6b7bd24d5469e69d37685996ca1e6326463a5107639d7","src/ast_builder.rs":"2f8428af6a6f2300d1127430488a56da6455092f11ebb20224bd583b9dcd71c8","src/declaration_kind.rs":"fdfda2fe408cce1c637d17fee0813160619450472c6de9befc36ebeed892cc3c","src/early_errors.rs":"a622dcd9bfac98352664e15c74f0aeea3090fb7d362467747d19ac1800f364c7","src/error.rs":"cfd2d4ba8ccf4af13682ad7962f27e9677dd247445fb02d8d4de7b413f93a6d9","src/lib.rs":"a9b5c490c61226c136892ce86275ce8e654d5514ba37003b607f1595cf154b61","src/parser_tables_generated.rs":"abeadb8593d7d88f403a2a92e0c30fb4c3766069b2e64cdffe1e7090a65738e5","src/stack_value_generated.rs":"3755fe24032ad20b73b5de6623ea6ef45fedb7e164eaba007cb3bc221c959a22","src/token.rs":"7f9ea42618b7bfa5f8c46c10db86352da4036f28f8939985c10f9c1217f61b53","src/traits/mod.rs":"bcc2fa63444ba4c763dc996f410a6871f2cdc3bde54e1924ca8cc25cba92674a"},"package":null}
{"files":{"Cargo.toml":"0d55dda35a65ecfc48d6b7bd24d5469e69d37685996ca1e6326463a5107639d7","src/ast_builder.rs":"1ba2c44c5fef2986faf0874e598690551d02be7e8e46ba82bf69de3cc9979885","src/declaration_kind.rs":"fdfda2fe408cce1c637d17fee0813160619450472c6de9befc36ebeed892cc3c","src/early_errors.rs":"8a0b5c2e15311c3001aa4ddef1a5d15f489fc1dd673aeb44aa82a917a2ddab59","src/error.rs":"fec6d51fe1717afed19512c51563ae7aab965ff1c977bd49cc1d66fee6f72fb1","src/lib.rs":"a9b5c490c61226c136892ce86275ce8e654d5514ba37003b607f1595cf154b61","src/parser_tables_generated.rs":"d80dd1d9a5e6055a594d1cde7faadf375704c1360ca76a384a80a8c51e94fa62","src/stack_value_generated.rs":"3755fe24032ad20b73b5de6623ea6ef45fedb7e164eaba007cb3bc221c959a22","src/token.rs":"7f9ea42618b7bfa5f8c46c10db86352da4036f28f8939985c10f9c1217f61b53","src/traits/mod.rs":"bcc2fa63444ba4c763dc996f410a6871f2cdc3bde54e1924ca8cc25cba92674a"},"package":null}

Diff not shown because it is too large.

View file

@ -18,35 +18,35 @@ impl DeclarationInfo {
}
}
pub type EarlyErrorsResult = Result<(), ParseError>;
pub type EarlyErrorsResult<'alloc> = Result<(), ParseError<'alloc>>;
pub trait LexicalEarlyErrorsContext {
fn declare_lex(
fn declare_lex<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult;
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc>;
}
pub trait VarEarlyErrorsContext {
fn declare_var(
fn declare_var<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult;
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc>;
}
pub trait ParameterEarlyErrorsContext {
fn declare(
fn declare<'alloc>(
&mut self,
name: SourceAtomSetIndex,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult;
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc>;
}
// ===========================================================================
@ -62,7 +62,7 @@ impl IdentifierEarlyErrorsContext {
Self {}
}
fn is_strict(&self) -> Result<bool, ParseError> {
fn is_strict<'alloc>(&self) -> Result<bool, ParseError<'alloc>> {
Err(ParseError::NotImplemented(
"strict-mode-only early error is not yet supported",
))
@ -70,7 +70,7 @@ impl IdentifierEarlyErrorsContext {
// Not used due to NotImplemented before the callsite.
/*
fn is_module(&self) -> Result<bool, ParseError> {
fn is_module(&self) -> Result<bool, ParseError<'alloc>> {
Err(ParseError::NotImplemented(
"module-only early error is not yet supported",
))
@ -104,8 +104,8 @@ impl IdentifierEarlyErrorsContext {
pub fn check_binding_identifier<'alloc>(
&self,
token: &arena::Box<'alloc, Token>,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
if Self::is_arguments_identifier(token) || Self::is_eval_identifier(token) {
// Static Semantics: Early Errors
// https://tc39.es/ecma262/#sec-identifiers-static-semantics-early-errors
@ -150,8 +150,8 @@ impl IdentifierEarlyErrorsContext {
pub fn check_label_identifier<'alloc>(
&self,
token: &arena::Box<'alloc, Token>,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
if Self::is_yield_identifier(token) {
return self.check_yield_common(token, atoms);
}
@ -166,8 +166,8 @@ impl IdentifierEarlyErrorsContext {
pub fn check_identifier_reference<'alloc>(
&self,
token: &arena::Box<'alloc, Token>,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
if Self::is_yield_identifier(token) {
return self.check_yield_common(token, atoms);
}
@ -182,8 +182,8 @@ impl IdentifierEarlyErrorsContext {
fn check_yield_common<'alloc>(
&self,
_token: &arena::Box<'alloc, Token>,
_atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
_atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
// Static Semantics: Early Errors
// https://tc39.es/ecma262/#sec-identifiers-static-semantics-early-errors
//
@ -228,8 +228,8 @@ impl IdentifierEarlyErrorsContext {
fn check_await_common<'alloc>(
&self,
_token: &arena::Box<'alloc, Token>,
_atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
_atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
// Static Semantics: Early Errors
// https://tc39.es/ecma262/#sec-identifiers-static-semantics-early-errors
//
@ -322,8 +322,8 @@ impl IdentifierEarlyErrorsContext {
fn check_identifier<'alloc>(
&self,
token: &arena::Box<'alloc, Token>,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
match token.terminal_id {
TerminalId::NameWithEscape => {
let name = token.value.as_atom();
@ -592,7 +592,7 @@ impl BlockEarlyErrorsContext {
}
}
fn is_strict(&self) -> Result<bool, ParseError> {
fn is_strict<'alloc>(&self) -> Result<bool, ParseError<'alloc>> {
Err(ParseError::NotImplemented(
"strict-mode-only early error is not yet supported",
))
@ -600,13 +600,13 @@ impl BlockEarlyErrorsContext {
}
impl LexicalEarlyErrorsContext for BlockEarlyErrorsContext {
fn declare_lex(
fn declare_lex<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
debug_assert!(Self::is_supported_lexical(kind));
// Static Semantics: Early Errors
@ -670,13 +670,13 @@ impl LexicalEarlyErrorsContext for BlockEarlyErrorsContext {
}
impl VarEarlyErrorsContext for BlockEarlyErrorsContext {
fn declare_var(
fn declare_var<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
debug_assert!(Self::is_supported_var(kind));
// Static Semantics: Early Errors
@ -753,13 +753,13 @@ impl LexicalForHeadEarlyErrorsContext {
}
impl LexicalEarlyErrorsContext for LexicalForHeadEarlyErrorsContext {
fn declare_lex(
fn declare_lex<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
debug_assert!(Self::is_supported_lexical(kind));
// Static Semantics: Early Errors
@ -832,13 +832,13 @@ impl InternalForBodyEarlyErrorsContext {
}
impl VarEarlyErrorsContext for InternalForBodyEarlyErrorsContext {
fn declare_var(
fn declare_var<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
_atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
_atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
debug_assert!(Self::is_supported_var(kind));
self.var_names_of_stmt
@ -864,13 +864,13 @@ impl LexicalForBodyEarlyErrorsContext {
}
impl VarEarlyErrorsContext for LexicalForBodyEarlyErrorsContext {
fn declare_var(
fn declare_var<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
// Static Semantics: Early Errors
// https://tc39.es/ecma262/#sec-for-statement-static-semantics-early-errors
//
@ -938,7 +938,7 @@ impl CaseBlockEarlyErrorsContext {
BlockEarlyErrorsContext::is_supported_var(kind)
}
fn is_strict(&self) -> Result<bool, ParseError> {
fn is_strict<'alloc>(&self) -> Result<bool, ParseError<'alloc>> {
Err(ParseError::NotImplemented(
"strict-mode-only early error is not yet supported",
))
@ -946,13 +946,13 @@ impl CaseBlockEarlyErrorsContext {
}
impl LexicalEarlyErrorsContext for CaseBlockEarlyErrorsContext {
fn declare_lex(
fn declare_lex<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
debug_assert!(Self::is_supported_lexical(kind));
// Static Semantics: Early Errors
@ -1013,13 +1013,13 @@ impl LexicalEarlyErrorsContext for CaseBlockEarlyErrorsContext {
}
impl VarEarlyErrorsContext for CaseBlockEarlyErrorsContext {
fn declare_var(
fn declare_var<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
debug_assert!(Self::is_supported_var(kind));
// Static Semantics: Early Errors
@ -1075,12 +1075,12 @@ impl CatchParameterEarlyErrorsContext {
}
impl ParameterEarlyErrorsContext for CatchParameterEarlyErrorsContext {
fn declare(
fn declare<'alloc>(
&mut self,
name: SourceAtomSetIndex,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
// BoundNames of CatchParameter
//
// CatchParameter => BindingIdentifier
@ -1124,13 +1124,13 @@ impl CatchBlockEarlyErrorsContext {
}
impl LexicalEarlyErrorsContext for CatchBlockEarlyErrorsContext {
fn declare_lex(
fn declare_lex<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
// Static Semantics: Early Errors
// https://tc39.es/ecma262/#sec-try-statement-static-semantics-early-errors
//
@ -1150,13 +1150,13 @@ impl LexicalEarlyErrorsContext for CatchBlockEarlyErrorsContext {
}
impl VarEarlyErrorsContext for CatchBlockEarlyErrorsContext {
fn declare_var(
fn declare_var<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
// Static Semantics: Early Errors
// https://tc39.es/ecma262/#sec-try-statement-static-semantics-early-errors
//
@ -1236,12 +1236,12 @@ impl FormalParametersEarlyErrorsContext {
}
impl ParameterEarlyErrorsContext for FormalParametersEarlyErrorsContext {
fn declare(
fn declare<'alloc>(
&mut self,
name: SourceAtomSetIndex,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
// BoundNames of FormalParameterList
//
// Static Semantics: BoundNames
@ -1308,12 +1308,12 @@ impl UniqueFormalParametersEarlyErrorsContext {
}
impl ParameterEarlyErrorsContext for UniqueFormalParametersEarlyErrorsContext {
fn declare(
fn declare<'alloc>(
&mut self,
name: SourceAtomSetIndex,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
let kind = DeclarationKind::FormalParameter;
// Static Semantics: Early Errors
@ -1466,13 +1466,13 @@ impl InternalFunctionBodyEarlyErrorsContext {
}
impl LexicalEarlyErrorsContext for InternalFunctionBodyEarlyErrorsContext {
fn declare_lex(
fn declare_lex<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
debug_assert!(Self::is_supported_lexical(kind));
// Static Semantics: Early Errors
@ -1520,13 +1520,13 @@ impl LexicalEarlyErrorsContext for InternalFunctionBodyEarlyErrorsContext {
}
impl VarEarlyErrorsContext for InternalFunctionBodyEarlyErrorsContext {
fn declare_var(
fn declare_var<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
debug_assert!(Self::is_supported_var(kind));
// Static Semantics: Early Errors
@ -1583,13 +1583,13 @@ impl FunctionBodyEarlyErrorsContext {
}
impl LexicalEarlyErrorsContext for FunctionBodyEarlyErrorsContext {
fn declare_lex(
fn declare_lex<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
// Static Semantics: Early Errors
// https://tc39.es/ecma262/#sec-function-definitions-static-semantics-early-errors
//
@ -1675,13 +1675,13 @@ impl LexicalEarlyErrorsContext for FunctionBodyEarlyErrorsContext {
}
impl VarEarlyErrorsContext for FunctionBodyEarlyErrorsContext {
fn declare_var(
fn declare_var<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
self.body.declare_var(name, kind, offset, atoms)
}
}
@ -1713,13 +1713,13 @@ impl UniqueFunctionBodyEarlyErrorsContext {
}
impl LexicalEarlyErrorsContext for UniqueFunctionBodyEarlyErrorsContext {
fn declare_lex(
fn declare_lex<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
// Static Semantics: Early Errors
// https://tc39.es/ecma262/#sec-arrow-function-definitions-static-semantics-early-errors
//
@ -1819,13 +1819,13 @@ impl LexicalEarlyErrorsContext for UniqueFunctionBodyEarlyErrorsContext {
}
impl VarEarlyErrorsContext for UniqueFunctionBodyEarlyErrorsContext {
fn declare_var(
fn declare_var<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
self.body.declare_var(name, kind, offset, atoms)
}
}
@ -1889,13 +1889,13 @@ impl ScriptEarlyErrorsContext {
}
impl LexicalEarlyErrorsContext for ScriptEarlyErrorsContext {
fn declare_lex(
fn declare_lex<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
debug_assert!(Self::is_supported_lexical(kind));
// Static Semantics: Early Errors
@ -1942,13 +1942,13 @@ impl LexicalEarlyErrorsContext for ScriptEarlyErrorsContext {
}
impl VarEarlyErrorsContext for ScriptEarlyErrorsContext {
fn declare_var(
fn declare_var<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
debug_assert!(Self::is_supported_var(kind));
// Static Semantics: Early Errors
@ -2075,12 +2075,12 @@ impl ModuleEarlyErrorsContext {
}
#[allow(dead_code)]
pub fn add_exported_name(
pub fn add_exported_name<'alloc>(
&mut self,
name: SourceAtomSetIndex,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
// Static Semantics: Early Errors
// https://tc39.es/ecma262/#sec-module-semantics-static-semantics-early-errors
//
@ -2108,7 +2108,10 @@ impl ModuleEarlyErrorsContext {
}
#[allow(dead_code)]
pub fn check_exported_name(&self, atoms: &SourceAtomSet) -> EarlyErrorsResult {
pub fn check_exported_name<'alloc>(
&self,
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
// Static Semantics: Early Errors
// https://tc39.es/ecma262/#sec-module-semantics-static-semantics-early-errors
//
@ -2131,13 +2134,13 @@ impl ModuleEarlyErrorsContext {
}
impl LexicalEarlyErrorsContext for ModuleEarlyErrorsContext {
fn declare_lex(
fn declare_lex<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
debug_assert!(Self::is_supported_lexical(kind));
// Static Semantics: Early Errors
@ -2196,13 +2199,13 @@ impl LexicalEarlyErrorsContext for ModuleEarlyErrorsContext {
}
impl VarEarlyErrorsContext for ModuleEarlyErrorsContext {
fn declare_var(
fn declare_var<'alloc>(
&mut self,
name: SourceAtomSetIndex,
kind: DeclarationKind,
offset: usize,
atoms: &SourceAtomSet,
) -> EarlyErrorsResult {
atoms: &SourceAtomSet<'alloc>,
) -> EarlyErrorsResult<'alloc> {
debug_assert!(Self::is_supported_var(kind));
// Static Semantics: Early Errors

View file

@ -4,7 +4,7 @@ use crate::Token;
use std::{convert::Infallible, error::Error, fmt};
#[derive(Debug)]
pub enum ParseError {
pub enum ParseError<'alloc> {
// Lexical errors
IllegalCharacter(char),
InvalidEscapeSequence,
@ -21,7 +21,7 @@ pub enum ParseError {
UnexpectedEnd,
InvalidAssignmentTarget,
InvalidParameter,
InvalidIdentifier(String, usize),
InvalidIdentifier(&'alloc str, usize),
AstError(String),
// Destructuring errors
@ -35,9 +35,9 @@ pub enum ParseError {
ArrowHeadInvalid,
ArrowParametersWithNonFinalRest,
DuplicateBinding(String, DeclarationKind, usize, DeclarationKind, usize),
DuplicateExport(String, usize, usize),
MissingExport(String, usize),
DuplicateBinding(&'alloc str, DeclarationKind, usize, DeclarationKind, usize),
DuplicateExport(&'alloc str, usize, usize),
MissingExport(&'alloc str, usize),
// Annex B. FunctionDeclarations in IfStatement Statement Clauses
// https://tc39.es/ecma262/#sec-functiondeclarations-in-ifstatement-statement-clauses
@ -45,7 +45,7 @@ pub enum ParseError {
LabelledFunctionDeclInSingleStatement,
}
impl ParseError {
impl<'alloc> ParseError<'alloc> {
pub fn message(&self) -> String {
match self {
ParseError::IllegalCharacter(c) => format!("illegal character: {:?}", c),
@ -110,30 +110,30 @@ impl ParseError {
}
}
impl PartialEq for ParseError {
impl<'alloc> PartialEq for ParseError<'alloc> {
fn eq(&self, other: &ParseError) -> bool {
format!("{:?}", self) == format!("{:?}", other)
}
}
impl fmt::Display for ParseError {
impl<'alloc> fmt::Display for ParseError<'alloc> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.message())
}
}
impl From<Infallible> for ParseError {
fn from(err: Infallible) -> ParseError {
impl<'alloc> From<Infallible> for ParseError<'alloc> {
fn from(err: Infallible) -> ParseError<'alloc> {
match err {}
}
}
impl From<AstError> for ParseError {
fn from(err: AstError) -> ParseError {
impl<'alloc> From<AstError> for ParseError<'alloc> {
fn from(err: AstError) -> ParseError<'alloc> {
ParseError::AstError(err)
}
}
impl Error for ParseError {}
impl<'alloc> Error for ParseError<'alloc> {}
pub type Result<T> = std::result::Result<T, ParseError>;
pub type Result<'alloc, T> = std::result::Result<T, ParseError<'alloc>>;
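Because ParseError now borrows atom text for 'alloc instead of owning a String, the lifetime has to ride along on the Result alias and on every fallible signature in the early-errors checks, the lexer, and the parser, which is what the mechanical signature changes throughout this patch amount to. A minimal sketch with hypothetical trimmed-down variants and a made-up validate_name helper (not the real jsparagus API), showing how such a borrowed error propagates:

use std::fmt;

// Hypothetical trimmed-down stand-in for jsparagus's ParseError<'alloc>:
// the offending name is a borrowed &'alloc str, not an owned String.
#[derive(Debug)]
enum ParseError<'alloc> {
    InvalidIdentifier(&'alloc str, usize),
    UnexpectedEnd,
}

impl<'alloc> fmt::Display for ParseError<'alloc> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            ParseError::InvalidIdentifier(name, offset) => {
                write!(f, "invalid identifier `{}` at offset {}", name, offset)
            }
            ParseError::UnexpectedEnd => write!(f, "unexpected end of input"),
        }
    }
}

// The alias carries the lifetime, which is why fallible lexer/parser
// methods in the patch now spell Result<'alloc, T>.
type Result<'alloc, T> = std::result::Result<T, ParseError<'alloc>>;

// Made-up helper: the error it returns borrows from the source slice.
fn validate_name<'alloc>(name: &'alloc str, offset: usize) -> Result<'alloc, ()> {
    if name == "yield" {
        Err(ParseError::InvalidIdentifier(name, offset))
    } else {
        Ok(())
    }
}

fn main() {
    if let Err(e) = validate_name("yield", 0) {
        println!("{}", e); // invalid identifier `yield` at offset 0
    }
    println!("{}", ParseError::UnexpectedEnd);
}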

Diff not shown because it is too large.

View file

@ -1 +1 @@
{"files":{"Cargo.toml":"7097b285b83fb1c9793b8b1d6b27002f346650c62e465fa3f8aadf4c4daca9ba","benches/__finStreamer-proto.js":"44edc00a99a8904f8c6bb0c42c7ba4f96ad611e61191d2702ecb228ae6d7b35d","benches/parser.rs":"752325679b87485c42ca33ede236f9c182e5b155d99533b13e78b7c00f4405e7","benches/simple.js":"fbb50c1c49c0b1e3740a79407a834248c1f8ebdb1b72530c0fc6df57d079f252","src/lexer.rs":"9774de3a858c0d60c5de2b9d4964ad7ada0def64b42bd429900c3c5df15f69fe","src/lib.rs":"e53bbe64a3b16ad18469549ebb30f627c05e3c56d4d2da46286dd954d0566faa","src/parser.rs":"bfa5f2603aa7eb2dbc3fa46ef4dfe86250ecb73083162edb9afb2de3fc2d9d46","src/simulator.rs":"cee6a2f2ab8fa5c5b41b2cb6b26af0e0b10896ed9b6a888f2cd397b0bc8d2031","src/tests.rs":"d02687cb7a2c7a99eb709cedbcc1ea95692960b669f2a757be0b7b9c635794fd"},"package":null}
{"files":{"Cargo.toml":"7097b285b83fb1c9793b8b1d6b27002f346650c62e465fa3f8aadf4c4daca9ba","benches/__finStreamer-proto.js":"44edc00a99a8904f8c6bb0c42c7ba4f96ad611e61191d2702ecb228ae6d7b35d","benches/parser.rs":"752325679b87485c42ca33ede236f9c182e5b155d99533b13e78b7c00f4405e7","benches/simple.js":"fbb50c1c49c0b1e3740a79407a834248c1f8ebdb1b72530c0fc6df57d079f252","src/lexer.rs":"b7e2cf7fb74207891a12486dbc5cba27f914a4746fd20281560a8447b178fd17","src/lib.rs":"a3625bd83de5e1b7170c3cd09eaafb3ff184f94eee560780903c7f0f980c2bce","src/parser.rs":"1ed9cab5c31732966cb48ea308c145e3c3207dc33df0f05f9dc0ea24607cfe95","src/simulator.rs":"0db2985408738f0c74d2335933dfea8c4f9f9282ab0daf89061242432248c4fe","src/tests.rs":"c75d42e4904021fe581524a40d0271f535aae16e7f01de36bae839214a3ae754"},"package":null}

View file

@ -73,7 +73,7 @@ impl<'alloc> Lexer<'alloc> {
chars.next()
}
pub fn next<'parser>(&mut self, parser: &Parser<'parser>) -> Result<Token> {
pub fn next<'parser>(&mut self, parser: &Parser<'parser>) -> Result<'alloc, Token> {
let (loc, value, terminal_id) = self.advance_impl(parser)?;
let value = match terminal_id {
TerminalId::NumericLiteral => {
@ -103,7 +103,7 @@ impl<'alloc> Lexer<'alloc> {
})
}
fn unexpected_err(&mut self) -> ParseError {
fn unexpected_err(&mut self) -> ParseError<'alloc> {
if let Some(ch) = self.peek() {
ParseError::IllegalCharacter(ch)
} else {
@ -194,7 +194,7 @@ impl<'alloc> Lexer<'alloc> {
/// that a SingleLineHTMLCloseComment must occur at the start of a line. We
/// use `is_on_new_line` for that.)
///
fn skip_multi_line_comment(&mut self, builder: &mut AutoCow<'alloc>) -> Result<()> {
fn skip_multi_line_comment(&mut self, builder: &mut AutoCow<'alloc>) -> Result<'alloc, ()> {
while let Some(ch) = self.chars.next() {
match ch {
'*' if self.peek() == Some('/') => {
@ -299,7 +299,7 @@ impl<'alloc> Lexer<'alloc> {
fn identifier_name_tail(
&mut self,
mut builder: AutoCow<'alloc>,
) -> Result<(bool, &'alloc str)> {
) -> Result<'alloc, (bool, &'alloc str)> {
while let Some(ch) = self.peek() {
if !is_identifier_part(ch) {
if ch == '\\' {
@ -324,7 +324,7 @@ impl<'alloc> Lexer<'alloc> {
Ok((has_different, builder.finish(&self)))
}
fn identifier_name(&mut self, mut builder: AutoCow<'alloc>) -> Result<&'alloc str> {
fn identifier_name(&mut self, mut builder: AutoCow<'alloc>) -> Result<'alloc, &'alloc str> {
match self.chars.next() {
None => {
return Err(ParseError::UnexpectedEnd);
@ -386,7 +386,7 @@ impl<'alloc> Lexer<'alloc> {
&mut self,
start: usize,
builder: AutoCow<'alloc>,
) -> Result<(SourceLocation, Option<&'alloc str>, TerminalId)> {
) -> Result<'alloc, (SourceLocation, Option<&'alloc str>, TerminalId)> {
let (has_different, text) = self.identifier_name_tail(builder)?;
// https://tc39.es/ecma262/#sec-keywords-and-reserved-words
@ -489,7 +489,7 @@ impl<'alloc> Lexer<'alloc> {
&mut self,
start: usize,
builder: AutoCow<'alloc>,
) -> Result<(SourceLocation, Option<&'alloc str>, TerminalId)> {
) -> Result<'alloc, (SourceLocation, Option<&'alloc str>, TerminalId)> {
let name = self.identifier_name(builder)?;
Ok((
SourceLocation::new(start, self.offset()),
@ -503,7 +503,7 @@ impl<'alloc> Lexer<'alloc> {
/// `u` Hex4Digits
/// `u{` CodePoint `}`
/// ```
fn unicode_escape_sequence_after_backslash(&mut self) -> Result<char> {
fn unicode_escape_sequence_after_backslash(&mut self) -> Result<'alloc, char> {
match self.chars.next() {
Some('u') => {}
_ => {
@ -513,7 +513,7 @@ impl<'alloc> Lexer<'alloc> {
self.unicode_escape_sequence_after_backslash_and_u()
}
fn unicode_escape_sequence_after_backslash_and_u(&mut self) -> Result<char> {
fn unicode_escape_sequence_after_backslash_and_u(&mut self) -> Result<'alloc, char> {
let value = match self.peek() {
Some('{') => {
self.chars.next();
@ -557,7 +557,7 @@ impl<'alloc> Lexer<'alloc> {
/// DecimalDigit :: one of
/// `0` `1` `2` `3` `4` `5` `6` `7` `8` `9`
/// ```
fn decimal_digits(&mut self) -> Result<bool> {
fn decimal_digits(&mut self) -> Result<'alloc, bool> {
if let Some('0'..='9') = self.peek() {
self.chars.next();
} else {
@ -568,7 +568,7 @@ impl<'alloc> Lexer<'alloc> {
Ok(true)
}
fn decimal_digits_after_first_digit(&mut self) -> Result<()> {
fn decimal_digits_after_first_digit(&mut self) -> Result<'alloc, ()> {
while let Some(next) = self.peek() {
match next {
'_' => {
@ -603,7 +603,7 @@ impl<'alloc> Lexer<'alloc> {
/// `+` DecimalDigits
/// `-` DecimalDigits
/// ```
fn optional_exponent(&mut self) -> Result<()> {
fn optional_exponent(&mut self) -> Result<'alloc, ()> {
if let Some('e') | Some('E') = self.peek() {
self.chars.next();
@ -622,7 +622,7 @@ impl<'alloc> Lexer<'alloc> {
/// HexDigit :: one of
/// `0` `1` `2` `3` `4` `5` `6` `7` `8` `9` `a` `b` `c` `d` `e` `f` `A` `B` `C` `D` `E` `F`
/// ```
fn hex_digit(&mut self) -> Result<u32> {
fn hex_digit(&mut self) -> Result<'alloc, u32> {
match self.chars.next() {
None => Err(ParseError::InvalidEscapeSequence),
Some(c @ '0'..='9') => Ok(c as u32 - '0' as u32),
@ -632,7 +632,7 @@ impl<'alloc> Lexer<'alloc> {
}
}
fn code_point_to_char(value: u32) -> Result<char> {
fn code_point_to_char(value: u32) -> Result<'alloc, char> {
if 0xd800 <= value && value <= 0xdfff {
Err(ParseError::NotImplemented(
"unicode escape sequences (surrogates)",
@ -646,7 +646,7 @@ impl<'alloc> Lexer<'alloc> {
/// Hex4Digits ::
/// HexDigit HexDigit HexDigit HexDigit
/// ```
fn hex_4_digits(&mut self) -> Result<char> {
fn hex_4_digits(&mut self) -> Result<'alloc, char> {
let mut value = 0;
for _ in 0..4 {
value = (value << 4) | self.hex_digit()?;
@ -662,7 +662,7 @@ impl<'alloc> Lexer<'alloc> {
/// HexDigit
/// HexDigits HexDigit
/// ```
fn code_point(&mut self) -> Result<char> {
fn code_point(&mut self) -> Result<'alloc, char> {
let mut value = self.hex_digit()?;
loop {
@ -707,7 +707,7 @@ impl<'alloc> Lexer<'alloc> {
/// BigIntLiteralSuffix ::
/// `n`
/// ```
fn numeric_literal_starting_with_zero(&mut self) -> Result<NumericType> {
fn numeric_literal_starting_with_zero(&mut self) -> Result<'alloc, NumericType> {
match self.peek() {
// BinaryIntegerLiteral ::
// `0b` BinaryDigits
@ -900,7 +900,7 @@ impl<'alloc> Lexer<'alloc> {
}
/// Scan a NumericLiteral (defined in 11.8.3, extended by B.1.1).
fn decimal_literal(&mut self) -> Result<NumericType> {
fn decimal_literal(&mut self) -> Result<'alloc, NumericType> {
// DecimalLiteral ::
// DecimalIntegerLiteral `.` DecimalDigits? ExponentPart?
// `.` DecimalDigits ExponentPart?
@ -922,12 +922,12 @@ impl<'alloc> Lexer<'alloc> {
/// Scan a NumericLiteral (defined in 11.8.3, extended by B.1.1) after
/// having already consumed the first character, which is a decimal digit.
fn decimal_literal_after_first_digit(&mut self) -> Result<NumericType> {
fn decimal_literal_after_first_digit(&mut self) -> Result<'alloc, NumericType> {
self.decimal_digits_after_first_digit()?;
self.decimal_literal_after_digits()
}
fn decimal_literal_after_digits(&mut self) -> Result<NumericType> {
fn decimal_literal_after_digits(&mut self) -> Result<'alloc, NumericType> {
match self.peek() {
Some('.') => {
self.chars.next();
@ -945,7 +945,7 @@ impl<'alloc> Lexer<'alloc> {
Ok(NumericType::Normal)
}
fn check_after_numeric_literal(&self) -> Result<()> {
fn check_after_numeric_literal(&self) -> Result<'alloc, ()> {
// The SourceCharacter immediately following a
// NumericLiteral must not be an IdentifierStart or
// DecimalDigit. (11.8.3)
@ -994,7 +994,7 @@ impl<'alloc> Lexer<'alloc> {
/// FourToSeven :: one of
/// `4` `5` `6` `7`
/// ```
fn escape_sequence(&mut self, text: &mut String<'alloc>) -> Result<()> {
fn escape_sequence(&mut self, text: &mut String<'alloc>) -> Result<'alloc, ()> {
match self.chars.next() {
None => {
return Err(ParseError::UnterminatedString);
@ -1137,7 +1137,7 @@ impl<'alloc> Lexer<'alloc> {
fn string_literal(
&mut self,
delimiter: char,
) -> Result<(SourceLocation, Option<&'alloc str>, TerminalId)> {
) -> Result<'alloc, (SourceLocation, Option<&'alloc str>, TerminalId)> {
let offset = self.offset() - 1;
let mut builder = AutoCow::new(&self);
loop {
@ -1181,7 +1181,10 @@ impl<'alloc> Lexer<'alloc> {
// ------------------------------------------------------------------------
// 11.8.5 Regular Expression Literals
fn regular_expression_backslash_sequence(&mut self, text: &mut String<'alloc>) -> Result<()> {
fn regular_expression_backslash_sequence(
&mut self,
text: &mut String<'alloc>,
) -> Result<'alloc, ()> {
text.push('\\');
match self.chars.next() {
None | Some(CR) | Some(LF) | Some(LS) | Some(PS) => Err(ParseError::UnterminatedRegExp),
@ -1196,7 +1199,7 @@ impl<'alloc> Lexer<'alloc> {
fn regular_expression_literal(
&mut self,
builder: &mut AutoCow<'alloc>,
) -> Result<(SourceLocation, Option<&'alloc str>, TerminalId)> {
) -> Result<'alloc, (SourceLocation, Option<&'alloc str>, TerminalId)> {
let offset = self.offset();
loop {
@ -1319,7 +1322,7 @@ impl<'alloc> Lexer<'alloc> {
start: usize,
subst: TerminalId,
tail: TerminalId,
) -> Result<(SourceLocation, Option<&'alloc str>, TerminalId)> {
) -> Result<'alloc, (SourceLocation, Option<&'alloc str>, TerminalId)> {
let mut builder = AutoCow::new(&self);
while let Some(ch) = self.chars.next() {
// TemplateCharacter ::
@ -1377,7 +1380,7 @@ impl<'alloc> Lexer<'alloc> {
fn advance_impl<'parser>(
&mut self,
parser: &Parser<'parser>,
) -> Result<(SourceLocation, Option<&'alloc str>, TerminalId)> {
) -> Result<'alloc, (SourceLocation, Option<&'alloc str>, TerminalId)> {
let mut builder = AutoCow::new(&self);
let mut start = self.offset();
while let Some(c) = self.chars.next() {

View file

@ -37,7 +37,7 @@ pub fn parse_script<'alloc>(
source: &'alloc str,
_options: &ParseOptions,
atoms: Rc<RefCell<SourceAtomSet<'alloc>>>,
) -> Result<arena::Box<'alloc, Script<'alloc>>> {
) -> Result<'alloc, arena::Box<'alloc, Script<'alloc>>> {
Ok(parse(allocator, source, START_STATE_SCRIPT, atoms)?.to_ast()?)
}
@ -46,7 +46,7 @@ pub fn parse_module<'alloc>(
source: &'alloc str,
_options: &ParseOptions,
atoms: Rc<RefCell<SourceAtomSet<'alloc>>>,
) -> Result<arena::Box<'alloc, Module<'alloc>>> {
) -> Result<'alloc, arena::Box<'alloc, Module<'alloc>>> {
Ok(parse(allocator, source, START_STATE_MODULE, atoms)?.to_ast()?)
}
@ -55,7 +55,7 @@ fn parse<'alloc>(
source: &'alloc str,
start_state: usize,
atoms: Rc<RefCell<SourceAtomSet<'alloc>>>,
) -> Result<StackValue<'alloc>> {
) -> Result<'alloc, StackValue<'alloc>> {
let mut tokens = Lexer::new(allocator, source.chars(), atoms.clone());
TABLES.check();
@ -76,7 +76,7 @@ pub fn is_partial_script<'alloc>(
allocator: &'alloc bumpalo::Bump,
source: &'alloc str,
atoms: Rc<RefCell<SourceAtomSet<'alloc>>>,
) -> Result<bool> {
) -> Result<'alloc, bool> {
let mut parser = Parser::new(
AstBuilder::new(allocator, atoms.clone()),
START_STATE_SCRIPT,

View file

@ -24,7 +24,7 @@ impl<'alloc> AstBuilderDelegate<'alloc> for Parser<'alloc> {
}
impl<'alloc> ParserTrait<'alloc, StackValue<'alloc>> for Parser<'alloc> {
fn shift(&mut self, tv: TermValue<StackValue<'alloc>>) -> Result<bool> {
fn shift(&mut self, tv: TermValue<StackValue<'alloc>>) -> Result<'alloc, bool> {
// Shift the new terminal/nonterminal and its associated value.
let mut state = self.state();
assert!(state < TABLES.shift_count);
@ -71,7 +71,7 @@ impl<'alloc> ParserTrait<'alloc, StackValue<'alloc>> for Parser<'alloc> {
self.state_stack.pop().unwrap();
self.node_stack.pop().unwrap()
}
fn check_not_on_new_line(&mut self, peek: usize) -> Result<bool> {
fn check_not_on_new_line(&mut self, peek: usize) -> Result<'alloc, bool> {
let sv = &self.node_stack[self.node_stack.len() - peek].value;
if let StackValue::Token(ref token) = sv {
if !token.is_on_new_line {
@ -103,7 +103,7 @@ impl<'alloc> Parser<'alloc> {
*self.state_stack.last().unwrap()
}
pub fn write_token(&mut self, token: &Token) -> Result<()> {
pub fn write_token(&mut self, token: &Token) -> Result<'alloc, ()> {
// Shift the token with the associated StackValue.
let accept = self.shift(TermValue {
term: Term::Terminal(token.terminal_id),
@ -115,7 +115,7 @@ impl<'alloc> Parser<'alloc> {
Ok(())
}
pub fn close(&mut self, position: usize) -> Result<StackValue<'alloc>> {
pub fn close(&mut self, position: usize) -> Result<'alloc, StackValue<'alloc>> {
// Shift the End terminal with the associated StackValue.
let loc = SourceLocation::new(position, position);
let token = Token::basic_token(TerminalId::End, loc);
@ -138,7 +138,7 @@ impl<'alloc> Parser<'alloc> {
Ok(self.node_stack.pop().unwrap().value)
}
pub(crate) fn parse_error(t: &Token) -> ParseError {
pub(crate) fn parse_error(t: &Token) -> ParseError<'alloc> {
if t.terminal_id == TerminalId::End {
ParseError::UnexpectedEnd
} else {
@ -146,7 +146,7 @@ impl<'alloc> Parser<'alloc> {
}
}
fn try_error_handling(&mut self, t: TermValue<StackValue<'alloc>>) -> Result<bool> {
fn try_error_handling(&mut self, t: TermValue<StackValue<'alloc>>) -> Result<'alloc, bool> {
if let StackValue::Token(ref token) = t.value {
// Error tokens might themselves cause more errors to be reported.
// This happens due to the fact that the ErrorToken can be replayed,
@ -179,7 +179,7 @@ impl<'alloc> Parser<'alloc> {
Err(ParseError::ParserCannotUnpackToken)
}
pub(crate) fn recover(t: &Token, error_code: ErrorCode) -> Result<()> {
pub(crate) fn recover(t: &Token, error_code: ErrorCode) -> Result<'alloc, ()> {
match error_code {
ErrorCode::Asi => {
if t.is_on_new_line

View file

@ -32,7 +32,7 @@ pub struct Simulator<'alloc, 'parser> {
}
impl<'alloc, 'parser> ParserTrait<'alloc, ()> for Simulator<'alloc, 'parser> {
fn shift(&mut self, tv: TermValue<()>) -> Result<bool> {
fn shift(&mut self, tv: TermValue<()>) -> Result<'alloc, bool> {
// Shift the new terminal/nonterminal and its associated value.
let mut state = self.state();
assert!(state < TABLES.shift_count);
@ -92,7 +92,7 @@ impl<'alloc, 'parser> ParserTrait<'alloc, ()> for Simulator<'alloc, 'parser> {
self.sp -= 1;
TermValue { term: t, value: () }
}
fn check_not_on_new_line(&mut self, _peek: usize) -> Result<bool> {
fn check_not_on_new_line(&mut self, _peek: usize) -> Result<'alloc, bool> {
Ok(true)
}
}
@ -122,7 +122,7 @@ impl<'alloc, 'parser> Simulator<'alloc, 'parser> {
}
}
pub fn write_token(&mut self, token: &Token) -> Result<()> {
pub fn write_token(&mut self, token: &Token) -> Result<'alloc, ()> {
// Shift the token with the associated StackValue.
let accept = self.shift(TermValue {
term: Term::Terminal(token.terminal_id),
@ -134,7 +134,7 @@ impl<'alloc, 'parser> Simulator<'alloc, 'parser> {
Ok(())
}
pub fn close(&mut self, _position: usize) -> Result<()> {
pub fn close(&mut self, _position: usize) -> Result<'alloc, ()> {
// Shift the End terminal with the associated StackValue.
let accept = self.shift(TermValue {
term: Term::Terminal(TerminalId::End),
@ -152,7 +152,7 @@ impl<'alloc, 'parser> Simulator<'alloc, 'parser> {
}
// Simulate the action of Parser::try_error_handling.
fn try_error_handling(&mut self, t: TermValue<()>) -> Result<bool> {
fn try_error_handling(&mut self, t: TermValue<()>) -> Result<'alloc, bool> {
if let Term::Terminal(term) = t.term {
let bogus_loc = SourceLocation::new(0, 0);
let token = &Token::basic_token(term, bogus_loc);

View file

@ -63,7 +63,7 @@ fn chunks_to_string<'a, T: IntoChunks<'a>>(code: T) -> String {
fn try_parse<'alloc, 'source, Source>(
allocator: &'alloc Bump,
code: Source,
) -> Result<arena::Box<'alloc, Script<'alloc>>>
) -> Result<'alloc, arena::Box<'alloc, Script<'alloc>>>
where
Source: IntoChunks<'source>,
{

File diff hidden because one or more lines are too long.

View file

@ -433,7 +433,7 @@ class RustParserWriter:
self.write(0, "}")
self.write(0, "")
self.write(0, "pub trait ParserTrait<'alloc, Value> {")
self.write(1, "fn shift(&mut self, tv: TermValue<Value>) -> Result<bool>;")
self.write(1, "fn shift(&mut self, tv: TermValue<Value>) -> Result<'alloc, bool>;")
self.write(1, "fn replay(&mut self, tv: TermValue<Value>);")
self.write(1, "fn rewind(&mut self, n: usize) {")
self.write(2, "for _ in 0..n {")
@ -443,7 +443,7 @@ class RustParserWriter:
self.write(1, "}")
self.write(1, "fn epsilon(&mut self, state: usize);")
self.write(1, "fn pop(&mut self) -> TermValue<Value>;")
self.write(1, "fn check_not_on_new_line(&mut self, peek: usize) -> Result<bool>;")
self.write(1, "fn check_not_on_new_line(&mut self, peek: usize) -> Result<'alloc, bool>;")
self.write(0, "}")
self.write(0, "")
@ -620,7 +620,7 @@ class RustParserWriter:
used_variables = set()
traits = mode_traits
has_ast_builder = ast_builder in traits
self.write(0, "pub fn {}<'alloc, Handler>(parser: &mut Handler, state: usize) -> Result<bool>",
self.write(0, "pub fn {}<'alloc, Handler>(parser: &mut Handler, state: usize) -> Result<'alloc, bool>",
mode)
self.write(0, "where")
self.write(1, "Handler: {}", ' + '.join(map(self.type_to_rust, traits)))
@ -661,7 +661,7 @@ class RustParserWriter:
self.write(1, "handler: &mut AstBuilder<'alloc>,")
self.write(1, "prod: usize,")
self.write(1, "stack: &mut std::vec::Vec<StackValue<'alloc>>,")
self.write(0, ") -> Result<NonterminalId> {")
self.write(0, ") -> Result<'alloc, NonterminalId> {")
self.write(1, "match prod {")
for i, prod in enumerate(self.prods):
# If prod.nt is not in nonterminals, that means it's a goal