PR-URL: https://github.com/nodejs/node/pull/9618
Reviewed-By: Ali Ijaz Sheikh <ofrobots@google.com>
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
Michaël Zasso 2016-12-23 16:30:57 +01:00
Parent a67a04d765
Commit 2739185b79
1522 changed files: 117960 additions and 60299 deletions

7 deps/v8/.gitignore (vendored)

@ -85,12 +85,13 @@ shell_g
/tools/luci-go/linux64/isolate
/tools/luci-go/mac64/isolate
/tools/luci-go/win64/isolate.exe
/tools/mb
/tools/oom_dump/oom_dump
/tools/oom_dump/oom_dump.o
/tools/swarming_client
/tools/visual_studio/Debug
/tools/visual_studio/Release
/test/fuzzer/wasm
/test/fuzzer/wasm_asmjs
/v8.log.ll
/xcodebuild
TAGS
@ -105,3 +106,7 @@ turbo*.cfg
turbo*.dot
turbo*.json
v8.ignition_dispatches_table.json
/test/fuzzer/wasm.tar.gz
/test/fuzzer/wasm_asmjs.tar.gz
/src/inspector/build/closure-compiler.tar.gz
/src/inspector/build/closure-compiler

5 deps/v8/.gn (vendored)

@ -14,8 +14,7 @@ secondary_source = "//build/secondary/"
# matching these patterns (see "gn help label_pattern" for format) will have
# their includes checked for proper dependencies when you run either
# "gn check" or "gn gen --check".
check_targets = [
]
check_targets = []
# These are the list of GN files that run exec_script. This whitelist exists
# to force additional review for new uses of exec_script, which is strongly
@ -45,7 +44,5 @@ exec_script_whitelist = [
"//build/toolchain/win/BUILD.gn",
"//build/util/branding.gni",
"//build/util/version.gni",
"//test/cctest/BUILD.gn",
"//test/test262/BUILD.gn",
"//test/unittests/BUILD.gn",
]

4 deps/v8/AUTHORS (vendored)

@ -63,6 +63,7 @@ Felix Geisendörfer <haimuiba@gmail.com>
Filipe David Manana <fdmanana@gmail.com>
Franziska Hinkelmann <franziska.hinkelmann@gmail.com>
Geoffrey Garside <ggarside@gmail.com>
Gwang Yoon Hwang <ryumiel@company100.net>
Han Choongwoo <cwhan.tunz@gmail.com>
Hirofumi Mako <mkhrfm@gmail.com>
Honggyu Kim <honggyu.kp@gmail.com>
@ -95,14 +96,17 @@ Mike Pennisi <mike@mikepennisi.com>
Milton Chiang <milton.chiang@mediatek.com>
Myeong-bo Shim <m0609.shim@samsung.com>
Nicolas Antonius Ernst Leopold Maria Kaiser <nikai@nikai.net>
Noj Vek <nojvek@gmail.com>
Oleksandr Chekhovskyi <oleksandr.chekhovskyi@gmail.com>
Paolo Giarrusso <p.giarrusso@gmail.com>
Patrick Gansterer <paroga@paroga.com>
Peter Rybin <peter.rybin@gmail.com>
Peter Varga <pvarga@inf.u-szeged.hu>
Paul Lind <plind44@gmail.com>
Rafal Krypa <rafal@krypa.net>
Refael Ackermann <refack@gmail.com>
Rene Rebe <rene@exactcode.de>
Rob Wu <rob@robwu.nl>
Robert Mustacchi <rm@fingolfin.org>
Robert Nagy <robert.nagy@gmail.com>
Ryan Dahl <ry@tinyclouds.org>

468 deps/v8/BUILD.gn (vendored)

@ -62,6 +62,9 @@ declare_args() {
# tools/gen-postmortem-metadata.py for details.
v8_postmortem_support = false
# Switches off inlining in V8.
v8_no_inline = false
# Similar to vfp but on MIPS.
v8_can_use_fpu_instructions = true
@ -94,6 +97,11 @@ if (v8_enable_disassembler == "") {
v8_enable_disassembler = is_debug && !v8_optimized_debug
}
# Specifies if the target build is a simulator build. Comparing target cpu
# with v8 target cpu to not affect simulator builds for making cross-compile
# snapshots.
is_target_simulator = target_cpu != v8_target_cpu
v8_generated_peephole_source = "$target_gen_dir/bytecode-peephole-table.cc"
v8_random_seed = "314159265"
v8_toolset_for_shell = "host"
@ -107,10 +115,7 @@ config("internal_config") {
include_dirs = [ "." ]
if (is_component_build) {
defines = [
"V8_SHARED",
"BUILDING_V8_SHARED",
]
defines = [ "BUILDING_V8_SHARED" ]
}
}
@ -134,12 +139,12 @@ config("libsampler_config") {
# itself.
config("external_config") {
if (is_component_build) {
defines = [
"V8_SHARED",
"USING_V8_SHARED",
]
defines = [ "USING_V8_SHARED" ]
}
include_dirs = [ "include" ]
if (v8_enable_inspector_override) {
include_dirs += [ "$target_gen_dir/include" ]
}
libs = []
if (is_android && current_toolchain != host_toolchain) {
libs += [ "log" ]
@ -200,7 +205,7 @@ config("toolchain") {
if (v8_current_cpu == "arm") {
defines += [ "V8_TARGET_ARCH_ARM" ]
if (arm_version == 7) {
if (arm_version >= 7) {
defines += [ "CAN_USE_ARMV7_INSTRUCTIONS" ]
}
if (arm_fpu == "vfpv3-d16") {
@ -233,6 +238,12 @@ config("toolchain") {
defines += [ "V8_TARGET_ARCH_ARM64" ]
}
# Mips64el/mipsel simulators.
if (is_target_simulator &&
(v8_current_cpu == "mipsel" || v8_current_cpu == "mips64el")) {
defines += [ "_MIPS_TARGET_SIMULATOR" ]
}
# TODO(jochen): Add support for mips.
if (v8_current_cpu == "mipsel") {
defines += [ "V8_TARGET_ARCH_MIPS" ]
@ -343,6 +354,13 @@ config("toolchain") {
} else if (dcheck_always_on) {
defines += [ "DEBUG" ]
}
if (v8_no_inline) {
cflags += [
"-fno-inline-functions",
"-fno-inline",
]
}
}
###############################################################################
@ -374,17 +392,16 @@ action("js2c") {
"src/js/regexp.js",
"src/js/arraybuffer.js",
"src/js/typedarray.js",
"src/js/iterator-prototype.js",
"src/js/collection.js",
"src/js/weak-collection.js",
"src/js/collection-iterator.js",
"src/js/promise.js",
"src/js/messages.js",
"src/js/array-iterator.js",
"src/js/string-iterator.js",
"src/js/templates.js",
"src/js/spread.js",
"src/js/proxy.js",
"src/js/async-await.js",
"src/debug/mirrors.js",
"src/debug/debug.js",
"src/debug/liveedit.js",
@ -427,7 +444,6 @@ action("js2c_experimental") {
sources = [
"src/js/macros.py",
"src/messages.h",
"src/js/harmony-async-await.js",
"src/js/harmony-atomics.js",
"src/js/harmony-simd.js",
"src/js/harmony-string-padding.js",
@ -439,8 +455,8 @@ action("js2c_experimental") {
if (v8_enable_i18n_support) {
sources += [
"src/js/datetime-format-to-parts.js",
"src/js/icu-case-mapping.js",
"src/js/intl-extra.js",
]
}
@ -809,6 +825,8 @@ v8_source_set("v8_base") {
sources = [
"//base/trace_event/common/trace_event_common.h",
### gcmole(all) ###
"include/v8-debug.h",
"include/v8-experimental.h",
"include/v8-platform.h",
@ -857,10 +875,14 @@ v8_source_set("v8_base") {
"src/ast/ast-numbering.h",
"src/ast/ast-traversal-visitor.h",
"src/ast/ast-type-bounds.h",
"src/ast/ast-types.cc",
"src/ast/ast-types.h",
"src/ast/ast-value-factory.cc",
"src/ast/ast-value-factory.h",
"src/ast/ast.cc",
"src/ast/ast.h",
"src/ast/compile-time-value.cc",
"src/ast/compile-time-value.h",
"src/ast/context-slot-cache.cc",
"src/ast/context-slot-cache.h",
"src/ast/modules.cc",
@ -868,7 +890,6 @@ v8_source_set("v8_base") {
"src/ast/prettyprinter.cc",
"src/ast/prettyprinter.h",
"src/ast/scopeinfo.cc",
"src/ast/scopeinfo.h",
"src/ast/scopes.cc",
"src/ast/scopes.h",
"src/ast/variables.cc",
@ -904,12 +925,14 @@ v8_source_set("v8_base") {
"src/builtins/builtins-handler.cc",
"src/builtins/builtins-internal.cc",
"src/builtins/builtins-interpreter.cc",
"src/builtins/builtins-iterator.cc",
"src/builtins/builtins-json.cc",
"src/builtins/builtins-math.cc",
"src/builtins/builtins-number.cc",
"src/builtins/builtins-object.cc",
"src/builtins/builtins-proxy.cc",
"src/builtins/builtins-reflect.cc",
"src/builtins/builtins-regexp.cc",
"src/builtins/builtins-sharedarraybuffer.cc",
"src/builtins/builtins-string.cc",
"src/builtins/builtins-symbol.cc",
@ -940,6 +963,8 @@ v8_source_set("v8_base") {
"src/compilation-cache.h",
"src/compilation-dependencies.cc",
"src/compilation-dependencies.h",
"src/compilation-info.cc",
"src/compilation-info.h",
"src/compilation-statistics.cc",
"src/compilation-statistics.h",
"src/compiler-dispatcher/compiler-dispatcher-job.cc",
@ -1069,6 +1094,8 @@ v8_source_set("v8_base") {
"src/compiler/loop-peeling.h",
"src/compiler/loop-variable-optimizer.cc",
"src/compiler/loop-variable-optimizer.h",
"src/compiler/machine-graph-verifier.cc",
"src/compiler/machine-graph-verifier.h",
"src/compiler/machine-operator-reducer.cc",
"src/compiler/machine-operator-reducer.h",
"src/compiler/machine-operator.cc",
@ -1132,12 +1159,16 @@ v8_source_set("v8_base") {
"src/compiler/store-store-elimination.h",
"src/compiler/tail-call-optimization.cc",
"src/compiler/tail-call-optimization.h",
"src/compiler/type-cache.cc",
"src/compiler/type-cache.h",
"src/compiler/type-hint-analyzer.cc",
"src/compiler/type-hint-analyzer.h",
"src/compiler/type-hints.cc",
"src/compiler/type-hints.h",
"src/compiler/typed-optimization.cc",
"src/compiler/typed-optimization.h",
"src/compiler/typer.cc",
"src/compiler/typer.h",
"src/compiler/types.cc",
"src/compiler/types.h",
"src/compiler/unwinding-info-writer.h",
"src/compiler/value-numbering-reducer.cc",
"src/compiler/value-numbering-reducer.h",
@ -1216,6 +1247,7 @@ v8_source_set("v8_base") {
"src/crankshaft/lithium-allocator.h",
"src/crankshaft/lithium-codegen.cc",
"src/crankshaft/lithium-codegen.h",
"src/crankshaft/lithium-inl.h",
"src/crankshaft/lithium.cc",
"src/crankshaft/lithium.h",
"src/crankshaft/typing.cc",
@ -1314,6 +1346,7 @@ v8_source_set("v8_base") {
"src/heap/heap-inl.h",
"src/heap/heap.cc",
"src/heap/heap.h",
"src/heap/incremental-marking-inl.h",
"src/heap/incremental-marking-job.cc",
"src/heap/incremental-marking-job.h",
"src/heap/incremental-marking.cc",
@ -1351,6 +1384,7 @@ v8_source_set("v8_base") {
"src/ic/call-optimization.h",
"src/ic/handler-compiler.cc",
"src/ic/handler-compiler.h",
"src/ic/handler-configuration.h",
"src/ic/ic-compiler.cc",
"src/ic/ic-compiler.h",
"src/ic/ic-inl.h",
@ -1382,12 +1416,13 @@ v8_source_set("v8_base") {
"src/interpreter/bytecode-generator.h",
"src/interpreter/bytecode-label.cc",
"src/interpreter/bytecode-label.h",
"src/interpreter/bytecode-operands.cc",
"src/interpreter/bytecode-operands.h",
"src/interpreter/bytecode-peephole-optimizer.cc",
"src/interpreter/bytecode-peephole-optimizer.h",
"src/interpreter/bytecode-peephole-table.h",
"src/interpreter/bytecode-pipeline.cc",
"src/interpreter/bytecode-pipeline.h",
"src/interpreter/bytecode-register-allocator.cc",
"src/interpreter/bytecode-register-allocator.h",
"src/interpreter/bytecode-register-optimizer.cc",
"src/interpreter/bytecode-register-optimizer.h",
@ -1422,11 +1457,16 @@ v8_source_set("v8_base") {
"src/layout-descriptor.h",
"src/list-inl.h",
"src/list.h",
"src/locked-queue-inl.h",
"src/locked-queue.h",
"src/log-inl.h",
"src/log-utils.cc",
"src/log-utils.h",
"src/log.cc",
"src/log.h",
"src/lookup-cache-inl.h",
"src/lookup-cache.cc",
"src/lookup-cache.h",
"src/lookup.cc",
"src/lookup.h",
"src/machine-type.cc",
@ -1444,6 +1484,8 @@ v8_source_set("v8_base") {
"src/objects.h",
"src/ostreams.cc",
"src/ostreams.h",
"src/parsing/duplicate-finder.cc",
"src/parsing/duplicate-finder.h",
"src/parsing/expression-classifier.h",
"src/parsing/func-name-inferrer.cc",
"src/parsing/func-name-inferrer.h",
@ -1495,6 +1537,8 @@ v8_source_set("v8_base") {
"src/profiler/strings-storage.h",
"src/profiler/tick-sample.cc",
"src/profiler/tick-sample.h",
"src/profiler/tracing-cpu-profiler.cc",
"src/profiler/tracing-cpu-profiler.h",
"src/profiler/unbound-queue-inl.h",
"src/profiler/unbound-queue.h",
"src/property-descriptor.cc",
@ -1601,15 +1645,13 @@ v8_source_set("v8_base") {
"src/transitions-inl.h",
"src/transitions.cc",
"src/transitions.h",
"src/type-cache.cc",
"src/type-cache.h",
"src/type-feedback-vector-inl.h",
"src/type-feedback-vector.cc",
"src/type-feedback-vector.h",
"src/type-hints.cc",
"src/type-hints.h",
"src/type-info.cc",
"src/type-info.h",
"src/types.cc",
"src/types.h",
"src/unicode-cache-inl.h",
"src/unicode-cache.h",
"src/unicode-decoder.cc",
@ -1629,6 +1671,7 @@ v8_source_set("v8_base") {
"src/v8threads.h",
"src/value-serializer.cc",
"src/value-serializer.h",
"src/vector.h",
"src/version.cc",
"src/version.h",
"src/vm-state-inl.h",
@ -1636,8 +1679,6 @@ v8_source_set("v8_base") {
"src/wasm/ast-decoder.cc",
"src/wasm/ast-decoder.h",
"src/wasm/decoder.h",
"src/wasm/encoder.cc",
"src/wasm/encoder.h",
"src/wasm/leb-helper.h",
"src/wasm/module-decoder.cc",
"src/wasm/module-decoder.h",
@ -1654,20 +1695,27 @@ v8_source_set("v8_base") {
"src/wasm/wasm-js.cc",
"src/wasm/wasm-js.h",
"src/wasm/wasm-macro-gen.h",
"src/wasm/wasm-module-builder.cc",
"src/wasm/wasm-module-builder.h",
"src/wasm/wasm-module.cc",
"src/wasm/wasm-module.h",
"src/wasm/wasm-opcodes.cc",
"src/wasm/wasm-opcodes.h",
"src/wasm/wasm-result.cc",
"src/wasm/wasm-result.h",
"src/zone-allocator.h",
"src/zone-containers.h",
"src/zone.cc",
"src/zone.h",
"src/zone/accounting-allocator.cc",
"src/zone/accounting-allocator.h",
"src/zone/zone-allocator.h",
"src/zone/zone-allocator.h",
"src/zone/zone-containers.h",
"src/zone/zone-segment.cc",
"src/zone/zone-segment.h",
"src/zone/zone.cc",
"src/zone/zone.h",
]
if (v8_current_cpu == "x86") {
sources += [
sources += [ ### gcmole(arch:ia32) ###
"src/builtins/ia32/builtins-ia32.cc",
"src/compiler/ia32/code-generator-ia32.cc",
"src/compiler/ia32/instruction-codes-ia32.h",
@ -1696,6 +1744,8 @@ v8_source_set("v8_base") {
"src/ia32/interface-descriptors-ia32.cc",
"src/ia32/macro-assembler-ia32.cc",
"src/ia32/macro-assembler-ia32.h",
"src/ia32/simulator-ia32.cc",
"src/ia32/simulator-ia32.h",
"src/ic/ia32/access-compiler-ia32.cc",
"src/ic/ia32/handler-compiler-ia32.cc",
"src/ic/ia32/ic-compiler-ia32.cc",
@ -1705,7 +1755,7 @@ v8_source_set("v8_base") {
"src/regexp/ia32/regexp-macro-assembler-ia32.h",
]
} else if (v8_current_cpu == "x64") {
sources += [
sources += [ ### gcmole(arch:x64) ###
"src/builtins/x64/builtins-x64.cc",
"src/compiler/x64/code-generator-x64.cc",
"src/compiler/x64/instruction-codes-x64.h",
@ -1728,6 +1778,7 @@ v8_source_set("v8_base") {
"src/ic/x64/stub-cache-x64.cc",
"src/regexp/x64/regexp-macro-assembler-x64.cc",
"src/regexp/x64/regexp-macro-assembler-x64.h",
"src/third_party/valgrind/valgrind.h",
"src/x64/assembler-x64-inl.h",
"src/x64/assembler-x64.cc",
"src/x64/assembler-x64.h",
@ -1744,9 +1795,12 @@ v8_source_set("v8_base") {
"src/x64/interface-descriptors-x64.cc",
"src/x64/macro-assembler-x64.cc",
"src/x64/macro-assembler-x64.h",
"src/x64/simulator-x64.cc",
"src/x64/simulator-x64.h",
"src/x64/sse-instr.h",
]
} else if (v8_current_cpu == "arm") {
sources += [
sources += [ ### gcmole(arch:arm) ###
"src/arm/assembler-arm-inl.h",
"src/arm/assembler-arm.cc",
"src/arm/assembler-arm.h",
@ -1792,7 +1846,7 @@ v8_source_set("v8_base") {
"src/regexp/arm/regexp-macro-assembler-arm.h",
]
} else if (v8_current_cpu == "arm64") {
sources += [
sources += [ ### gcmole(arch:arm64) ###
"src/arm64/assembler-arm64-inl.h",
"src/arm64/assembler-arm64.cc",
"src/arm64/assembler-arm64.h",
@ -1850,8 +1904,8 @@ v8_source_set("v8_base") {
"src/regexp/arm64/regexp-macro-assembler-arm64.cc",
"src/regexp/arm64/regexp-macro-assembler-arm64.h",
]
} else if (v8_current_cpu == "mipsel") {
sources += [
} else if (v8_current_cpu == "mips" || v8_current_cpu == "mipsel") {
sources += [ ### gcmole(arch:mipsel) ###
"src/builtins/mips/builtins-mips.cc",
"src/compiler/mips/code-generator-mips.cc",
"src/compiler/mips/instruction-codes-mips.h",
@ -1892,8 +1946,8 @@ v8_source_set("v8_base") {
"src/regexp/mips/regexp-macro-assembler-mips.cc",
"src/regexp/mips/regexp-macro-assembler-mips.h",
]
} else if (v8_current_cpu == "mips64el") {
sources += [
} else if (v8_current_cpu == "mips64" || v8_current_cpu == "mips64el") {
sources += [ ### gcmole(arch:mips64el) ###
"src/builtins/mips64/builtins-mips64.cc",
"src/compiler/mips64/code-generator-mips64.cc",
"src/compiler/mips64/instruction-codes-mips64.h",
@ -1934,8 +1988,50 @@ v8_source_set("v8_base") {
"src/regexp/mips64/regexp-macro-assembler-mips64.cc",
"src/regexp/mips64/regexp-macro-assembler-mips64.h",
]
} else if (v8_current_cpu == "ppc" || v8_current_cpu == "ppc64") {
sources += [ ### gcmole(arch:ppc) ###
"src/builtins/ppc/builtins-ppc.cc",
"src/compiler/ppc/code-generator-ppc.cc",
"src/compiler/ppc/instruction-codes-ppc.h",
"src/compiler/ppc/instruction-scheduler-ppc.cc",
"src/compiler/ppc/instruction-selector-ppc.cc",
"src/crankshaft/ppc/lithium-codegen-ppc.cc",
"src/crankshaft/ppc/lithium-codegen-ppc.h",
"src/crankshaft/ppc/lithium-gap-resolver-ppc.cc",
"src/crankshaft/ppc/lithium-gap-resolver-ppc.h",
"src/crankshaft/ppc/lithium-ppc.cc",
"src/crankshaft/ppc/lithium-ppc.h",
"src/debug/ppc/debug-ppc.cc",
"src/full-codegen/ppc/full-codegen-ppc.cc",
"src/ic/ppc/access-compiler-ppc.cc",
"src/ic/ppc/handler-compiler-ppc.cc",
"src/ic/ppc/ic-compiler-ppc.cc",
"src/ic/ppc/ic-ppc.cc",
"src/ic/ppc/stub-cache-ppc.cc",
"src/ppc/assembler-ppc-inl.h",
"src/ppc/assembler-ppc.cc",
"src/ppc/assembler-ppc.h",
"src/ppc/code-stubs-ppc.cc",
"src/ppc/code-stubs-ppc.h",
"src/ppc/codegen-ppc.cc",
"src/ppc/codegen-ppc.h",
"src/ppc/constants-ppc.cc",
"src/ppc/constants-ppc.h",
"src/ppc/cpu-ppc.cc",
"src/ppc/deoptimizer-ppc.cc",
"src/ppc/disasm-ppc.cc",
"src/ppc/frames-ppc.cc",
"src/ppc/frames-ppc.h",
"src/ppc/interface-descriptors-ppc.cc",
"src/ppc/macro-assembler-ppc.cc",
"src/ppc/macro-assembler-ppc.h",
"src/ppc/simulator-ppc.cc",
"src/ppc/simulator-ppc.h",
"src/regexp/ppc/regexp-macro-assembler-ppc.cc",
"src/regexp/ppc/regexp-macro-assembler-ppc.h",
]
} else if (v8_current_cpu == "s390" || v8_current_cpu == "s390x") {
sources += [
sources += [ ### gcmole(arch:s390) ###
"src/builtins/s390/builtins-s390.cc",
"src/compiler/s390/code-generator-s390.cc",
"src/compiler/s390/instruction-codes-s390.h",
@ -1976,6 +2072,46 @@ v8_source_set("v8_base") {
"src/s390/simulator-s390.cc",
"src/s390/simulator-s390.h",
]
} else if (v8_current_cpu == "x87") {
sources += [ ### gcmole(arch:x87) ###
"src/builtins/x87/builtins-x87.cc",
"src/compiler/x87/code-generator-x87.cc",
"src/compiler/x87/instruction-codes-x87.h",
"src/compiler/x87/instruction-scheduler-x87.cc",
"src/compiler/x87/instruction-selector-x87.cc",
"src/crankshaft/x87/lithium-codegen-x87.cc",
"src/crankshaft/x87/lithium-codegen-x87.h",
"src/crankshaft/x87/lithium-gap-resolver-x87.cc",
"src/crankshaft/x87/lithium-gap-resolver-x87.h",
"src/crankshaft/x87/lithium-x87.cc",
"src/crankshaft/x87/lithium-x87.h",
"src/debug/x87/debug-x87.cc",
"src/full-codegen/x87/full-codegen-x87.cc",
"src/ic/x87/access-compiler-x87.cc",
"src/ic/x87/handler-compiler-x87.cc",
"src/ic/x87/ic-compiler-x87.cc",
"src/ic/x87/ic-x87.cc",
"src/ic/x87/stub-cache-x87.cc",
"src/regexp/x87/regexp-macro-assembler-x87.cc",
"src/regexp/x87/regexp-macro-assembler-x87.h",
"src/x87/assembler-x87-inl.h",
"src/x87/assembler-x87.cc",
"src/x87/assembler-x87.h",
"src/x87/code-stubs-x87.cc",
"src/x87/code-stubs-x87.h",
"src/x87/codegen-x87.cc",
"src/x87/codegen-x87.h",
"src/x87/cpu-x87.cc",
"src/x87/deoptimizer-x87.cc",
"src/x87/disasm-x87.cc",
"src/x87/frames-x87.cc",
"src/x87/frames-x87.h",
"src/x87/interface-descriptors-x87.cc",
"src/x87/macro-assembler-x87.cc",
"src/x87/macro-assembler-x87.h",
"src/x87/simulator-x87.cc",
"src/x87/simulator-x87.h",
]
}
configs = [ ":internal_config" ]
@ -2010,14 +2146,16 @@ v8_source_set("v8_base") {
sources += [ "$target_gen_dir/debug-support.cc" ]
deps += [ ":postmortem-metadata" ]
}
if (v8_enable_inspector_override) {
deps += [ "src/inspector:inspector" ]
}
}
v8_source_set("v8_libbase") {
visibility = [ ":*" ] # Only targets in this file can depend on this.
sources = [
"src/base/accounting-allocator.cc",
"src/base/accounting-allocator.h",
"src/base/adapters.h",
"src/base/atomic-utils.h",
"src/base/atomicops.h",
@ -2035,6 +2173,7 @@ v8_source_set("v8_libbase") {
"src/base/bits.cc",
"src/base/bits.h",
"src/base/build_config.h",
"src/base/compiler-specific.h",
"src/base/cpu.cc",
"src/base/cpu.h",
"src/base/debug/stack_trace.cc",
@ -2048,6 +2187,7 @@ v8_source_set("v8_libbase") {
"src/base/free_deleter.h",
"src/base/functional.cc",
"src/base/functional.h",
"src/base/hashmap-entry.h",
"src/base/hashmap.h",
"src/base/ieee754.cc",
"src/base/ieee754.h",
@ -2199,6 +2339,27 @@ v8_source_set("fuzzer_support") {
configs = [ ":internal_config_base" ]
deps = [
":v8",
]
public_deps = [
":v8_libplatform",
]
}
# Used by fuzzers that would require exposing too many symbols for a proper
# component build.
v8_source_set("fuzzer_support_nocomponent") {
visibility = [ ":*" ] # Only targets in this file can depend on this.
sources = [
"test/fuzzer/fuzzer-support.cc",
"test/fuzzer/fuzzer-support.h",
]
configs = [ ":internal_config_base" ]
deps = [
":v8_maybe_snapshot",
]
@ -2247,7 +2408,10 @@ v8_executable("mkpeephole") {
visibility = [ ":*" ] # Only targets in this file can depend on this.
sources = [
"src/interpreter/bytecode-operands.cc",
"src/interpreter/bytecode-operands.h",
"src/interpreter/bytecode-peephole-optimizer.h",
"src/interpreter/bytecode-traits.h",
"src/interpreter/bytecodes.cc",
"src/interpreter/bytecodes.h",
"src/interpreter/mkpeephole.cc",
@ -2336,6 +2500,7 @@ if (is_component_build) {
v8_executable("d8") {
sources = [
"$target_gen_dir/d8-js.cc",
"src/d8.cc",
"src/d8.h",
]
@ -2363,9 +2528,6 @@ v8_executable("d8") {
sources += [ "src/d8-windows.cc" ]
}
if (!is_component_build) {
sources += [ "$target_gen_dir/d8-js.cc" ]
}
if (v8_enable_i18n_support) {
deps += [ "//third_party/icu" ]
}
@ -2516,7 +2678,10 @@ v8_source_set("json_fuzzer") {
":fuzzer_support",
]
configs = [ ":internal_config" ]
configs = [
":external_config",
":internal_config_base",
]
}
v8_fuzzer("json_fuzzer") {
@ -2528,10 +2693,13 @@ v8_source_set("parser_fuzzer") {
]
deps = [
":fuzzer_support",
":fuzzer_support_nocomponent",
]
configs = [ ":internal_config" ]
configs = [
":external_config",
":internal_config_base",
]
}
v8_fuzzer("parser_fuzzer") {
@ -2546,12 +2714,38 @@ v8_source_set("regexp_fuzzer") {
":fuzzer_support",
]
configs = [ ":internal_config" ]
configs = [
":external_config",
":internal_config_base",
]
}
v8_fuzzer("regexp_fuzzer") {
}
v8_source_set("wasm_module_runner") {
sources = [
"test/common/wasm/wasm-module-runner.cc",
"test/common/wasm/wasm-module-runner.h",
]
configs = [
":external_config",
":internal_config_base",
]
}
v8_source_set("wasm_test_signatures") {
sources = [
"test/common/wasm/test-signatures.h",
]
configs = [
":external_config",
":internal_config_base",
]
}
v8_source_set("wasm_fuzzer") {
sources = [
"test/fuzzer/wasm.cc",
@ -2559,9 +2753,13 @@ v8_source_set("wasm_fuzzer") {
deps = [
":fuzzer_support",
":wasm_module_runner",
]
configs = [ ":internal_config" ]
configs = [
":external_config",
":internal_config_base",
]
}
v8_fuzzer("wasm_fuzzer") {
@ -2574,10 +2772,186 @@ v8_source_set("wasm_asmjs_fuzzer") {
deps = [
":fuzzer_support",
":wasm_module_runner",
]
configs = [ ":internal_config" ]
configs = [
":external_config",
":internal_config_base",
]
}
v8_fuzzer("wasm_asmjs_fuzzer") {
}
v8_source_set("wasm_code_fuzzer") {
sources = [
"test/fuzzer/wasm-code.cc",
]
deps = [
":fuzzer_support",
":wasm_module_runner",
":wasm_test_signatures",
]
configs = [
":external_config",
":internal_config_base",
]
}
v8_fuzzer("wasm_code_fuzzer") {
}
v8_source_set("lib_wasm_section_fuzzer") {
sources = [
"test/fuzzer/wasm-section-fuzzers.cc",
"test/fuzzer/wasm-section-fuzzers.h",
]
configs = [
":external_config",
":internal_config_base",
]
}
v8_source_set("wasm_types_section_fuzzer") {
sources = [
"test/fuzzer/wasm-types-section.cc",
]
deps = [
":fuzzer_support",
":lib_wasm_section_fuzzer",
":wasm_module_runner",
]
configs = [
":external_config",
":internal_config_base",
]
}
v8_fuzzer("wasm_types_section_fuzzer") {
}
v8_source_set("wasm_names_section_fuzzer") {
sources = [
"test/fuzzer/wasm-names-section.cc",
]
deps = [
":fuzzer_support",
":lib_wasm_section_fuzzer",
":wasm_module_runner",
]
configs = [
":external_config",
":internal_config_base",
]
}
v8_fuzzer("wasm_names_section_fuzzer") {
}
v8_source_set("wasm_globals_section_fuzzer") {
sources = [
"test/fuzzer/wasm-globals-section.cc",
]
deps = [
":fuzzer_support",
":lib_wasm_section_fuzzer",
":wasm_module_runner",
]
configs = [
":external_config",
":internal_config_base",
]
}
v8_fuzzer("wasm_globals_section_fuzzer") {
}
v8_source_set("wasm_imports_section_fuzzer") {
sources = [
"test/fuzzer/wasm-imports-section.cc",
]
deps = [
":fuzzer_support",
":lib_wasm_section_fuzzer",
":wasm_module_runner",
]
configs = [
":external_config",
":internal_config_base",
]
}
v8_fuzzer("wasm_imports_section_fuzzer") {
}
v8_source_set("wasm_function_sigs_section_fuzzer") {
sources = [
"test/fuzzer/wasm-function-sigs-section.cc",
]
deps = [
":fuzzer_support",
":lib_wasm_section_fuzzer",
":wasm_module_runner",
]
configs = [
":external_config",
":internal_config_base",
]
}
v8_fuzzer("wasm_function_sigs_section_fuzzer") {
}
v8_source_set("wasm_memory_section_fuzzer") {
sources = [
"test/fuzzer/wasm-memory-section.cc",
]
deps = [
":fuzzer_support",
":lib_wasm_section_fuzzer",
":wasm_module_runner",
]
configs = [
":external_config",
":internal_config_base",
]
}
v8_fuzzer("wasm_memory_section_fuzzer") {
}
v8_source_set("wasm_data_section_fuzzer") {
sources = [
"test/fuzzer/wasm-data-section.cc",
]
deps = [
":fuzzer_support",
":lib_wasm_section_fuzzer",
":wasm_module_runner",
]
configs = [
":external_config",
":internal_config_base",
]
}
v8_fuzzer("wasm_data_section_fuzzer") {
}

1984 deps/v8/ChangeLog (vendored)
The diff for this file is not shown because it is too large.

84 deps/v8/DEPS (vendored)

@ -3,61 +3,62 @@
# all paths in here must match this assumption.
vars = {
"git_url": "https://chromium.googlesource.com",
"chromium_url": "https://chromium.googlesource.com",
}
deps = {
"v8/build":
Var("git_url") + "/chromium/src/build.git" + "@" + "59daf502c36f20b5c9292f4bd9af85791f8a5884",
Var("chromium_url") + "/chromium/src/build.git" + "@" + "475d5b37ded6589c9f8a0d19ced54ddf2e6d14a0",
"v8/tools/gyp":
Var("git_url") + "/external/gyp.git" + "@" + "702ac58e477214c635d9b541932e75a95d349352",
Var("chromium_url") + "/external/gyp.git" + "@" + "e7079f0e0e14108ab0dba58728ff219637458563",
"v8/third_party/icu":
Var("git_url") + "/chromium/deps/icu.git" + "@" + "2341038bf72869a5683a893a2b319a48ffec7f62",
Var("chromium_url") + "/chromium/deps/icu.git" + "@" + "b0bd3ee50bc2e768d7a17cbc60d87f517f024dbe",
"v8/third_party/instrumented_libraries":
Var("git_url") + "/chromium/src/third_party/instrumented_libraries.git" + "@" + "f15768d7fdf68c0748d20738184120c8ab2e6db7",
Var("chromium_url") + "/chromium/src/third_party/instrumented_libraries.git" + "@" + "45f5814b1543e41ea0be54c771e3840ea52cca4a",
"v8/buildtools":
Var("git_url") + "/chromium/buildtools.git" + "@" + "adb8bf4e8fc92aa1717bf151b862d58e6f27c4f2",
Var("chromium_url") + "/chromium/buildtools.git" + "@" + "5fd66957f08bb752dca714a591c84587c9d70762",
"v8/base/trace_event/common":
Var("git_url") + "/chromium/src/base/trace_event/common.git" + "@" + "315bf1e2d45be7d53346c31cfcc37424a32c30c8",
Var("chromium_url") + "/chromium/src/base/trace_event/common.git" + "@" + "e0fa02a02f61430dae2bddfd89a334ea4389f495",
"v8/third_party/WebKit/Source/platform/inspector_protocol":
Var("git_url") + "/chromium/src/third_party/WebKit/Source/platform/inspector_protocol.git" + "@" + "547960151fb364dd9a382fa79ffc9abfb184e3d1",
Var("chromium_url") + "/chromium/src/third_party/WebKit/Source/platform/inspector_protocol.git" + "@" + "3280c57c4c575ce82ccd13e4a403492fb4ca624b",
"v8/third_party/jinja2":
Var("git_url") + "/chromium/src/third_party/jinja2.git" + "@" + "2222b31554f03e62600cd7e383376a7c187967a1",
Var("chromium_url") + "/chromium/src/third_party/jinja2.git" + "@" + "b61a2c009a579593a259c1b300e0ad02bf48fd78",
"v8/third_party/markupsafe":
Var("git_url") + "/chromium/src/third_party/markupsafe.git" + "@" + "484a5661041cac13bfc688a26ec5434b05d18961",
"v8/tools/mb":
Var('git_url') + '/chromium/src/tools/mb.git' + '@' + "99788b8b516c44d7db25cfb68695bc234fdee5ed",
Var("chromium_url") + "/chromium/src/third_party/markupsafe.git" + "@" + "484a5661041cac13bfc688a26ec5434b05d18961",
"v8/tools/swarming_client":
Var('git_url') + '/external/swarming.client.git' + '@' + "e4288c3040a32f2e7ad92f957668f2ee3d36e5a6",
Var('chromium_url') + '/external/swarming.client.git' + '@' + "380e32662312eb107f06fcba6409b0409f8fef72",
"v8/testing/gtest":
Var("git_url") + "/external/github.com/google/googletest.git" + "@" + "6f8a66431cb592dad629028a50b3dd418a408c87",
Var("chromium_url") + "/external/github.com/google/googletest.git" + "@" + "6f8a66431cb592dad629028a50b3dd418a408c87",
"v8/testing/gmock":
Var("git_url") + "/external/googlemock.git" + "@" + "0421b6f358139f02e102c9c332ce19a33faf75be",
Var("chromium_url") + "/external/googlemock.git" + "@" + "0421b6f358139f02e102c9c332ce19a33faf75be",
"v8/test/benchmarks/data":
Var("git_url") + "/v8/deps/third_party/benchmarks.git" + "@" + "05d7188267b4560491ff9155c5ee13e207ecd65f",
Var("chromium_url") + "/v8/deps/third_party/benchmarks.git" + "@" + "05d7188267b4560491ff9155c5ee13e207ecd65f",
"v8/test/mozilla/data":
Var("git_url") + "/v8/deps/third_party/mozilla-tests.git" + "@" + "f6c578a10ea707b1a8ab0b88943fe5115ce2b9be",
"v8/test/simdjs/data": Var("git_url") + "/external/github.com/tc39/ecmascript_simd.git" + "@" + "baf493985cb9ea7cdbd0d68704860a8156de9556",
Var("chromium_url") + "/v8/deps/third_party/mozilla-tests.git" + "@" + "f6c578a10ea707b1a8ab0b88943fe5115ce2b9be",
"v8/test/simdjs/data": Var("chromium_url") + "/external/github.com/tc39/ecmascript_simd.git" + "@" + "baf493985cb9ea7cdbd0d68704860a8156de9556",
"v8/test/test262/data":
Var("git_url") + "/external/github.com/tc39/test262.git" + "@" + "88bc7fe7586f161201c5f14f55c9c489f82b1b67",
Var("chromium_url") + "/external/github.com/tc39/test262.git" + "@" + "29c23844494a7cc2fbebc6948d2cb0bcaddb24e7",
"v8/test/test262/harness":
Var("git_url") + "/external/github.com/test262-utils/test262-harness-py.git" + "@" + "cbd968f54f7a95c6556d53ba852292a4c49d11d8",
Var("chromium_url") + "/external/github.com/test262-utils/test262-harness-py.git" + "@" + "cbd968f54f7a95c6556d53ba852292a4c49d11d8",
"v8/tools/clang":
Var("git_url") + "/chromium/src/tools/clang.git" + "@" + "3afb04a8153e40ff00f9eaa14337851c3ab4a368",
Var("chromium_url") + "/chromium/src/tools/clang.git" + "@" + "1f92f999fc374a479e98a189ebdfe25c09484486",
}
deps_os = {
"android": {
"v8/third_party/android_tools":
Var("git_url") + "/android_tools.git" + "@" + "af1c5a4cd6329ccdcf8c2bc93d9eea02f9d74869",
Var("chromium_url") + "/android_tools.git" + "@" + "25d57ead05d3dfef26e9c19b13ed10b0a69829cf",
},
"win": {
"v8/third_party/cygwin":
Var("git_url") + "/chromium/deps/cygwin.git" + "@" + "c89e446b273697fadf3a10ff1007a97c0b7de6df",
Var("chromium_url") + "/chromium/deps/cygwin.git" + "@" + "c89e446b273697fadf3a10ff1007a97c0b7de6df",
}
}
recursedeps = [ 'v8/third_party/android_tools' ]
recursedeps = [
"v8/buildtools",
"v8/third_party/android_tools",
]
include_rules = [
# Everybody can use some things.
@ -203,6 +204,39 @@ hooks = [
"-s", "v8/buildtools/linux64/gn.sha1",
],
},
{
"name": "wasm_fuzzer",
"pattern": ".",
"action": [ "download_from_google_storage",
"--no_resume",
"--no_auth",
"-u",
"--bucket", "v8-wasm-fuzzer",
"-s", "v8/test/fuzzer/wasm.tar.gz.sha1",
],
},
{
"name": "wasm_asmjs_fuzzer",
"pattern": ".",
"action": [ "download_from_google_storage",
"--no_resume",
"--no_auth",
"-u",
"--bucket", "v8-wasm-asmjs-fuzzer",
"-s", "v8/test/fuzzer/wasm_asmjs.tar.gz.sha1",
],
},
{
"name": "closure_compiler",
"pattern": ".",
"action": [ "download_from_google_storage",
"--no_resume",
"--no_auth",
"-u",
"--bucket", "chromium-v8-closure-compiler",
"-s", "v8/src/inspector/build/closure-compiler.tar.gz.sha1",
],
},
{
# Downloads the current stable linux sysroot to build/linux/ if needed.
# This sysroot updates at about the same rate that the chrome build deps
@ -259,6 +293,6 @@ hooks = [
{
# A change to a .gyp, .gypi, or to GYP itself should run the generator.
"pattern": ".",
"action": ["python", "v8/gypfiles/gyp_v8"],
"action": ["python", "v8/gypfiles/gyp_v8", "--running-as-hook"],
},
]

1 deps/v8/OWNERS (vendored)

@ -22,7 +22,6 @@ mtrofin@chromium.org
mvstanton@chromium.org
mythria@chromium.org
neis@chromium.org
oth@chromium.org
rmcilroy@chromium.org
rossberg@chromium.org
titzer@chromium.org

33 deps/v8/PRESUBMIT.py (vendored)

@ -216,6 +216,38 @@ def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
return []
def _CheckMissingFiles(input_api, output_api):
"""Runs verify_source_deps.py to ensure no files were added that are not in
GN.
"""
# We need to wait until we have an input_api object and use this
# roundabout construct to import checkdeps because this file is
# eval-ed and thus doesn't have __file__.
original_sys_path = sys.path
try:
sys.path = sys.path + [input_api.os_path.join(
input_api.PresubmitLocalPath(), 'tools')]
from verify_source_deps import missing_gn_files, missing_gyp_files
finally:
# Restore sys.path to what it was before.
sys.path = original_sys_path
gn_files = missing_gn_files()
gyp_files = missing_gyp_files()
results = []
if gn_files:
results.append(output_api.PresubmitError(
"You added one or more source files but didn't update the\n"
"corresponding BUILD.gn files:\n",
gn_files))
if gyp_files:
results.append(output_api.PresubmitError(
"You added one or more source files but didn't update the\n"
"corresponding gyp files:\n",
gyp_files))
return results
def _CommonChecks(input_api, output_api):
"""Checks common to both upload and commit."""
results = []
@ -231,6 +263,7 @@ def _CommonChecks(input_api, output_api):
_CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api))
results.extend(
_CheckNoInlineHeaderIncludesInNormalHeaders(input_api, output_api))
results.extend(_CheckMissingFiles(input_api, output_api))
return results


@ -297,8 +297,8 @@
#define TRACE_EVENT_INSTANT_WITH_TIMESTAMP0(category_group, name, scope, \
timestamp) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID_TID_AND_TIMESTAMP( \
TRACE_EVENT_PHASE_INSTANT, category_group, name, 0, 0, timestamp, \
INTERNAL_TRACE_EVENT_ADD_WITH_TIMESTAMP( \
TRACE_EVENT_PHASE_INSTANT, category_group, name, timestamp, \
TRACE_EVENT_FLAG_NONE | scope)
// Syntactic sugars for the sampling tracing in the main thread.
@ -308,8 +308,8 @@
TRACE_EVENT_GET_SAMPLING_STATE_FOR_BUCKET(0)
#define TRACE_EVENT_SET_SAMPLING_STATE(category, name) \
TRACE_EVENT_SET_SAMPLING_STATE_FOR_BUCKET(0, category, name)
#define TRACE_EVENT_SET_NONCONST_SAMPLING_STATE(categoryAndName) \
TRACE_EVENT_SET_NONCONST_SAMPLING_STATE_FOR_BUCKET(0, categoryAndName)
#define TRACE_EVENT_SET_NONCONST_SAMPLING_STATE(category_and_name) \
TRACE_EVENT_SET_NONCONST_SAMPLING_STATE_FOR_BUCKET(0, category_and_name)
// Records a single BEGIN event called "name" immediately, with 0, 1 or 2
// associated arguments. If the category is not enabled, then this
@ -395,10 +395,15 @@
TRACE_EVENT_FLAG_COPY, arg1_name, arg1_val, \
arg2_name, arg2_val)
#define TRACE_EVENT_MARK_WITH_TIMESTAMP0(category_group, name, timestamp) \
INTERNAL_TRACE_EVENT_ADD_WITH_TIMESTAMP( \
TRACE_EVENT_PHASE_MARK, category_group, name, timestamp, \
TRACE_EVENT_FLAG_NONE)
#define TRACE_EVENT_MARK_WITH_TIMESTAMP1(category_group, name, timestamp, \
arg1_name, arg1_val) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID_TID_AND_TIMESTAMP( \
TRACE_EVENT_PHASE_MARK, category_group, name, 0, 0, timestamp, \
INTERNAL_TRACE_EVENT_ADD_WITH_TIMESTAMP( \
TRACE_EVENT_PHASE_MARK, category_group, name, timestamp, \
TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val)
#define TRACE_EVENT_COPY_MARK(category_group, name) \
@ -406,8 +411,8 @@
TRACE_EVENT_FLAG_COPY)
#define TRACE_EVENT_COPY_MARK_WITH_TIMESTAMP(category_group, name, timestamp) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID_TID_AND_TIMESTAMP( \
TRACE_EVENT_PHASE_MARK, category_group, name, 0, 0, timestamp, \
INTERNAL_TRACE_EVENT_ADD_WITH_TIMESTAMP( \
TRACE_EVENT_PHASE_MARK, category_group, name, timestamp, \
TRACE_EVENT_FLAG_COPY)
// Similar to TRACE_EVENT_ENDx but with a custom |at| timestamp provided.
@ -544,6 +549,12 @@
TRACE_EVENT_PHASE_SAMPLE, category_group, name, 0, thread_id, timestamp, \
TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val, arg2_name, arg2_val)
#define TRACE_EVENT_SAMPLE_WITH_ID1(category_group, name, id, arg1_name, \
arg1_val) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_SAMPLE, category_group, \
name, id, TRACE_EVENT_FLAG_NONE, arg1_name, \
arg1_val)
// ASYNC_STEP_* APIs should be only used by legacy code. New code should
// consider using NESTABLE_ASYNC_* APIs to describe substeps within an async
// event.
@ -774,16 +785,19 @@
TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val, arg2_name, arg2_val)
// Records a single NESTABLE_ASYNC_INSTANT event called "name" immediately,
// with one associated argument. If the category is not enabled, then this
// does nothing.
// with none, one or two associated argument. If the category is not enabled,
// then this does nothing.
#define TRACE_EVENT_NESTABLE_ASYNC_INSTANT0(category_group, name, id) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_NESTABLE_ASYNC_INSTANT, \
category_group, name, id, \
TRACE_EVENT_FLAG_NONE)
#define TRACE_EVENT_NESTABLE_ASYNC_INSTANT1(category_group, name, id, \
arg1_name, arg1_val) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID(TRACE_EVENT_PHASE_NESTABLE_ASYNC_INSTANT, \
category_group, name, id, \
TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val)
// Records a single NESTABLE_ASYNC_INSTANT event called "name" immediately,
// with 2 associated arguments. If the category is not enabled, then this
// does nothing.
#define TRACE_EVENT_NESTABLE_ASYNC_INSTANT2( \
category_group, name, id, arg1_name, arg1_val, arg2_name, arg2_val) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID( \
@ -828,15 +842,6 @@
TRACE_EVENT_PHASE_NESTABLE_ASYNC_END, category_group, name, id, \
TRACE_EVENT_API_CURRENT_THREAD_ID, timestamp, TRACE_EVENT_FLAG_COPY)
// Records a single NESTABLE_ASYNC_INSTANT event called "name" immediately,
// with 2 associated arguments. If the category is not enabled, then this
// does nothing.
#define TRACE_EVENT_NESTABLE_ASYNC_INSTANT2( \
category_group, name, id, arg1_name, arg1_val, arg2_name, arg2_val) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID( \
TRACE_EVENT_PHASE_NESTABLE_ASYNC_INSTANT, category_group, name, id, \
TRACE_EVENT_FLAG_NONE, arg1_name, arg1_val, arg2_name, arg2_val)
// Records a single FLOW_BEGIN event called "name" immediately, with 0, 1 or 2
// associated arguments. If the category is not enabled, then this
// does nothing.
@ -958,48 +963,47 @@
#define TRACE_EVENT_CLOCK_SYNC_ISSUER(sync_id, issue_ts, issue_end_ts) \
INTERNAL_TRACE_EVENT_ADD_WITH_TIMESTAMP( \
TRACE_EVENT_PHASE_CLOCK_SYNC, "__metadata", "clock_sync", \
issue_end_ts.ToInternalValue(), TRACE_EVENT_FLAG_NONE, \
"sync_id", sync_id, "issue_ts", issue_ts.ToInternalValue())
issue_end_ts, TRACE_EVENT_FLAG_NONE, \
"sync_id", sync_id, "issue_ts", issue_ts)
// Macros to track the life time and value of arbitrary client objects.
// See also TraceTrackableObject.
#define TRACE_EVENT_OBJECT_CREATED_WITH_ID(category_group, name, id) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID( \
TRACE_EVENT_PHASE_CREATE_OBJECT, category_group, name, \
TRACE_ID_DONT_MANGLE(id), TRACE_EVENT_FLAG_NONE)
TRACE_EVENT_PHASE_CREATE_OBJECT, category_group, name, id, \
TRACE_EVENT_FLAG_NONE)
#define TRACE_EVENT_OBJECT_SNAPSHOT_WITH_ID(category_group, name, id, \
snapshot) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID( \
TRACE_EVENT_PHASE_SNAPSHOT_OBJECT, category_group, name, \
TRACE_ID_DONT_MANGLE(id), TRACE_EVENT_FLAG_NONE, "snapshot", snapshot)
id, TRACE_EVENT_FLAG_NONE, "snapshot", snapshot)
#define TRACE_EVENT_OBJECT_SNAPSHOT_WITH_ID_AND_TIMESTAMP( \
category_group, name, id, timestamp, snapshot) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID_TID_AND_TIMESTAMP( \
TRACE_EVENT_PHASE_SNAPSHOT_OBJECT, category_group, name, \
TRACE_ID_DONT_MANGLE(id), TRACE_EVENT_API_CURRENT_THREAD_ID, timestamp, \
TRACE_EVENT_FLAG_NONE, "snapshot", snapshot)
#define TRACE_EVENT_OBJECT_SNAPSHOT_WITH_ID_AND_TIMESTAMP( \
category_group, name, id, timestamp, snapshot) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID_TID_AND_TIMESTAMP( \
TRACE_EVENT_PHASE_SNAPSHOT_OBJECT, category_group, name, \
id, TRACE_EVENT_API_CURRENT_THREAD_ID, timestamp, TRACE_EVENT_FLAG_NONE, \
"snapshot", snapshot)
#define TRACE_EVENT_OBJECT_DELETED_WITH_ID(category_group, name, id) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID( \
TRACE_EVENT_PHASE_DELETE_OBJECT, category_group, name, \
TRACE_ID_DONT_MANGLE(id), TRACE_EVENT_FLAG_NONE)
TRACE_EVENT_PHASE_DELETE_OBJECT, category_group, name, id, \
TRACE_EVENT_FLAG_NONE)
// Records entering and leaving trace event contexts. |category_group| and
// |name| specify the context category and type. |context| is a
// snapshotted context object id.
#define TRACE_EVENT_ENTER_CONTEXT(category_group, name, context) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID( \
TRACE_EVENT_PHASE_ENTER_CONTEXT, category_group, name, \
TRACE_ID_DONT_MANGLE(context), TRACE_EVENT_FLAG_NONE)
#define TRACE_EVENT_LEAVE_CONTEXT(category_group, name, context) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID( \
TRACE_EVENT_PHASE_LEAVE_CONTEXT, category_group, name, \
TRACE_ID_DONT_MANGLE(context), TRACE_EVENT_FLAG_NONE)
#define TRACE_EVENT_ENTER_CONTEXT(category_group, name, context) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID( \
TRACE_EVENT_PHASE_ENTER_CONTEXT, category_group, name, context, \
TRACE_EVENT_FLAG_NONE)
#define TRACE_EVENT_LEAVE_CONTEXT(category_group, name, context) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID( \
TRACE_EVENT_PHASE_LEAVE_CONTEXT, category_group, name, context, \
TRACE_EVENT_FLAG_NONE)
#define TRACE_EVENT_SCOPED_CONTEXT(category_group, name, context) \
INTERNAL_TRACE_EVENT_SCOPED_CONTEXT(category_group, name, \
TRACE_ID_DONT_MANGLE(context))
INTERNAL_TRACE_EVENT_SCOPED_CONTEXT(category_group, name, context)
// Macro to specify that two trace IDs are identical. For example,
// TRACE_BIND_IDS(
@ -1083,6 +1087,7 @@
#define TRACE_EVENT_FLAG_NONE (static_cast<unsigned int>(0))
#define TRACE_EVENT_FLAG_COPY (static_cast<unsigned int>(1 << 0))
#define TRACE_EVENT_FLAG_HAS_ID (static_cast<unsigned int>(1 << 1))
// TODO(crbug.com/639003): Free this bit after ID mangling is deprecated.
#define TRACE_EVENT_FLAG_MANGLE_ID (static_cast<unsigned int>(1 << 2))
#define TRACE_EVENT_FLAG_SCOPE_OFFSET (static_cast<unsigned int>(1 << 3))
#define TRACE_EVENT_FLAG_SCOPE_EXTRA (static_cast<unsigned int>(1 << 4))
@ -1093,6 +1098,8 @@
#define TRACE_EVENT_FLAG_FLOW_OUT (static_cast<unsigned int>(1 << 9))
#define TRACE_EVENT_FLAG_HAS_CONTEXT_ID (static_cast<unsigned int>(1 << 10))
#define TRACE_EVENT_FLAG_HAS_PROCESS_ID (static_cast<unsigned int>(1 << 11))
#define TRACE_EVENT_FLAG_HAS_LOCAL_ID (static_cast<unsigned int>(1 << 12))
#define TRACE_EVENT_FLAG_HAS_GLOBAL_ID (static_cast<unsigned int>(1 << 13))
#define TRACE_EVENT_FLAG_SCOPE_MASK \
(static_cast<unsigned int>(TRACE_EVENT_FLAG_SCOPE_OFFSET | \

8 deps/v8/build_overrides/build.gni (vendored)

@ -16,3 +16,11 @@ build_with_chromium = false
# Some non-Chromium builds don't support building java targets.
enable_java_templates = false
# Some non-Chromium builds don't use Chromium's third_party/binutils.
linux_use_bundled_binutils_override = true
# Allows different projects to specify their own suppressions files.
asan_suppressions_file = "//build/sanitizers/asan_suppressions.cc"
lsan_suppressions_file = "//build/sanitizers/lsan_suppressions.cc"
tsan_suppressions_file = "//build/sanitizers/tsan_suppressions.cc"

16 deps/v8/build_overrides/v8.gni (vendored)

@ -11,10 +11,8 @@ if (is_android) {
import("//build/config/android/config.gni")
}
if (((v8_current_cpu == "x86" ||
v8_current_cpu == "x64" ||
v8_current_cpu=="x87") &&
(is_linux || is_mac)) ||
if (((v8_current_cpu == "x86" || v8_current_cpu == "x64" ||
v8_current_cpu == "x87") && (is_linux || is_mac)) ||
(v8_current_cpu == "ppc64" && is_linux)) {
v8_enable_gdbjit_default = true
}
@ -23,4 +21,12 @@ v8_imminent_deprecation_warnings_default = true
# Add simple extras solely for the purpose of the cctests.
v8_extra_library_files = [ "//test/cctest/test-extra.js" ]
v8_experimental_extra_library_files = [ "//test/cctest/test-experimental-extra.js" ]
v8_experimental_extra_library_files =
[ "//test/cctest/test-experimental-extra.js" ]
declare_args() {
# Enable inspector. See include/v8-inspector.h.
v8_enable_inspector = false
}
v8_enable_inspector_override = v8_enable_inspector


@ -1,77 +0,0 @@
# Copyright 2015 the V8 project authors. All rights reserved.
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'conditions': [
# Copy the VS runtime DLLs into the isolate so that they
# don't have to be preinstalled on the target machine.
#
# VS2013 runtimes
['OS=="win" and msvs_version==2013 and component=="shared_library" and CONFIGURATION_NAME=="Debug"', {
'variables': {
'files': [
'<(PRODUCT_DIR)/x64/msvcp120d.dll',
'<(PRODUCT_DIR)/x64/msvcr120d.dll',
],
},
}],
['OS=="win" and msvs_version==2013 and component=="shared_library" and CONFIGURATION_NAME=="Release"', {
'variables': {
'files': [
'<(PRODUCT_DIR)/x64/msvcp120.dll',
'<(PRODUCT_DIR)/x64/msvcr120.dll',
],
},
}],
['OS=="win" and msvs_version==2013 and component=="shared_library" and (CONFIGURATION_NAME=="Debug" or CONFIGURATION_NAME=="Debug_x64")', {
'variables': {
'files': [
'<(PRODUCT_DIR)/msvcp120d.dll',
'<(PRODUCT_DIR)/msvcr120d.dll',
],
},
}],
['OS=="win" and msvs_version==2013 and component=="shared_library" and (CONFIGURATION_NAME=="Release" or CONFIGURATION_NAME=="Release_x64")', {
'variables': {
'files': [
'<(PRODUCT_DIR)/msvcp120.dll',
'<(PRODUCT_DIR)/msvcr120.dll',
],
},
}],
# VS2015 runtimes
['OS=="win" and msvs_version==2015 and component=="shared_library" and CONFIGURATION_NAME=="Debug"', {
'variables': {
'files': [
'<(PRODUCT_DIR)/x64/msvcp140d.dll',
'<(PRODUCT_DIR)/x64/vccorlib140d.dll',
],
},
}],
['OS=="win" and msvs_version==2015 and component=="shared_library" and CONFIGURATION_NAME=="Release"', {
'variables': {
'files': [
'<(PRODUCT_DIR)/x64/msvcp140.dll',
'<(PRODUCT_DIR)/x64/vccorlib140.dll',
],
},
}],
['OS=="win" and msvs_version==2015 and component=="shared_library" and (CONFIGURATION_NAME=="Debug" or CONFIGURATION_NAME=="Debug_x64")', {
'variables': {
'files': [
'<(PRODUCT_DIR)/msvcp140d.dll',
'<(PRODUCT_DIR)/vccorlib140d.dll',
],
},
}],
['OS=="win" and msvs_version==2015 and component=="shared_library" and (CONFIGURATION_NAME=="Release" or CONFIGURATION_NAME=="Release_x64")', {
'variables': {
'files': [
'<(PRODUCT_DIR)/msvcp140.dll',
'<(PRODUCT_DIR)/vccorlib140.dll',
],
},
}],
],
}

2 deps/v8/gypfiles/get_landmines.py (vendored)

@ -28,6 +28,8 @@ def main():
print 'Clobbering to hopefully resolve problem with mksnapshot'
print 'Clobber after ICU roll.'
print 'Clobber after Android NDK update.'
print 'Clober to fix windows build problems.'
print 'Clober again to fix windows build problems.'
return 0

14 deps/v8/gypfiles/gyp_v8 (vendored)

@ -118,10 +118,22 @@ def run_gyp(args):
if __name__ == '__main__':
args = sys.argv[1:]
if int(os.environ.get('GYP_CHROMIUM_NO_ACTION', 0)):
gyp_chromium_no_action = os.environ.get('GYP_CHROMIUM_NO_ACTION')
if gyp_chromium_no_action == '1':
print 'Skipping gyp_v8 due to GYP_CHROMIUM_NO_ACTION env var.'
sys.exit(0)
running_as_hook = '--running-as-hook'
if running_as_hook in args and gyp_chromium_no_action != '0':
print 'GYP is now disabled by default in runhooks.\n'
print 'If you really want to run this, either run '
print '`python gypfiles/gyp_v8` explicitly by hand '
print 'or set the environment variable GYP_CHROMIUM_NO_ACTION=0.'
sys.exit(0)
if running_as_hook in args:
args.remove(running_as_hook)
gyp_environment.set_environment()
# This could give false positives since it doesn't actually do real option

6 deps/v8/gypfiles/standalone.gypi (vendored)

@ -46,6 +46,7 @@
'msvs_multi_core_compile%': '1',
'mac_deployment_target%': '10.7',
'release_extra_cflags%': '',
'v8_enable_inspector%': 0,
'variables': {
'variables': {
'variables': {
@ -319,7 +320,7 @@
'android_ndk_root%': '<(base_dir)/third_party/android_tools/ndk/',
'android_host_arch%': "<!(uname -m | sed -e 's/i[3456]86/x86/')",
# Version of the NDK. Used to ensure full rebuilds on NDK rolls.
'android_ndk_version%': 'r11c',
'android_ndk_version%': 'r12b',
'host_os%': "<!(uname -s | sed -e 's/Linux/linux/;s/Darwin/mac/')",
'os_folder_name%': "<!(uname -s | sed -e 's/Linux/linux/;s/Darwin/darwin/')",
},
@ -378,6 +379,9 @@
'arm_version%': '<(arm_version)',
'host_os%': '<(host_os)',
# Print to stdout on Android.
'v8_android_log_stdout%': 1,
'conditions': [
['android_ndk_root==""', {
'variables': {

4 deps/v8/include/DEPS (vendored, new file)

@ -0,0 +1,4 @@
include_rules = [
# v8-inspector-protocol.h depends on generated files under include/inspector.
"+inspector",
]

5 deps/v8/include/OWNERS (vendored)

@ -1,2 +1,7 @@
danno@chromium.org
jochen@chromium.org
per-file v8-inspector.h=dgozman@chromium.org
per-file v8-inspector.h=pfeldman@chromium.org
per-file v8-inspector-protocol.h=dgozman@chromium.org
per-file v8-inspector-protocol.h=pfeldman@chromium.org

62 deps/v8/include/libplatform/v8-tracing.h (vendored)

@ -7,9 +7,17 @@
#include <fstream>
#include <memory>
#include <unordered_set>
#include <vector>
#include "v8-platform.h" // NOLINT(build/include)
namespace v8 {
namespace base {
class Mutex;
} // namespace base
namespace platform {
namespace tracing {
@ -28,19 +36,22 @@ class TraceObject {
TraceObject() {}
~TraceObject();
void Initialize(char phase, const uint8_t* category_enabled_flag,
const char* name, const char* scope, uint64_t id,
uint64_t bind_id, int num_args, const char** arg_names,
const uint8_t* arg_types, const uint64_t* arg_values,
unsigned int flags);
void Initialize(
char phase, const uint8_t* category_enabled_flag, const char* name,
const char* scope, uint64_t id, uint64_t bind_id, int num_args,
const char** arg_names, const uint8_t* arg_types,
const uint64_t* arg_values,
std::unique_ptr<v8::ConvertableToTraceFormat>* arg_convertables,
unsigned int flags);
void UpdateDuration();
void InitializeForTesting(char phase, const uint8_t* category_enabled_flag,
const char* name, const char* scope, uint64_t id,
uint64_t bind_id, int num_args,
const char** arg_names, const uint8_t* arg_types,
const uint64_t* arg_values, unsigned int flags,
int pid, int tid, int64_t ts, int64_t tts,
uint64_t duration, uint64_t cpu_duration);
void InitializeForTesting(
char phase, const uint8_t* category_enabled_flag, const char* name,
const char* scope, uint64_t id, uint64_t bind_id, int num_args,
const char** arg_names, const uint8_t* arg_types,
const uint64_t* arg_values,
std::unique_ptr<v8::ConvertableToTraceFormat>* arg_convertables,
unsigned int flags, int pid, int tid, int64_t ts, int64_t tts,
uint64_t duration, uint64_t cpu_duration);
int pid() const { return pid_; }
int tid() const { return tid_; }
@ -56,6 +67,9 @@ class TraceObject {
const char** arg_names() { return arg_names_; }
uint8_t* arg_types() { return arg_types_; }
ArgValue* arg_values() { return arg_values_; }
std::unique_ptr<v8::ConvertableToTraceFormat>* arg_convertables() {
return arg_convertables_;
}
unsigned int flags() const { return flags_; }
int64_t ts() { return ts_; }
int64_t tts() { return tts_; }
@ -71,10 +85,12 @@ class TraceObject {
const uint8_t* category_enabled_flag_;
uint64_t id_;
uint64_t bind_id_;
int num_args_;
int num_args_ = 0;
const char* arg_names_[kTraceMaxNumArgs];
uint8_t arg_types_[kTraceMaxNumArgs];
ArgValue arg_values_[kTraceMaxNumArgs];
std::unique_ptr<v8::ConvertableToTraceFormat>
arg_convertables_[kTraceMaxNumArgs];
char* parameter_copy_storage_ = nullptr;
unsigned int flags_;
int64_t ts_;
@ -217,21 +233,27 @@ class TracingController {
ENABLED_FOR_ETW_EXPORT = 1 << 3
};
TracingController() {}
TracingController();
~TracingController();
void Initialize(TraceBuffer* trace_buffer);
const uint8_t* GetCategoryGroupEnabled(const char* category_group);
static const char* GetCategoryGroupName(const uint8_t* category_enabled_flag);
uint64_t AddTraceEvent(char phase, const uint8_t* category_enabled_flag,
const char* name, const char* scope, uint64_t id,
uint64_t bind_id, int32_t num_args,
const char** arg_names, const uint8_t* arg_types,
const uint64_t* arg_values, unsigned int flags);
uint64_t AddTraceEvent(
char phase, const uint8_t* category_enabled_flag, const char* name,
const char* scope, uint64_t id, uint64_t bind_id, int32_t num_args,
const char** arg_names, const uint8_t* arg_types,
const uint64_t* arg_values,
std::unique_ptr<v8::ConvertableToTraceFormat>* arg_convertables,
unsigned int flags);
void UpdateTraceEventDuration(const uint8_t* category_enabled_flag,
const char* name, uint64_t handle);
void StartTracing(TraceConfig* trace_config);
void StopTracing();
void AddTraceStateObserver(Platform::TraceStateObserver* observer);
void RemoveTraceStateObserver(Platform::TraceStateObserver* observer);
private:
const uint8_t* GetCategoryGroupEnabledInternal(const char* category_group);
void UpdateCategoryGroupEnabledFlag(size_t category_index);
@ -239,6 +261,8 @@ class TracingController {
std::unique_ptr<TraceBuffer> trace_buffer_;
std::unique_ptr<TraceConfig> trace_config_;
std::unique_ptr<base::Mutex> mutex_;
std::unordered_set<Platform::TraceStateObserver*> observers_;
Mode mode_ = DISABLED;
// Disallow copy and assign

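For reference, a minimal embedder-side sketch of driving the updated TracingController declared above. Only Initialize/StartTracing/StopTracing and the new AddTraceStateObserver/RemoveTraceStateObserver hooks appear in this diff; the TraceWriter/TraceBuffer/TraceConfig factory methods, the OnTraceEnabled()/OnTraceDisabled() observer interface from v8-platform.h, and the include path are assumptions about this V8 version, not something shown here.

#include <cstdio>
#include <fstream>

#include "libplatform/v8-tracing.h"  // Assumed embedder include path.

namespace {

// Assumed shape of v8::Platform::TraceStateObserver (declared in
// v8-platform.h): notified when tracing is started and stopped via the
// new Add/RemoveTraceStateObserver API.
class LoggingTraceStateObserver : public v8::Platform::TraceStateObserver {
 public:
  void OnTraceEnabled() override { std::fprintf(stderr, "tracing enabled\n"); }
  void OnTraceDisabled() override { std::fprintf(stderr, "tracing disabled\n"); }
};

}  // namespace

int main() {
  using namespace v8::platform::tracing;

  std::ofstream trace_file("v8_trace.json");

  // Assumed factories: a JSON trace writer feeding a ring-buffer TraceBuffer.
  TraceWriter* writer = TraceWriter::CreateJSONTraceWriter(trace_file);
  TraceBuffer* buffer = TraceBuffer::CreateTraceBufferRingBuffer(
      TraceBuffer::kRingBufferChunks, writer);

  TracingController controller;
  controller.Initialize(buffer);  // The controller takes ownership of the buffer.

  LoggingTraceStateObserver observer;
  controller.AddTraceStateObserver(&observer);  // New in this update.

  // Assumed factory for a default config; StartTracing takes ownership of it.
  controller.StartTracing(TraceConfig::CreateDefaultTraceConfig());
  // ... create the platform/isolate and run scripts with tracing active ...
  controller.StopTracing();

  controller.RemoveTraceStateObserver(&observer);
  return 0;
}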
13 deps/v8/include/v8-inspector-protocol.h (vendored, new file)

@ -0,0 +1,13 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_V8_INSPECTOR_PROTOCOL_H_
#define V8_V8_INSPECTOR_PROTOCOL_H_
#include "inspector/Debugger.h" // NOLINT(build/include)
#include "inspector/Runtime.h" // NOLINT(build/include)
#include "inspector/Schema.h" // NOLINT(build/include)
#include "v8-inspector.h" // NOLINT(build/include)
#endif // V8_V8_INSPECTOR_PROTOCOL_H_

267 deps/v8/include/v8-inspector.h (vendored, new file)

@ -0,0 +1,267 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_V8_INSPECTOR_H_
#define V8_V8_INSPECTOR_H_
#include <stdint.h>
#include <cctype>
#include <memory>
#include "v8.h" // NOLINT(build/include)
namespace v8_inspector {
namespace protocol {
namespace Debugger {
namespace API {
class SearchMatch;
}
}
namespace Runtime {
namespace API {
class RemoteObject;
class StackTrace;
}
}
namespace Schema {
namespace API {
class Domain;
}
}
} // namespace protocol
class V8_EXPORT StringView {
public:
StringView() : m_is8Bit(true), m_length(0), m_characters8(nullptr) {}
StringView(const uint8_t* characters, size_t length)
: m_is8Bit(true), m_length(length), m_characters8(characters) {}
StringView(const uint16_t* characters, size_t length)
: m_is8Bit(false), m_length(length), m_characters16(characters) {}
bool is8Bit() const { return m_is8Bit; }
size_t length() const { return m_length; }
// TODO(dgozman): add DCHECK(m_is8Bit) to accessors once platform can be used
// here.
const uint8_t* characters8() const { return m_characters8; }
const uint16_t* characters16() const { return m_characters16; }
private:
bool m_is8Bit;
size_t m_length;
union {
const uint8_t* m_characters8;
const uint16_t* m_characters16;
};
};
class V8_EXPORT StringBuffer {
public:
virtual ~StringBuffer() {}
virtual const StringView& string() = 0;
// This method copies contents.
static std::unique_ptr<StringBuffer> create(const StringView&);
};
class V8_EXPORT V8ContextInfo {
public:
V8ContextInfo(v8::Local<v8::Context> context, int contextGroupId,
const StringView& humanReadableName)
: context(context),
contextGroupId(contextGroupId),
humanReadableName(humanReadableName),
hasMemoryOnConsole(false) {}
v8::Local<v8::Context> context;
// Each v8::Context is a part of a group. The group id must be non-zero.
int contextGroupId;
StringView humanReadableName;
StringView origin;
StringView auxData;
bool hasMemoryOnConsole;
private:
// Disallow copying and allocating this one.
enum NotNullTagEnum { NotNullLiteral };
void* operator new(size_t) = delete;
void* operator new(size_t, NotNullTagEnum, void*) = delete;
void* operator new(size_t, void*) = delete;
V8ContextInfo(const V8ContextInfo&) = delete;
V8ContextInfo& operator=(const V8ContextInfo&) = delete;
};
class V8_EXPORT V8StackTrace {
public:
virtual bool isEmpty() const = 0;
virtual StringView topSourceURL() const = 0;
virtual int topLineNumber() const = 0;
virtual int topColumnNumber() const = 0;
virtual StringView topScriptId() const = 0;
virtual StringView topFunctionName() const = 0;
virtual ~V8StackTrace() {}
virtual std::unique_ptr<protocol::Runtime::API::StackTrace>
buildInspectorObject() const = 0;
virtual std::unique_ptr<StringBuffer> toString() const = 0;
// Safe to pass between threads, drops async chain.
virtual std::unique_ptr<V8StackTrace> clone() = 0;
};
class V8_EXPORT V8InspectorSession {
public:
virtual ~V8InspectorSession() {}
// Cross-context inspectable values (DOM nodes in different worlds, etc.).
class V8_EXPORT Inspectable {
public:
virtual v8::Local<v8::Value> get(v8::Local<v8::Context>) = 0;
virtual ~Inspectable() {}
};
virtual void addInspectedObject(std::unique_ptr<Inspectable>) = 0;
// Dispatching protocol messages.
static bool canDispatchMethod(const StringView& method);
virtual void dispatchProtocolMessage(const StringView& message) = 0;
virtual std::unique_ptr<StringBuffer> stateJSON() = 0;
virtual std::vector<std::unique_ptr<protocol::Schema::API::Domain>>
supportedDomains() = 0;
// Debugger actions.
virtual void schedulePauseOnNextStatement(const StringView& breakReason,
const StringView& breakDetails) = 0;
virtual void cancelPauseOnNextStatement() = 0;
virtual void breakProgram(const StringView& breakReason,
const StringView& breakDetails) = 0;
virtual void setSkipAllPauses(bool) = 0;
virtual void resume() = 0;
virtual void stepOver() = 0;
virtual std::vector<std::unique_ptr<protocol::Debugger::API::SearchMatch>>
searchInTextByLines(const StringView& text, const StringView& query,
bool caseSensitive, bool isRegex) = 0;
// Remote objects.
virtual std::unique_ptr<protocol::Runtime::API::RemoteObject> wrapObject(
v8::Local<v8::Context>, v8::Local<v8::Value>,
const StringView& groupName) = 0;
virtual bool unwrapObject(std::unique_ptr<StringBuffer>* error,
const StringView& objectId, v8::Local<v8::Value>*,
v8::Local<v8::Context>*,
std::unique_ptr<StringBuffer>* objectGroup) = 0;
virtual void releaseObjectGroup(const StringView&) = 0;
};
enum class V8ConsoleAPIType { kClear, kDebug, kLog, kInfo, kWarning, kError };
class V8_EXPORT V8InspectorClient {
public:
virtual ~V8InspectorClient() {}
virtual void runMessageLoopOnPause(int contextGroupId) {}
virtual void quitMessageLoopOnPause() {}
virtual void runIfWaitingForDebugger(int contextGroupId) {}
virtual void muteMetrics(int contextGroupId) {}
virtual void unmuteMetrics(int contextGroupId) {}
virtual void beginUserGesture() {}
virtual void endUserGesture() {}
virtual std::unique_ptr<StringBuffer> valueSubtype(v8::Local<v8::Value>) {
return nullptr;
}
virtual bool formatAccessorsAsProperties(v8::Local<v8::Value>) {
return false;
}
virtual bool isInspectableHeapObject(v8::Local<v8::Object>) { return true; }
virtual v8::Local<v8::Context> ensureDefaultContextInGroup(
int contextGroupId) {
return v8::Local<v8::Context>();
}
virtual void beginEnsureAllContextsInGroup(int contextGroupId) {}
virtual void endEnsureAllContextsInGroup(int contextGroupId) {}
virtual void installAdditionalCommandLineAPI(v8::Local<v8::Context>,
v8::Local<v8::Object>) {}
virtual void consoleAPIMessage(int contextGroupId, V8ConsoleAPIType,
const StringView& message,
const StringView& url, unsigned lineNumber,
unsigned columnNumber, V8StackTrace*) {}
virtual v8::MaybeLocal<v8::Value> memoryInfo(v8::Isolate*,
v8::Local<v8::Context>) {
return v8::MaybeLocal<v8::Value>();
}
virtual void consoleTime(const StringView& title) {}
virtual void consoleTimeEnd(const StringView& title) {}
virtual void consoleTimeStamp(const StringView& title) {}
virtual double currentTimeMS() { return 0; }
typedef void (*TimerCallback)(void*);
virtual void startRepeatingTimer(double, TimerCallback, void* data) {}
virtual void cancelTimer(void* data) {}
// TODO(dgozman): this was added to support service worker shadow page. We
// should not connect at all.
virtual bool canExecuteScripts(int contextGroupId) { return true; }
};
class V8_EXPORT V8Inspector {
public:
static std::unique_ptr<V8Inspector> create(v8::Isolate*, V8InspectorClient*);
virtual ~V8Inspector() {}
// Contexts instrumentation.
virtual void contextCreated(const V8ContextInfo&) = 0;
virtual void contextDestroyed(v8::Local<v8::Context>) = 0;
virtual void resetContextGroup(int contextGroupId) = 0;
// Various instrumentation.
virtual void willExecuteScript(v8::Local<v8::Context>, int scriptId) = 0;
virtual void didExecuteScript(v8::Local<v8::Context>) = 0;
virtual void idleStarted() = 0;
virtual void idleFinished() = 0;
// Async stack traces instrumentation.
virtual void asyncTaskScheduled(const StringView& taskName, void* task,
bool recurring) = 0;
virtual void asyncTaskCanceled(void* task) = 0;
virtual void asyncTaskStarted(void* task) = 0;
virtual void asyncTaskFinished(void* task) = 0;
virtual void allAsyncTasksCanceled() = 0;
// Exceptions instrumentation.
virtual unsigned exceptionThrown(
v8::Local<v8::Context>, const StringView& message,
v8::Local<v8::Value> exception, const StringView& detailedMessage,
const StringView& url, unsigned lineNumber, unsigned columnNumber,
std::unique_ptr<V8StackTrace>, int scriptId) = 0;
virtual void exceptionRevoked(v8::Local<v8::Context>, unsigned exceptionId,
const StringView& message) = 0;
// Connection.
class V8_EXPORT Channel {
public:
virtual ~Channel() {}
virtual void sendProtocolResponse(int callId,
const StringView& message) = 0;
virtual void sendProtocolNotification(const StringView& message) = 0;
virtual void flushProtocolNotifications() = 0;
};
virtual std::unique_ptr<V8InspectorSession> connect(
int contextGroupId, Channel*, const StringView& state) = 0;
// API methods.
virtual std::unique_ptr<V8StackTrace> createStackTrace(
v8::Local<v8::StackTrace>) = 0;
virtual std::unique_ptr<V8StackTrace> captureStackTrace(bool fullStack) = 0;
};
} // namespace v8_inspector
#endif // V8_V8_INSPECTOR_H_
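A minimal embedder sketch of the new v8-inspector.h API above (not part of this commit; NullClient, NullChannel, the AttachInspector helper and the contextGroupId value 1 are assumptions for illustration):

#include <memory>
#include "v8-inspector.h"

// Empty client: every V8InspectorClient method has a default implementation.
class NullClient : public v8_inspector::V8InspectorClient {};

// Channel that discards protocol traffic; a real embedder forwards it to a
// DevTools frontend.
class NullChannel : public v8_inspector::V8Inspector::Channel {
 public:
  void sendProtocolResponse(int callId,
                            const v8_inspector::StringView& message) override {}
  void sendProtocolNotification(
      const v8_inspector::StringView& message) override {}
  void flushProtocolNotifications() override {}
};

void AttachInspector(v8::Isolate* isolate, v8::Local<v8::Context> context) {
  // A real embedder keeps client, channel, inspector and session alive for
  // the lifetime of the isolate instead of on the stack.
  NullClient client;
  NullChannel channel;
  std::unique_ptr<v8_inspector::V8Inspector> inspector =
      v8_inspector::V8Inspector::create(isolate, &client);
  inspector->contextCreated(v8_inspector::V8ContextInfo(
      context, 1 /* contextGroupId */, v8_inspector::StringView()));
  std::unique_ptr<v8_inspector::V8InspectorSession> session =
      inspector->connect(1, &channel, v8_inspector::StringView());
  // session->dispatchProtocolMessage(...) would feed DevTools protocol JSON.
  (void)session;
}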

66
deps/v8/include/v8-platform.h vendored

@ -7,6 +7,8 @@
#include <stddef.h>
#include <stdint.h>
#include <memory>
#include <string>
namespace v8 {
@ -17,24 +19,38 @@ class Isolate;
*/
class Task {
public:
virtual ~Task() {}
virtual ~Task() = default;
virtual void Run() = 0;
};
/**
* An IdleTask represents a unit of work to be performed in idle time.
* The Run method is invoked with an argument that specifies the deadline in
* seconds returned by MonotonicallyIncreasingTime().
* The idle task is expected to complete by this deadline.
*/
class IdleTask {
public:
virtual ~IdleTask() {}
virtual ~IdleTask() = default;
virtual void Run(double deadline_in_seconds) = 0;
};
/**
* The interface represents complex arguments to trace events.
*/
class ConvertableToTraceFormat {
public:
virtual ~ConvertableToTraceFormat() = default;
/**
* Append the class info to the provided |out| string. The appended
* data must be a valid JSON object. Strings must be properly quoted, and
* escaped. There is no processing applied to the content after it is
* appended.
*/
virtual void AppendAsTraceFormat(std::string* out) const = 0;
};
/**
* V8 Platform abstraction layer.
@ -54,7 +70,7 @@ class Platform {
kLongRunningTask
};
virtual ~Platform() {}
virtual ~Platform() = default;
/**
* Gets the number of threads that are used to execute background tasks. Is
@ -158,12 +174,44 @@ class Platform {
return 0;
}
/**
* Adds a trace event to the platform tracing system. This function call is
* usually the result of a TRACE_* macro from trace_event_common.h when
* tracing and the category of the particular trace are enabled. It is not
* advisable to call this function on its own; it is really only meant to be
* used by the trace macros. The returned handle can be used by
* UpdateTraceEventDuration to update the duration of COMPLETE events.
*/
virtual uint64_t AddTraceEvent(
char phase, const uint8_t* category_enabled_flag, const char* name,
const char* scope, uint64_t id, uint64_t bind_id, int32_t num_args,
const char** arg_names, const uint8_t* arg_types,
const uint64_t* arg_values,
std::unique_ptr<ConvertableToTraceFormat>* arg_convertables,
unsigned int flags) {
return AddTraceEvent(phase, category_enabled_flag, name, scope, id, bind_id,
num_args, arg_names, arg_types, arg_values, flags);
}
/**
* Sets the duration field of a COMPLETE trace event. It must be called with
* the handle returned from AddTraceEvent().
**/
virtual void UpdateTraceEventDuration(const uint8_t* category_enabled_flag,
const char* name, uint64_t handle) {}
class TraceStateObserver {
public:
virtual ~TraceStateObserver() = default;
virtual void OnTraceEnabled() = 0;
virtual void OnTraceDisabled() = 0;
};
/** Adds tracing state change observer. */
virtual void AddTraceStateObserver(TraceStateObserver*) {}
/** Removes tracing state change observer. */
virtual void RemoveTraceStateObserver(TraceStateObserver*) {}
};
} // namespace v8
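A short sketch of the two tracing hooks added above, a ConvertableToTraceFormat argument and a Platform::TraceStateObserver (class and variable names are invented for the example):

#include <string>
#include <utility>
#include "v8-platform.h"

// Wraps a pre-built JSON object so it can be attached to a trace event.
class JsonTraceArg : public v8::ConvertableToTraceFormat {
 public:
  explicit JsonTraceArg(std::string json) : json_(std::move(json)) {}
  // Per the contract above: append a valid, already-escaped JSON object.
  void AppendAsTraceFormat(std::string* out) const override { *out += json_; }
 private:
  std::string json_;
};

// Reacts to tracing being switched on or off.
class TraceToggleObserver : public v8::Platform::TraceStateObserver {
 public:
  void OnTraceEnabled() override { /* start emitting embedder events */ }
  void OnTraceDisabled() override { /* stop emitting embedder events */ }
};

// Usage, assuming the embedder owns a v8::Platform* platform:
//   TraceToggleObserver observer;
//   platform->AddTraceStateObserver(&observer);
//   ...
//   platform->RemoveTraceStateObserver(&observer);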

28
deps/v8/include/v8-profiler.h vendored

@ -46,6 +46,20 @@ template class V8_EXPORT std::vector<v8::CpuProfileDeoptInfo>;
namespace v8 {
/**
* TracingCpuProfiler monitors tracing being enabled/disabled
* and emits CpuProfile trace events once v8.cpu_profile2 tracing category
* is enabled. It has no overhead unless the category is enabled.
*/
class V8_EXPORT TracingCpuProfiler {
public:
static std::unique_ptr<TracingCpuProfiler> Create(Isolate*);
virtual ~TracingCpuProfiler() = default;
protected:
TracingCpuProfiler() = default;
};
// TickSample captures the information collected for each sample.
struct TickSample {
// Internal profiling (with --prof + tools/$OS-tick-processor) wants to
@ -131,12 +145,26 @@ class V8_EXPORT CpuProfileNode {
/** Returns function name (empty string for anonymous functions.) */
Local<String> GetFunctionName() const;
/**
* Returns function name (empty string for anonymous functions.)
* The string ownership is *not* passed to the caller. It stays valid until
* profile is deleted. The function is thread safe.
*/
const char* GetFunctionNameStr() const;
/** Returns id of the script where function is located. */
int GetScriptId() const;
/** Returns resource name for script from where the function originates. */
Local<String> GetScriptResourceName() const;
/**
* Returns resource name for script from where the function originates.
* The string ownership is *not* passed to the caller. It stays valid until
* profile is deleted. The function is thread safe.
*/
const char* GetScriptResourceNameStr() const;
/**
* Returns the number, 1-based, of the line where the function originates.
* kNoLineNumberInfo if no line number information is available.

17
deps/v8/include/v8-util.h vendored

@ -206,14 +206,19 @@ class PersistentValueMapBase {
}
/**
* Call V8::RegisterExternallyReferencedObject with the map value for given
* key.
* Deprecated. Call V8::RegisterExternallyReferencedObject with the map value
* for given key.
* TODO(hlopko) Remove once migration to reporter is finished.
*/
void RegisterExternallyReferencedObject(K& key) {
void RegisterExternallyReferencedObject(K& key) {}
/**
* Use EmbedderReachableReferenceReporter with the map value for given key.
*/
void RegisterExternallyReferencedObject(
EmbedderReachableReferenceReporter* reporter, K& key) {
DCHECK(Contains(key));
V8::RegisterExternallyReferencedObject(
reinterpret_cast<internal::Object**>(FromVal(Traits::Get(&impl_, key))),
reinterpret_cast<internal::Isolate*>(GetIsolate()));
reporter->ReportExternalReference(FromVal(Traits::Get(&impl_, key)));
}
/**

6
deps/v8/include/v8-version.h vendored

@ -9,9 +9,9 @@
// NOTE these macros are used by some of the tool scripts and the build
// system so their names cannot be changed without changing the scripts.
#define V8_MAJOR_VERSION 5
#define V8_MINOR_VERSION 4
#define V8_BUILD_NUMBER 500
#define V8_PATCH_LEVEL 46
#define V8_MINOR_VERSION 5
#define V8_BUILD_NUMBER 372
#define V8_PATCH_LEVEL 40
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
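An illustrative compile-time guard built from the bumped macros above; embedders commonly gate use of 5.5-only headers (such as v8-inspector.h) this way. HAS_V8_55_APIS is a hypothetical embedder-side macro:

#include "v8-version.h"

#if V8_MAJOR_VERSION > 5 || (V8_MAJOR_VERSION == 5 && V8_MINOR_VERSION >= 5)
#define HAS_V8_55_APIS 1  // hypothetical embedder-side feature macro
#else
#define HAS_V8_55_APIS 0
#endif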

983
deps/v8/include/v8.h vendored

Diff not shown because of its large size.

254
deps/v8/infra/mb/mb_config.pyl vendored

@ -9,12 +9,18 @@
# Bots are ordered by appearance on waterfall.
'masters': {
'developer_default': {
'arm.debug': 'default_debug_arm',
'arm.optdebug': 'default_optdebug_arm',
'arm.release': 'default_release_arm',
'arm64.debug': 'default_debug_arm64',
'arm64.optdebug': 'default_optdebug_arm64',
'arm64.release': 'default_release_arm64',
'ia32.debug': 'default_debug_x86',
'ia32.optdebug': 'default_optdebug_x86',
'ia32.release': 'default_release_x86',
'x64.debug': 'default_debug_x64',
'x64.optdebug': 'default_optdebug_x64',
'x64.release': 'default_release_x64',
'x86.debug': 'default_debug_x86',
'x86.optdebug': 'default_optdebug_x86',
'x86.release': 'default_release_x86',
},
'client.dart.fyi': {
@ -32,7 +38,7 @@
'V8 Linux - nosnap builder': 'gn_release_x86_no_snap',
'V8 Linux - nosnap debug builder': 'gn_debug_x86_no_snap',
'V8 Linux - shared': 'gn_release_x86_shared_verify_heap',
'V8 Linux - noi18n - debug': 'gyp_debug_x86_no_i18n',
'V8 Linux - noi18n - debug': 'gn_debug_x86_no_i18n',
# Linux64.
'V8 Linux64 - builder': 'gn_release_x64',
'V8 Linux64 - debug builder': 'gn_debug_x64_valgrind',
@ -40,34 +46,35 @@
'V8 Linux64 - internal snapshot': 'gn_release_x64_internal',
'V8 Linux64 - gyp': 'gyp_release_x64',
# Windows.
'V8 Win32 - builder': 'gyp_release_x86_minimal_symbols',
'V8 Win32 - debug builder': 'gyp_debug_x86_minimal_symbols',
'V8 Win32 - builder': 'gn_release_x86_minimal_symbols',
'V8 Win32 - debug builder': 'gn_debug_x86_minimal_symbols',
'V8 Win32 - nosnap - shared':
'gyp_release_x86_no_snap_shared_minimal_symbols',
'V8 Win64': 'gyp_release_x64_minimal_symbols',
'V8 Win64 - debug': 'gyp_debug_x64_minimal_symbols',
'V8 Win64 - clang': 'gyp_release_x64_clang',
'gn_release_x86_no_snap_shared_minimal_symbols',
'V8 Win64': 'gn_release_x64_minimal_symbols',
'V8 Win64 - debug': 'gn_debug_x64_minimal_symbols',
# TODO(machenbach): Switch plugins on when errors are fixed.
'V8 Win64 - clang': 'gn_release_x64_clang',
# Mac.
'V8 Mac': 'gn_release_x86',
'V8 Mac - debug': 'gn_debug_x86',
'V8 Mac64': 'gn_release_x64',
'V8 Mac64 - debug': 'gn_debug_x64',
'V8 Mac GC Stress': 'gn_debug_x86',
'V8 Mac64 ASAN': 'gyp_release_x64_asan',
'V8 Mac64 ASAN': 'gn_release_x64_asan_no_lsan',
# Sanitizers.
'V8 Linux64 ASAN': 'gyp_release_x64_asan',
'V8 Linux64 ASAN': 'gn_release_x64_asan',
'V8 Linux64 TSAN': 'gn_release_x64_tsan',
'V8 Linux - arm64 - sim - MSAN': 'gn_release_simulate_arm64_msan',
# Clusterfuzz.
'V8 Linux64 ASAN no inline - release builder':
'gyp_release_x64_asan_symbolized_edge_verify_heap',
'V8 Linux64 ASAN - debug builder': 'gyp_debug_x64_asan_edge',
'gn_release_x64_asan_symbolized_edge_verify_heap',
'V8 Linux64 ASAN - debug builder': 'gn_debug_x64_asan_edge',
'V8 Linux64 ASAN arm64 - debug builder':
'gyp_debug_simulate_arm64_asan_edge',
'gn_debug_simulate_arm64_asan_edge',
'V8 Linux ASAN arm - debug builder':
'gyp_debug_simulate_arm_asan_edge',
'gn_debug_simulate_arm_asan_edge',
'V8 Linux ASAN mipsel - debug builder':
'gyp_debug_simulate_mipsel_asan_edge',
'gn_debug_simulate_mipsel_asan_edge',
# Misc.
'V8 Linux gcc 4.8': 'gn_release_x86_gcc',
# FYI.
@ -86,13 +93,13 @@
'client.v8.ports': {
# Arm.
'V8 Arm - builder': 'gyp_release_arm',
'V8 Arm - debug builder': 'gyp_debug_arm',
'V8 Android Arm - builder': 'gyp_release_android_arm',
'V8 Linux - arm - sim': 'gyp_release_simulate_arm',
'V8 Linux - arm - sim - debug': 'gyp_debug_simulate_arm',
'V8 Arm - builder': 'gn_release_arm',
'V8 Arm - debug builder': 'gn_debug_arm',
'V8 Android Arm - builder': 'gn_release_android_arm',
'V8 Linux - arm - sim': 'gn_release_simulate_arm',
'V8 Linux - arm - sim - debug': 'gn_debug_simulate_arm',
# Arm64.
'V8 Android Arm64 - builder': 'gyp_release_android_arm64',
'V8 Android Arm64 - builder': 'gn_release_android_arm64',
'V8 Linux - arm64 - sim': 'gn_release_simulate_arm64',
'V8 Linux - arm64 - sim - debug': 'gn_debug_simulate_arm64',
'V8 Linux - arm64 - sim - nosnap - debug':
@ -100,8 +107,8 @@
'V8 Linux - arm64 - sim - gc stress': 'gn_debug_simulate_arm64',
# Mips.
'V8 Mips - builder': 'gyp_release_mips_no_snap_no_i18n',
'V8 Linux - mipsel - sim - builder': 'gyp_release_simulate_mipsel',
'V8 Linux - mips64el - sim - builder': 'gyp_release_simulate_mips64el',
'V8 Linux - mipsel - sim - builder': 'gn_release_simulate_mipsel',
'V8 Linux - mips64el - sim - builder': 'gn_release_simulate_mips64el',
# PPC.
'V8 Linux - ppc - sim': 'gyp_release_simulate_ppc',
'V8 Linux - ppc64 - sim': 'gyp_release_simulate_ppc64',
@ -117,18 +124,18 @@
'V8 Linux - beta branch - debug': 'gn_debug_x86',
'V8 Linux - stable branch': 'gn_release_x86',
'V8 Linux - stable branch - debug': 'gn_debug_x86',
'V8 Linux64 - beta branch': 'gyp_release_x64',
'V8 Linux64 - beta branch': 'gn_release_x64',
'V8 Linux64 - beta branch - debug': 'gn_debug_x64',
'V8 Linux64 - stable branch': 'gn_release_x64',
'V8 Linux64 - stable branch - debug': 'gn_debug_x64',
'V8 arm - sim - beta branch': 'gyp_release_simulate_arm',
'V8 arm - sim - beta branch - debug': 'gyp_debug_simulate_arm',
'V8 arm - sim - stable branch': 'gyp_release_simulate_arm',
'V8 arm - sim - stable branch - debug': 'gyp_debug_simulate_arm',
'V8 mips64el - sim - beta branch': 'gyp_release_simulate_mips64el',
'V8 mips64el - sim - stable branch': 'gyp_release_simulate_mips64el',
'V8 mipsel - sim - beta branch': 'gyp_release_simulate_mipsel',
'V8 mipsel - sim - stable branch': 'gyp_release_simulate_mipsel',
'V8 arm - sim - beta branch': 'gn_release_simulate_arm',
'V8 arm - sim - beta branch - debug': 'gn_debug_simulate_arm',
'V8 arm - sim - stable branch': 'gn_release_simulate_arm',
'V8 arm - sim - stable branch - debug': 'gn_debug_simulate_arm',
'V8 mips64el - sim - beta branch': 'gn_release_simulate_mips64el',
'V8 mips64el - sim - stable branch': 'gn_release_simulate_mips64el',
'V8 mipsel - sim - beta branch': 'gn_release_simulate_mipsel',
'V8 mipsel - sim - stable branch': 'gn_release_simulate_mipsel',
'V8 ppc - sim - beta branch': 'gyp_release_simulate_ppc',
'V8 ppc - sim - stable branch': 'gyp_release_simulate_ppc',
'V8 ppc64 - sim - beta branch': 'gyp_release_simulate_ppc64',
@ -143,8 +150,8 @@
'v8_linux_avx2_dbg': 'gn_debug_x86_trybot',
'v8_linux_nodcheck_rel_ng': 'gn_release_x86_minimal_symbols',
'v8_linux_dbg_ng': 'gn_debug_x86_trybot',
'v8_linux_noi18n_rel_ng': 'gyp_release_x86_no_i18n_trybot',
'v8_linux_gc_stress_dbg': 'gyp_debug_x86_trybot',
'v8_linux_noi18n_rel_ng': 'gn_release_x86_no_i18n_trybot',
'v8_linux_gc_stress_dbg': 'gn_debug_x86_trybot',
'v8_linux_nosnap_rel': 'gn_release_x86_no_snap_trybot',
'v8_linux_nosnap_dbg': 'gn_debug_x86_no_snap_trybot',
'v8_linux_gcc_compile_rel': 'gn_release_x86_gcc_minimal_symbols',
@ -153,34 +160,34 @@
'v8_linux64_gyp_rel_ng': 'gyp_release_x64',
'v8_linux64_avx2_rel_ng': 'gn_release_x64_trybot',
'v8_linux64_avx2_dbg': 'gn_debug_x64_trybot',
'v8_linux64_asan_rel_ng': 'gyp_release_x64_asan_minimal_symbols',
'v8_linux64_asan_rel_ng': 'gn_release_x64_asan_minimal_symbols',
'v8_linux64_msan_rel': 'gn_release_simulate_arm64_msan_minimal_symbols',
'v8_linux64_sanitizer_coverage_rel':
'gyp_release_x64_asan_minimal_symbols_coverage',
'v8_linux64_tsan_rel': 'gn_release_x64_tsan_minimal_symbols',
'v8_win_dbg': 'gyp_debug_x86_trybot',
'v8_win_compile_dbg': 'gyp_debug_x86_trybot',
'v8_win_rel_ng': 'gyp_release_x86_trybot',
'v8_win_dbg': 'gn_debug_x86_trybot',
'v8_win_compile_dbg': 'gn_debug_x86_trybot',
'v8_win_rel_ng': 'gn_release_x86_trybot',
'v8_win_nosnap_shared_rel_ng':
'gyp_release_x86_no_snap_shared_minimal_symbols',
'v8_win64_dbg': 'gyp_debug_x64_minimal_symbols',
'v8_win64_rel_ng': 'gyp_release_x64_trybot',
'gn_release_x86_no_snap_shared_minimal_symbols',
'v8_win64_dbg': 'gn_debug_x64_minimal_symbols',
'v8_win64_rel_ng': 'gn_release_x64_trybot',
'v8_mac_rel_ng': 'gn_release_x86_trybot',
'v8_mac_dbg': 'gn_debug_x86_trybot',
'v8_mac_gc_stress_dbg': 'gn_debug_x86_trybot',
'v8_mac64_rel': 'gn_release_x64_trybot',
'v8_mac64_dbg': 'gn_debug_x64_minimal_symbols',
'v8_mac64_asan_rel': 'gyp_release_x64_asan',
'v8_linux_arm_rel_ng': 'gyp_release_simulate_arm_trybot',
'v8_linux_arm_dbg': 'gyp_debug_simulate_arm',
'v8_linux_arm_armv8a_rel': 'gyp_release_simulate_arm_trybot',
'v8_linux_arm_armv8a_dbg': 'gyp_debug_simulate_arm',
'v8_mac64_asan_rel': 'gn_release_x64_asan_no_lsan',
'v8_linux_arm_rel_ng': 'gn_release_simulate_arm_trybot',
'v8_linux_arm_dbg': 'gn_debug_simulate_arm',
'v8_linux_arm_armv8a_rel': 'gn_release_simulate_arm_trybot',
'v8_linux_arm_armv8a_dbg': 'gn_debug_simulate_arm',
'v8_linux_arm64_rel_ng': 'gn_release_simulate_arm64_trybot',
'v8_linux_arm64_dbg': 'gn_debug_simulate_arm64',
'v8_linux_arm64_gc_stress_dbg': 'gn_debug_simulate_arm64',
'v8_linux_mipsel_compile_rel': 'gyp_release_simulate_mipsel',
'v8_linux_mips64el_compile_rel': 'gyp_release_simulate_mips64el',
'v8_android_arm_compile_rel': 'gyp_release_android_arm',
'v8_linux_mipsel_compile_rel': 'gn_release_simulate_mipsel',
'v8_linux_mips64el_compile_rel': 'gn_release_simulate_mips64el',
'v8_android_arm_compile_rel': 'gn_release_android_arm',
},
},
@ -189,6 +196,20 @@
# gyp/gn, release/debug, arch type, other values alphabetically.
'configs': {
# Developer default configs.
'default_debug_arm': [
'gn', 'debug', 'simulate_arm', 'v8_enable_slow_dchecks',
'v8_full_debug'],
'default_optdebug_arm': [
'gn', 'debug', 'simulate_arm', 'v8_enable_slow_dchecks'],
'default_release_arm': [
'gn', 'release', 'simulate_arm'],
'default_debug_arm64': [
'gn', 'debug', 'simulate_arm64', 'v8_enable_slow_dchecks',
'v8_full_debug'],
'default_optdebug_arm64': [
'gn', 'debug', 'simulate_arm64', 'v8_enable_slow_dchecks'],
'default_release_arm64': [
'gn', 'release', 'simulate_arm64'],
'default_debug_x64': [
'gn', 'debug', 'x64', 'v8_enable_slow_dchecks', 'v8_full_debug'],
'default_optdebug_x64': [
@ -204,12 +225,24 @@
# GN debug configs for simulators.
'gn_debug_simulate_arm': [
'gn', 'debug_bot', 'simulate_arm', 'swarming'],
'gn_debug_simulate_arm_asan_edge': [
'gn', 'debug_bot', 'simulate_arm', 'asan', 'edge'],
'gn_debug_simulate_arm64': [
'gn', 'debug_bot', 'simulate_arm64', 'swarming'],
'gn_debug_simulate_arm64_asan_edge': [
'gn', 'debug_bot', 'simulate_arm64', 'asan', 'lsan', 'edge'],
'gn_debug_simulate_arm64_no_snap': [
'gn', 'debug_bot', 'simulate_arm64', 'swarming', 'v8_snapshot_none'],
'gn_debug_simulate_mipsel_asan_edge': [
'gn', 'debug_bot', 'simulate_mipsel', 'asan', 'edge'],
# GN release configs for simulators.
'gn_release_simulate_arm': [
'gn', 'release_bot', 'simulate_arm', 'swarming'],
'gn_release_simulate_arm_trybot': [
'gn', 'release_trybot', 'simulate_arm', 'swarming'],
'gn_release_simulate_arm64': [
'gn', 'release_bot', 'simulate_arm64', 'swarming'],
'gn_release_simulate_arm64_msan': [
@ -219,12 +252,44 @@
'swarming'],
'gn_release_simulate_arm64_trybot': [
'gn', 'release_trybot', 'simulate_arm64', 'swarming'],
'gn_release_simulate_mipsel': [
'gn', 'release_bot', 'simulate_mipsel', 'swarming'],
'gn_release_simulate_mips64el': [
'gn', 'release_bot', 'simulate_mips64el', 'swarming'],
# GN debug configs for arm.
'gn_debug_arm': [
'gn', 'debug_bot', 'arm', 'crosscompile', 'hard_float', 'swarming'],
# GN release configs for arm.
'gn_release_arm': [
'gn', 'release_bot', 'arm', 'crosscompile', 'hard_float', 'swarming'],
'gn_release_android_arm': [
'gn', 'release_bot', 'arm', 'android', 'crosscompile',
'minimal_symbols', 'swarming'],
'gn_release_android_arm64': [
'gn', 'release_bot', 'arm64', 'android', 'crosscompile',
'minimal_symbols', 'swarming'],
# GN release configs for x64.
'gn_release_x64': [
'gn', 'release_bot', 'x64', 'swarming'],
'gn_release_x64_asan': [
'gn', 'release_bot', 'x64', 'asan', 'lsan', 'swarming'],
'gn_release_x64_asan_minimal_symbols': [
'gn', 'release_bot', 'x64', 'asan', 'lsan', 'minimal_symbols',
'swarming'],
'gn_release_x64_asan_no_lsan': [
'gn', 'release_bot', 'x64', 'asan', 'swarming'],
'gn_release_x64_asan_symbolized_edge_verify_heap': [
'gn', 'release_bot', 'x64', 'asan', 'edge', 'lsan', 'symbolized',
'v8_verify_heap'],
'gn_release_x64_clang': [
'gn', 'release_bot', 'x64', 'clang', 'swarming'],
'gn_release_x64_internal': [
'gn', 'release_bot', 'x64', 'swarming', 'v8_snapshot_internal'],
'gn_release_x64_minimal_symbols': [
'gn', 'release_bot', 'x64', 'minimal_symbols', 'swarming'],
'gn_release_x64_trybot': [
'gn', 'release_trybot', 'x64', 'swarming'],
'gn_release_x64_tsan': [
@ -235,6 +300,8 @@
# GN debug configs for x64.
'gn_debug_x64': [
'gn', 'debug_bot', 'x64', 'swarming'],
'gn_debug_x64_asan_edge': [
'gn', 'debug_bot', 'x64', 'asan', 'lsan', 'edge'],
'gn_debug_x64_custom': [
'gn', 'debug_bot', 'x64', 'swarming', 'v8_snapshot_custom'],
'gn_debug_x64_minimal_symbols': [
@ -247,6 +314,10 @@
# GN debug configs for x86.
'gn_debug_x86': [
'gn', 'debug_bot', 'x86', 'swarming'],
'gn_debug_x86_minimal_symbols': [
'gn', 'debug_bot', 'x86', 'minimal_symbols', 'swarming'],
'gn_debug_x86_no_i18n': [
'gn', 'debug_bot', 'x86', 'v8_no_i18n'],
'gn_debug_x86_no_snap': [
'gn', 'debug_bot', 'x86', 'swarming', 'v8_snapshot_none'],
'gn_debug_x86_no_snap_trybot': [
@ -267,8 +338,13 @@
'gn', 'release_trybot', 'x86', 'gcmole', 'swarming'],
'gn_release_x86_minimal_symbols': [
'gn', 'release_bot', 'x86', 'minimal_symbols', 'swarming'],
'gn_release_x86_no_i18n_trybot': [
'gn', 'release_trybot', 'x86', 'swarming', 'v8_no_i18n'],
'gn_release_x86_no_snap': [
'gn', 'release_bot', 'x86', 'swarming', 'v8_snapshot_none'],
'gn_release_x86_no_snap_shared_minimal_symbols': [
'gn', 'release', 'x86', 'goma', 'minimal_symbols', 'shared', 'swarming',
'v8_snapshot_none'],
'gn_release_x86_no_snap_trybot': [
'gn', 'release_trybot', 'x86', 'swarming', 'v8_snapshot_none'],
'gn_release_x86_shared_verify_heap': [
@ -276,65 +352,25 @@
'gn_release_x86_trybot': [
'gn', 'release_trybot', 'x86', 'swarming'],
# Gyp debug configs for arm.
'gyp_debug_arm': [
'gyp', 'debug_bot', 'arm', 'crosscompile', 'hard_float', 'swarming'],
# Gyp debug configs for simulators.
'gyp_debug_simulate_arm': [
'gyp', 'debug_bot', 'simulate_arm', 'swarming'],
'gyp_debug_simulate_arm_asan_edge': [
'gyp', 'debug_bot', 'simulate_arm', 'asan', 'edge'],
'gyp_debug_simulate_arm64_asan_edge': [
'gyp', 'debug_bot', 'simulate_arm64', 'asan', 'lsan', 'edge'],
'gyp_debug_simulate_mipsel_asan_edge': [
'gyp', 'debug_bot', 'simulate_mipsel', 'asan', 'edge'],
'gyp_debug_simulate_x87_no_snap': [
'gyp', 'debug_bot', 'simulate_x87', 'swarming', 'v8_snapshot_none'],
# Gyp debug configs for x64.
'gyp_debug_x64_asan_edge': [
'gyp', 'debug_bot', 'x64', 'asan', 'lsan', 'edge'],
'gyp_debug_x64_minimal_symbols': [
'gyp', 'debug_bot', 'x64', 'minimal_symbols', 'swarming'],
# Gyp debug configs for x86.
'gyp_debug_x86': [
'gyp', 'debug_bot', 'x86', 'swarming'],
'gyp_debug_x86_minimal_symbols': [
'gyp', 'debug_bot', 'x86', 'minimal_symbols', 'swarming'],
'gyp_debug_x86_trybot': [
'gyp', 'debug_trybot', 'x86', 'swarming'],
'gyp_debug_x86_no_i18n': [
'gyp', 'debug_bot', 'x86', 'v8_no_i18n'],
'gyp_debug_x86_vtunejit': [
'gyp', 'debug_bot', 'x86', 'v8_enable_vtunejit'],
'gyp_full_debug_x86': [
'gyp', 'debug', 'x86', 'goma', 'static', 'v8_enable_slow_dchecks',
'v8_full_debug'],
# Gyp release configs for arm.
'gyp_release_arm': [
'gyp', 'release_bot', 'arm', 'crosscompile', 'hard_float', 'swarming'],
'gyp_release_android_arm': [
'gyp', 'release_bot', 'arm', 'android', 'crosscompile', 'swarming'],
'gyp_release_android_arm64': [
'gyp', 'release_bot', 'arm64', 'android', 'crosscompile', 'swarming'],
# Gyp release configs for mips.
'gyp_release_mips_no_snap_no_i18n': [
'gyp', 'release', 'mips', 'crosscompile', 'static', 'v8_no_i18n',
'v8_snapshot_none'],
# Gyp release configs for simulators.
'gyp_release_simulate_arm': [
'gyp', 'release_bot', 'simulate_arm', 'swarming'],
'gyp_release_simulate_arm_trybot': [
'gyp', 'release_trybot', 'simulate_arm', 'swarming'],
'gyp_release_simulate_mipsel': [
'gyp', 'release_bot', 'simulate_mipsel', 'swarming'],
'gyp_release_simulate_mips64el': [
'gyp', 'release_bot', 'simulate_mips64el', 'swarming'],
'gyp_release_simulate_ppc': [
'gyp', 'release_bot', 'simulate_ppc', 'swarming'],
'gyp_release_simulate_ppc64': [
@ -347,44 +383,21 @@
# Gyp release configs for x64.
'gyp_release_x64': [
'gyp', 'release_bot', 'x64', 'swarming'],
'gyp_release_x64_asan': [
'gyp', 'release_bot', 'x64', 'asan', 'lsan', 'swarming'],
'gyp_release_x64_asan_minimal_symbols': [
'gyp', 'release_bot', 'x64', 'asan', 'lsan', 'minimal_symbols',
'swarming'],
'gyp_release_x64_asan_minimal_symbols_coverage': [
'gyp', 'release_bot', 'x64', 'asan', 'bb', 'coverage', 'lsan',
'minimal_symbols', 'swarming'],
'gyp_release_x64_asan_symbolized_edge_verify_heap': [
'gyp', 'release_bot', 'x64', 'asan', 'edge', 'lsan', 'symbolized',
'v8_verify_heap'],
'gyp_release_x64_cfi_symbolized': [
'gyp', 'release_bot', 'x64', 'cfi', 'swarming', 'symbolized'],
'gyp_release_x64_clang': [
'gyp', 'release_bot', 'x64', 'clang', 'swarming'],
'gyp_release_x64_gcc_coverage': [
'gyp', 'release_bot', 'x64', 'coverage', 'gcc'],
'gyp_release_x64_minimal_symbols': [
'gyp', 'release_bot', 'x64', 'minimal_symbols', 'swarming'],
'gyp_release_x64_trybot': [
'gyp', 'release_trybot', 'x64', 'swarming'],
# Gyp release configs for x86.
'gyp_release_x86_disassembler': [
'gyp', 'release_bot', 'x86', 'v8_enable_disassembler'],
'gyp_release_x86_interpreted_regexp': [
'gyp', 'release_bot', 'x86', 'v8_interpreted_regexp'],
'gyp_release_x86_minimal_symbols': [
'gyp', 'release_bot', 'x86', 'minimal_symbols', 'swarming'],
'gyp_release_x86_no_i18n_trybot': [
'gyp', 'release_trybot', 'x86', 'swarming', 'v8_no_i18n'],
'gyp_release_x86_no_snap_shared_minimal_symbols': [
'gyp', 'release', 'x86', 'goma', 'minimal_symbols', 'shared', 'swarming',
'v8_snapshot_none'],
'gyp_release_x86_predictable': [
'gyp', 'release_bot', 'x86', 'v8_enable_verify_predictable'],
'gyp_release_x86_trybot': [
'gyp', 'release_trybot', 'x86', 'swarming'],
},
'mixins': {
@ -444,7 +457,7 @@
'debug_bot': {
'mixins': [
'debug', 'static', 'goma', 'v8_enable_slow_dchecks',
'debug', 'shared', 'goma', 'v8_enable_slow_dchecks',
'v8_optimized_debug'],
},
@ -533,7 +546,8 @@
},
'simulate_mipsel': {
'gn_args': 'target_cpu="x86" v8_target_cpu="mipsel"',
'gn_args':
'target_cpu="x86" v8_target_cpu="mipsel" mips_arch_variant="r2"',
'gyp_defines': 'target_arch=ia32 v8_target_arch=mipsel',
},
@ -577,9 +591,9 @@
'gyp_defines': 'test_isolation_mode=prepare',
},
# TODO(machenbach): Remove the symbolized config after the bots are gone.
'symbolized': {
'gn_args': 'symbolized=true',
'gn_args': 'v8_no_inline=true',
'gyp_defines':
'release_extra_cflags="-fno-inline-functions -fno-inline"',
},
@ -595,8 +609,8 @@
},
'v8_no_i18n': {
'gn_args': 'v8_enable_i18n_support=false',
'gyp_defines': 'v8_enable_i18n_support=0',
'gn_args': 'v8_enable_i18n_support=false icu_use_data_file=false',
'gyp_defines': 'v8_enable_i18n_support=0 icu_use_data_file_flag=0',
},
'v8_enable_disassembler': {

2
deps/v8/src/address-map.cc vendored

@ -13,7 +13,7 @@ namespace internal {
RootIndexMap::RootIndexMap(Isolate* isolate) {
map_ = isolate->root_index_map();
if (map_ != NULL) return;
map_ = new base::HashMap(base::HashMap::PointersMatch);
map_ = new base::HashMap();
for (uint32_t i = 0; i < Heap::kStrongRootListLength; i++) {
Heap::RootListIndex root_index = static_cast<Heap::RootListIndex>(i);
Object* root = isolate->heap()->root(root_index);

4
deps/v8/src/address-map.h vendored

@ -189,9 +189,7 @@ class SerializerReference {
class SerializerReferenceMap : public AddressMapBase {
public:
SerializerReferenceMap()
: no_allocation_(),
map_(base::HashMap::PointersMatch),
attached_reference_index_(0) {}
: no_allocation_(), map_(), attached_reference_index_(0) {}
SerializerReference Lookup(HeapObject* obj) {
base::HashMap::Entry* entry = LookupEntry(&map_, obj, false);

6
deps/v8/src/allocation.h vendored

@ -13,10 +13,10 @@ namespace internal {
// Called when allocation routines fail to allocate.
// This function should not return, but should terminate the current
// processing.
void FatalProcessOutOfMemory(const char* message);
V8_EXPORT_PRIVATE void FatalProcessOutOfMemory(const char* message);
// Superclass for classes managed with new & delete.
class Malloced {
class V8_EXPORT_PRIVATE Malloced {
public:
void* operator new(size_t size) { return New(size); }
void operator delete(void* p) { Delete(p); }
@ -72,7 +72,7 @@ void DeleteArray(T* array) {
// The normal strdup functions use malloc. These versions of StrDup
// and StrNDup uses new and calls the FatalProcessOutOfMemory handler
// if allocation fails.
char* StrDup(const char* str);
V8_EXPORT_PRIVATE char* StrDup(const char* str);
char* StrNDup(const char* str, int n);

41
deps/v8/src/api-arguments-inl.h vendored

@ -20,8 +20,6 @@ namespace internal {
Handle<Name> name) { \
Isolate* isolate = this->isolate(); \
RuntimeCallTimerScope timer(isolate, &RuntimeCallStats::Function); \
TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED( \
isolate, &tracing::TraceEventStatsTable::Function); \
VMState<EXTERNAL> state(isolate); \
ExternalCallbackScope call_scope(isolate, FUNCTION_ADDR(f)); \
PropertyCallbackInfo<ApiReturn> info(begin()); \
@ -46,8 +44,6 @@ FOR_EACH_CALLBACK_TABLE_MAPPING_1_NAME(WRITE_CALL_1_NAME)
uint32_t index) { \
Isolate* isolate = this->isolate(); \
RuntimeCallTimerScope timer(isolate, &RuntimeCallStats::Function); \
TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED( \
isolate, &tracing::TraceEventStatsTable::Function); \
VMState<EXTERNAL> state(isolate); \
ExternalCallbackScope call_scope(isolate, FUNCTION_ADDR(f)); \
PropertyCallbackInfo<ApiReturn> info(begin()); \
@ -68,9 +64,6 @@ Handle<Object> PropertyCallbackArguments::Call(
Isolate* isolate = this->isolate();
RuntimeCallTimerScope timer(
isolate, &RuntimeCallStats::GenericNamedPropertySetterCallback);
TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
isolate,
&tracing::TraceEventStatsTable::GenericNamedPropertySetterCallback);
VMState<EXTERNAL> state(isolate);
ExternalCallbackScope call_scope(isolate, FUNCTION_ADDR(f));
PropertyCallbackInfo<v8::Value> info(begin());
@ -80,14 +73,27 @@ Handle<Object> PropertyCallbackArguments::Call(
return GetReturnValue<Object>(isolate);
}
Handle<Object> PropertyCallbackArguments::Call(
GenericNamedPropertyDefinerCallback f, Handle<Name> name,
const v8::PropertyDescriptor& desc) {
Isolate* isolate = this->isolate();
RuntimeCallTimerScope timer(
isolate, &RuntimeCallStats::GenericNamedPropertyDefinerCallback);
VMState<EXTERNAL> state(isolate);
ExternalCallbackScope call_scope(isolate, FUNCTION_ADDR(f));
PropertyCallbackInfo<v8::Value> info(begin());
LOG(isolate,
ApiNamedPropertyAccess("interceptor-named-define", holder(), *name));
f(v8::Utils::ToLocal(name), desc, info);
return GetReturnValue<Object>(isolate);
}
Handle<Object> PropertyCallbackArguments::Call(IndexedPropertySetterCallback f,
uint32_t index,
Handle<Object> value) {
Isolate* isolate = this->isolate();
RuntimeCallTimerScope timer(isolate,
&RuntimeCallStats::IndexedPropertySetterCallback);
TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
isolate, &tracing::TraceEventStatsTable::IndexedPropertySetterCallback);
VMState<EXTERNAL> state(isolate);
ExternalCallbackScope call_scope(isolate, FUNCTION_ADDR(f));
PropertyCallbackInfo<v8::Value> info(begin());
@ -97,13 +103,26 @@ Handle<Object> PropertyCallbackArguments::Call(IndexedPropertySetterCallback f,
return GetReturnValue<Object>(isolate);
}
Handle<Object> PropertyCallbackArguments::Call(
IndexedPropertyDefinerCallback f, uint32_t index,
const v8::PropertyDescriptor& desc) {
Isolate* isolate = this->isolate();
RuntimeCallTimerScope timer(
isolate, &RuntimeCallStats::IndexedPropertyDefinerCallback);
VMState<EXTERNAL> state(isolate);
ExternalCallbackScope call_scope(isolate, FUNCTION_ADDR(f));
PropertyCallbackInfo<v8::Value> info(begin());
LOG(isolate,
ApiIndexedPropertyAccess("interceptor-indexed-define", holder(), index));
f(index, desc, info);
return GetReturnValue<Object>(isolate);
}
void PropertyCallbackArguments::Call(AccessorNameSetterCallback f,
Handle<Name> name, Handle<Object> value) {
Isolate* isolate = this->isolate();
RuntimeCallTimerScope timer(isolate,
&RuntimeCallStats::AccessorNameSetterCallback);
TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
isolate, &tracing::TraceEventStatsTable::AccessorNameSetterCallback);
VMState<EXTERNAL> state(isolate);
ExternalCallbackScope call_scope(isolate, FUNCTION_ADDR(f));
PropertyCallbackInfo<void> info(begin());

4
deps/v8/src/api-arguments.cc vendored

@ -13,8 +13,6 @@ namespace internal {
Handle<Object> FunctionCallbackArguments::Call(FunctionCallback f) {
Isolate* isolate = this->isolate();
RuntimeCallTimerScope timer(isolate, &RuntimeCallStats::FunctionCallback);
TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
isolate, &internal::tracing::TraceEventStatsTable::FunctionCallback);
VMState<EXTERNAL> state(isolate);
ExternalCallbackScope call_scope(isolate, FUNCTION_ADDR(f));
FunctionCallbackInfo<v8::Value> info(begin(), argv_, argc_);
@ -26,8 +24,6 @@ Handle<JSObject> PropertyCallbackArguments::Call(
IndexedPropertyEnumeratorCallback f) {
Isolate* isolate = this->isolate();
RuntimeCallTimerScope timer(isolate, &RuntimeCallStats::PropertyCallback);
TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
isolate, &internal::tracing::TraceEventStatsTable::PropertyCallback);
VMState<EXTERNAL> state(isolate);
ExternalCallbackScope call_scope(isolate, FUNCTION_ADDR(f));
PropertyCallbackInfo<v8::Array> info(begin());

7
deps/v8/src/api-arguments.h vendored

@ -119,9 +119,16 @@ class PropertyCallbackArguments
inline Handle<Object> Call(GenericNamedPropertySetterCallback f,
Handle<Name> name, Handle<Object> value);
inline Handle<Object> Call(GenericNamedPropertyDefinerCallback f,
Handle<Name> name,
const v8::PropertyDescriptor& desc);
inline Handle<Object> Call(IndexedPropertySetterCallback f, uint32_t index,
Handle<Object> value);
inline Handle<Object> Call(IndexedPropertyDefinerCallback f, uint32_t index,
const v8::PropertyDescriptor& desc);
inline void Call(AccessorNameSetterCallback f, Handle<Name> name,
Handle<Object> value);
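A hypothetical embedder-side definer interceptor matching the GenericNamedPropertyDefinerCallback plumbing added above; only the callback signature follows from this diff, the function name and body are assumptions:

#include "v8.h"

void DefinerInterceptor(v8::Local<v8::Name> name,
                        const v8::PropertyDescriptor& desc,
                        const v8::PropertyCallbackInfo<v8::Value>& info) {
  // Leaving the return value unset lets V8 perform the definition itself;
  // setting it (e.g. info.GetReturnValue().Set(true)) marks the request as
  // intercepted by the embedder.
}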

92
deps/v8/src/api-natives.cc vendored

@ -17,42 +17,39 @@ namespace {
class InvokeScope {
public:
explicit InvokeScope(Isolate* isolate) : save_context_(isolate) {}
explicit InvokeScope(Isolate* isolate)
: isolate_(isolate), save_context_(isolate) {}
~InvokeScope() {
Isolate* isolate = save_context_.isolate();
bool has_exception = isolate->has_pending_exception();
bool has_exception = isolate_->has_pending_exception();
if (has_exception) {
isolate->ReportPendingMessages();
isolate_->ReportPendingMessages();
} else {
isolate->clear_pending_message();
isolate_->clear_pending_message();
}
}
private:
Isolate* isolate_;
SaveContext save_context_;
};
enum class CacheCheck { kCheck, kSkip };
MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
Handle<ObjectTemplateInfo> data,
Handle<JSReceiver> new_target,
bool is_hidden_prototype);
MaybeHandle<JSObject> InstantiateObject(
Isolate* isolate, Handle<ObjectTemplateInfo> data,
Handle<JSReceiver> new_target, CacheCheck cache_check = CacheCheck::kCheck,
bool is_hidden_prototype = false);
MaybeHandle<JSFunction> InstantiateFunction(
Isolate* isolate, Handle<FunctionTemplateInfo> data,
CacheCheck cache_check = CacheCheck::kCheck,
Handle<Name> name = Handle<Name>());
MaybeHandle<JSFunction> InstantiateFunction(Isolate* isolate,
Handle<FunctionTemplateInfo> data,
Handle<Name> name = Handle<Name>());
MaybeHandle<Object> Instantiate(Isolate* isolate, Handle<Object> data,
Handle<Name> name = Handle<Name>()) {
if (data->IsFunctionTemplateInfo()) {
return InstantiateFunction(isolate,
Handle<FunctionTemplateInfo>::cast(data),
CacheCheck::kCheck, name);
Handle<FunctionTemplateInfo>::cast(data), name);
} else if (data->IsObjectTemplateInfo()) {
return InstantiateObject(isolate, Handle<ObjectTemplateInfo>::cast(data),
Handle<JSReceiver>());
Handle<JSReceiver>(), false);
} else {
return data;
}
@ -199,15 +196,14 @@ MaybeHandle<JSObject> ConfigureInstance(Isolate* isolate, Handle<JSObject> obj,
Handle<FixedArray> array =
isolate->factory()->NewFixedArray(max_number_of_properties);
info = *data;
while (info != nullptr) {
for (Handle<TemplateInfoT> temp(*data); *temp != nullptr;
temp = handle(temp->GetParent(isolate), isolate)) {
// Accumulate accessors.
Object* maybe_properties = info->property_accessors();
Object* maybe_properties = temp->property_accessors();
if (!maybe_properties->IsUndefined(isolate)) {
valid_descriptors = AccessorInfo::AppendUnique(
handle(maybe_properties, isolate), array, valid_descriptors);
}
info = info->GetParent(isolate);
}
// Install accumulated accessors.
@ -339,17 +335,9 @@ bool IsSimpleInstantiation(Isolate* isolate, ObjectTemplateInfo* info,
return fun->context()->native_context() == isolate->raw_native_context();
}
MaybeHandle<JSObject> InstantiateObjectWithInvokeScope(
Isolate* isolate, Handle<ObjectTemplateInfo> info,
Handle<JSReceiver> new_target) {
InvokeScope invoke_scope(isolate);
return InstantiateObject(isolate, info, new_target, CacheCheck::kSkip);
}
MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
Handle<ObjectTemplateInfo> info,
Handle<JSReceiver> new_target,
CacheCheck cache_check,
bool is_hidden_prototype) {
Handle<JSFunction> constructor;
int serial_number = Smi::cast(info->serial_number())->value();
@ -363,7 +351,7 @@ MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
}
// Fast path.
Handle<JSObject> result;
if (serial_number && cache_check == CacheCheck::kCheck) {
if (serial_number) {
if (ProbeInstantiationsCache(isolate, serial_number).ToHandle(&result)) {
return isolate->factory()->CopyJSObject(result);
}
@ -397,6 +385,7 @@ MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
if (info->immutable_proto()) {
JSObject::SetImmutableProto(object);
}
// TODO(dcarney): is this necessary?
JSObject::MigrateSlowToFast(result, 0, "ApiNatives::InstantiateObject");
if (serial_number) {
@ -406,18 +395,12 @@ MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
return result;
}
MaybeHandle<JSFunction> InstantiateFunctionWithInvokeScope(
Isolate* isolate, Handle<FunctionTemplateInfo> info) {
InvokeScope invoke_scope(isolate);
return InstantiateFunction(isolate, info, CacheCheck::kSkip);
}
MaybeHandle<JSFunction> InstantiateFunction(Isolate* isolate,
Handle<FunctionTemplateInfo> data,
CacheCheck cache_check,
Handle<Name> name) {
int serial_number = Smi::cast(data->serial_number())->value();
if (serial_number && cache_check == CacheCheck::kCheck) {
if (serial_number) {
Handle<JSObject> result;
if (ProbeInstantiationsCache(isolate, serial_number).ToHandle(&result)) {
return Handle<JSFunction>::cast(result);
@ -434,8 +417,7 @@ MaybeHandle<JSFunction> InstantiateFunction(Isolate* isolate,
InstantiateObject(
isolate,
handle(ObjectTemplateInfo::cast(prototype_templ), isolate),
Handle<JSReceiver>(), CacheCheck::kCheck,
data->hidden_prototype()),
Handle<JSReceiver>(), data->hidden_prototype()),
JSFunction);
}
Object* parent = data->parent_template();
@ -505,31 +487,17 @@ void AddPropertyToPropertyList(Isolate* isolate, Handle<TemplateInfo> templ,
} // namespace
MaybeHandle<JSFunction> ApiNatives::InstantiateFunction(
Handle<FunctionTemplateInfo> info) {
Isolate* isolate = info->GetIsolate();
int serial_number = Smi::cast(info->serial_number())->value();
if (serial_number) {
Handle<JSObject> result;
if (ProbeInstantiationsCache(isolate, serial_number).ToHandle(&result)) {
return Handle<JSFunction>::cast(result);
}
}
return InstantiateFunctionWithInvokeScope(isolate, info);
Handle<FunctionTemplateInfo> data) {
Isolate* isolate = data->GetIsolate();
InvokeScope invoke_scope(isolate);
return ::v8::internal::InstantiateFunction(isolate, data);
}
MaybeHandle<JSObject> ApiNatives::InstantiateObject(
Handle<ObjectTemplateInfo> info, Handle<JSReceiver> new_target) {
Isolate* isolate = info->GetIsolate();
int serial_number = Smi::cast(info->serial_number())->value();
if (serial_number && !new_target.is_null() &&
IsSimpleInstantiation(isolate, *info, *new_target)) {
// Fast path.
Handle<JSObject> result;
if (ProbeInstantiationsCache(isolate, serial_number).ToHandle(&result)) {
return isolate->factory()->CopyJSObject(result);
}
}
return InstantiateObjectWithInvokeScope(isolate, info, new_target);
Handle<ObjectTemplateInfo> data, Handle<JSReceiver> new_target) {
Isolate* isolate = data->GetIsolate();
InvokeScope invoke_scope(isolate);
return ::v8::internal::InstantiateObject(isolate, data, new_target, false);
}
MaybeHandle<JSObject> ApiNatives::InstantiateRemoteObject(

603
deps/v8/src/api.cc vendored

@ -24,6 +24,7 @@
#include "src/base/functional.h"
#include "src/base/platform/platform.h"
#include "src/base/platform/time.h"
#include "src/base/safe_conversions.h"
#include "src/base/utils/random-number-generator.h"
#include "src/bootstrapper.h"
#include "src/char-predicates-inl.h"
@ -68,6 +69,7 @@
#include "src/unicode-inl.h"
#include "src/v8.h"
#include "src/v8threads.h"
#include "src/value-serializer.h"
#include "src/version.h"
#include "src/vm-state-inl.h"
#include "src/wasm/wasm-module.h"
@ -77,9 +79,6 @@ namespace v8 {
#define LOG_API(isolate, class_name, function_name) \
i::RuntimeCallTimerScope _runtime_timer( \
isolate, &i::RuntimeCallStats::API_##class_name##_##function_name); \
TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED( \
isolate, &internal::tracing::TraceEventStatsTable:: \
API_##class_name##_##function_name); \
LOG(isolate, ApiEntryCall("v8::" #class_name "::" #function_name))
#define ENTER_V8(isolate) i::VMState<v8::OTHER> __state__((isolate))
@ -105,6 +104,16 @@ namespace v8 {
PREPARE_FOR_EXECUTION_GENERIC(isolate, context, class_name, function_name, \
bailout_value, HandleScopeClass, do_callback);
#define PREPARE_FOR_EXECUTION_WITH_CONTEXT_IN_RUNTIME_CALL_STATS_SCOPE( \
category, name, context, class_name, function_name, bailout_value, \
HandleScopeClass, do_callback) \
auto isolate = context.IsEmpty() \
? i::Isolate::Current() \
: reinterpret_cast<i::Isolate*>(context->GetIsolate()); \
TRACE_EVENT_CALL_STATS_SCOPED(isolate, category, name); \
PREPARE_FOR_EXECUTION_GENERIC(isolate, context, class_name, function_name, \
bailout_value, HandleScopeClass, do_callback);
#define PREPARE_FOR_EXECUTION_WITH_ISOLATE(isolate, class_name, function_name, \
T) \
PREPARE_FOR_EXECUTION_GENERIC(isolate, Local<Context>(), class_name, \
@ -126,6 +135,10 @@ namespace v8 {
PREPARE_FOR_EXECUTION_WITH_CONTEXT(context, class_name, function_name, \
Nothing<T>(), i::HandleScope, false)
#define PREPARE_FOR_EXECUTION_BOOL(context, class_name, function_name) \
PREPARE_FOR_EXECUTION_WITH_CONTEXT(context, class_name, function_name, \
false, i::HandleScope, false)
#define EXCEPTION_BAILOUT_CHECK_SCOPED(isolate, value) \
do { \
if (has_pending_exception) { \
@ -142,6 +155,8 @@ namespace v8 {
#define RETURN_ON_FAILED_EXECUTION_PRIMITIVE(T) \
EXCEPTION_BAILOUT_CHECK_SCOPED(isolate, Nothing<T>())
#define RETURN_ON_FAILED_EXECUTION_BOOL() \
EXCEPTION_BAILOUT_CHECK_SCOPED(isolate, false)
#define RETURN_TO_LOCAL_UNCHECKED(maybe_local, T) \
return maybe_local.FromMaybe(Local<T>());
@ -513,7 +528,8 @@ StartupData SnapshotCreator::CreateBlob(
// If we don't do this then we end up with a stray root pointing at the
// context even after we have disposed of the context.
isolate->heap()->CollectAllAvailableGarbage("mksnapshot");
isolate->heap()->CollectAllAvailableGarbage(
i::GarbageCollectionReason::kSnapshotCreator);
isolate->heap()->CompactWeakFixedArrays();
i::DisallowHeapAllocation no_gc_from_here_on;
@ -770,11 +786,6 @@ i::Object** V8::CopyPersistent(i::Object** obj) {
return result.location();
}
void V8::RegisterExternallyReferencedObject(i::Object** object,
i::Isolate* isolate) {
isolate->heap()->RegisterExternallyReferencedObject(object);
}
void V8::MakeWeak(i::Object** location, void* parameter,
int internal_field_index1, int internal_field_index2,
WeakCallbackInfo<void>::Callback weak_callback) {
@ -1503,12 +1514,17 @@ void ObjectTemplate::SetAccessor(v8::Local<Name> name,
signature, i::FLAG_disable_old_api_accessors);
}
template <typename Getter, typename Setter, typename Query, typename Deleter,
typename Enumerator>
template <typename Getter, typename Setter, typename Query, typename Descriptor,
typename Deleter, typename Enumerator, typename Definer>
static i::Handle<i::InterceptorInfo> CreateInterceptorInfo(
i::Isolate* isolate, Getter getter, Setter setter, Query query,
Deleter remover, Enumerator enumerator, Local<Value> data,
PropertyHandlerFlags flags) {
Descriptor descriptor, Deleter remover, Enumerator enumerator,
Definer definer, Local<Value> data, PropertyHandlerFlags flags) {
DCHECK(query == nullptr ||
descriptor == nullptr); // Either intercept attributes or descriptor.
DCHECK(query == nullptr ||
definer ==
nullptr); // Only use descriptor callback with definer callback.
auto obj = i::Handle<i::InterceptorInfo>::cast(
isolate->factory()->NewStruct(i::INTERCEPTOR_INFO_TYPE));
obj->set_flags(0);
@ -1516,8 +1532,10 @@ static i::Handle<i::InterceptorInfo> CreateInterceptorInfo(
if (getter != 0) SET_FIELD_WRAPPED(obj, set_getter, getter);
if (setter != 0) SET_FIELD_WRAPPED(obj, set_setter, setter);
if (query != 0) SET_FIELD_WRAPPED(obj, set_query, query);
if (descriptor != 0) SET_FIELD_WRAPPED(obj, set_descriptor, descriptor);
if (remover != 0) SET_FIELD_WRAPPED(obj, set_deleter, remover);
if (enumerator != 0) SET_FIELD_WRAPPED(obj, set_enumerator, enumerator);
if (definer != 0) SET_FIELD_WRAPPED(obj, set_definer, definer);
obj->set_can_intercept_symbols(
!(static_cast<int>(flags) &
static_cast<int>(PropertyHandlerFlags::kOnlyInterceptStrings)));
@ -1533,40 +1551,37 @@ static i::Handle<i::InterceptorInfo> CreateInterceptorInfo(
return obj;
}
template <typename Getter, typename Setter, typename Query, typename Deleter,
typename Enumerator>
static void ObjectTemplateSetNamedPropertyHandler(ObjectTemplate* templ,
Getter getter, Setter setter,
Query query, Deleter remover,
Enumerator enumerator,
Local<Value> data,
PropertyHandlerFlags flags) {
template <typename Getter, typename Setter, typename Query, typename Descriptor,
typename Deleter, typename Enumerator, typename Definer>
static void ObjectTemplateSetNamedPropertyHandler(
ObjectTemplate* templ, Getter getter, Setter setter, Query query,
Descriptor descriptor, Deleter remover, Enumerator enumerator,
Definer definer, Local<Value> data, PropertyHandlerFlags flags) {
i::Isolate* isolate = Utils::OpenHandle(templ)->GetIsolate();
ENTER_V8(isolate);
i::HandleScope scope(isolate);
auto cons = EnsureConstructor(isolate, templ);
EnsureNotInstantiated(cons, "ObjectTemplateSetNamedPropertyHandler");
auto obj = CreateInterceptorInfo(isolate, getter, setter, query, remover,
enumerator, data, flags);
auto obj = CreateInterceptorInfo(isolate, getter, setter, query, descriptor,
remover, enumerator, definer, data, flags);
cons->set_named_property_handler(*obj);
}
void ObjectTemplate::SetNamedPropertyHandler(
NamedPropertyGetterCallback getter, NamedPropertySetterCallback setter,
NamedPropertyQueryCallback query, NamedPropertyDeleterCallback remover,
NamedPropertyEnumeratorCallback enumerator, Local<Value> data) {
ObjectTemplateSetNamedPropertyHandler(
this, getter, setter, query, remover, enumerator, data,
this, getter, setter, query, nullptr, remover, enumerator, nullptr, data,
PropertyHandlerFlags::kOnlyInterceptStrings);
}
void ObjectTemplate::SetHandler(
const NamedPropertyHandlerConfiguration& config) {
ObjectTemplateSetNamedPropertyHandler(
this, config.getter, config.setter, config.query, config.deleter,
config.enumerator, config.data, config.flags);
this, config.getter, config.setter, config.query, config.descriptor,
config.deleter, config.enumerator, config.definer, config.data,
config.flags);
}
@ -1626,13 +1641,14 @@ void ObjectTemplate::SetAccessCheckCallbackAndHandler(
SET_FIELD_WRAPPED(info, set_callback, callback);
auto named_interceptor = CreateInterceptorInfo(
isolate, named_handler.getter, named_handler.setter, named_handler.query,
named_handler.deleter, named_handler.enumerator, named_handler.data,
named_handler.flags);
named_handler.descriptor, named_handler.deleter, named_handler.enumerator,
named_handler.definer, named_handler.data, named_handler.flags);
info->set_named_interceptor(*named_interceptor);
auto indexed_interceptor = CreateInterceptorInfo(
isolate, indexed_handler.getter, indexed_handler.setter,
indexed_handler.query, indexed_handler.deleter,
indexed_handler.enumerator, indexed_handler.data, indexed_handler.flags);
indexed_handler.query, indexed_handler.descriptor,
indexed_handler.deleter, indexed_handler.enumerator,
indexed_handler.definer, indexed_handler.data, indexed_handler.flags);
info->set_indexed_interceptor(*indexed_interceptor);
if (data.IsEmpty()) {
@ -1651,9 +1667,10 @@ void ObjectTemplate::SetHandler(
i::HandleScope scope(isolate);
auto cons = EnsureConstructor(isolate, this);
EnsureNotInstantiated(cons, "v8::ObjectTemplate::SetHandler");
auto obj = CreateInterceptorInfo(
isolate, config.getter, config.setter, config.query, config.deleter,
config.enumerator, config.data, config.flags);
auto obj = CreateInterceptorInfo(isolate, config.getter, config.setter,
config.query, config.descriptor,
config.deleter, config.enumerator,
config.definer, config.data, config.flags);
cons->set_indexed_property_handler(*obj);
}
@ -1834,17 +1851,19 @@ Local<Value> UnboundScript::GetSourceMappingURL() {
MaybeLocal<Value> Script::Run(Local<Context> context) {
PREPARE_FOR_EXECUTION_WITH_CALLBACK(context, Script, Run, Value)
PREPARE_FOR_EXECUTION_WITH_CONTEXT_IN_RUNTIME_CALL_STATS_SCOPE(
"v8", "V8.Execute", context, Script, Run, MaybeLocal<Value>(),
InternalEscapableScope, true);
i::HistogramTimerScope execute_timer(isolate->counters()->execute(), true);
i::AggregatingHistogramTimerScope timer(isolate->counters()->compile_lazy());
i::TimerEventScope<i::TimerEventExecute> timer_scope(isolate);
TRACE_EVENT_CALL_STATS_SCOPED(isolate, "v8", "V8.Execute");
auto fun = i::Handle<i::JSFunction>::cast(Utils::OpenHandle(this));
i::Handle<i::Object> receiver = isolate->global_proxy();
Local<Value> result;
has_pending_exception =
!ToLocal<Value>(i::Execution::Call(isolate, fun, receiver, 0, NULL),
&result);
has_pending_exception = !ToLocal<Value>(
i::Execution::Call(isolate, fun, receiver, 0, nullptr), &result);
RETURN_ON_FAILED_EXECUTION(Value);
RETURN_ESCAPED(result);
}
@ -1866,6 +1885,58 @@ Local<UnboundScript> Script::GetUnboundScript() {
i::Handle<i::SharedFunctionInfo>(i::JSFunction::cast(*obj)->shared()));
}
int Module::GetModuleRequestsLength() const {
i::Handle<i::Module> self = Utils::OpenHandle(this);
return self->info()->module_requests()->length();
}
Local<String> Module::GetModuleRequest(int i) const {
CHECK_GE(i, 0);
i::Handle<i::Module> self = Utils::OpenHandle(this);
i::Isolate* isolate = self->GetIsolate();
i::Handle<i::FixedArray> module_requests(self->info()->module_requests(),
isolate);
CHECK_LT(i, module_requests->length());
return ToApiHandle<String>(i::handle(module_requests->get(i), isolate));
}
void Module::SetEmbedderData(Local<Value> data) {
Utils::OpenHandle(this)->set_embedder_data(*Utils::OpenHandle(*data));
}
Local<Value> Module::GetEmbedderData() const {
auto self = Utils::OpenHandle(this);
return ToApiHandle<Value>(
i::handle(self->embedder_data(), self->GetIsolate()));
}
bool Module::Instantiate(Local<Context> context,
Module::ResolveCallback callback,
Local<Value> callback_data) {
PREPARE_FOR_EXECUTION_BOOL(context, Module, Instantiate);
has_pending_exception = !i::Module::Instantiate(
Utils::OpenHandle(this), context, callback, callback_data);
RETURN_ON_FAILED_EXECUTION_BOOL();
return true;
}
MaybeLocal<Value> Module::Evaluate(Local<Context> context) {
PREPARE_FOR_EXECUTION_WITH_CONTEXT_IN_RUNTIME_CALL_STATS_SCOPE(
"v8", "V8.Execute", context, Module, Evaluate, MaybeLocal<Value>(),
InternalEscapableScope, true);
i::HistogramTimerScope execute_timer(isolate->counters()->execute(), true);
i::AggregatingHistogramTimerScope timer(isolate->counters()->compile_lazy());
i::TimerEventScope<i::TimerEventExecute> timer_scope(isolate);
i::Handle<i::Module> self = Utils::OpenHandle(this);
// It's an API error to call Evaluate before Instantiate.
CHECK(self->code()->IsJSFunction());
Local<Value> result;
has_pending_exception = !ToLocal(i::Module::Evaluate(self), &result);
RETURN_ON_FAILED_EXECUTION(Value);
RETURN_ESCAPED(result);
}
MaybeLocal<UnboundScript> ScriptCompiler::CompileUnboundInternal(
Isolate* v8_isolate, Source* source, CompileOptions options,
@ -1976,16 +2047,16 @@ Local<Script> ScriptCompiler::Compile(
RETURN_TO_LOCAL_UNCHECKED(Compile(context, source, options), Script);
}
MaybeLocal<Module> ScriptCompiler::CompileModule(Isolate* isolate,
Source* source) {
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
MaybeLocal<Script> ScriptCompiler::CompileModule(Local<Context> context,
Source* source,
CompileOptions options) {
auto isolate = context->GetIsolate();
auto maybe = CompileUnboundInternal(isolate, source, options, true);
Local<UnboundScript> generic;
if (!maybe.ToLocal(&generic)) return MaybeLocal<Script>();
v8::Context::Scope scope(context);
return generic->BindToCurrentContext();
auto maybe = CompileUnboundInternal(isolate, source, kNoCompileOptions, true);
Local<UnboundScript> unbound;
if (!maybe.ToLocal(&unbound)) return MaybeLocal<Module>();
i::Handle<i::SharedFunctionInfo> shared = Utils::OpenHandle(*unbound);
return ToApiHandle<Module>(i_isolate->factory()->NewModule(shared));
}
@ -2084,7 +2155,13 @@ MaybeLocal<Function> ScriptCompiler::CompileFunctionInContext(
Utils::OpenHandle(*context_extensions[i]);
if (!extension->IsJSObject()) return Local<Function>();
i::Handle<i::JSFunction> closure(context->closure(), isolate);
context = factory->NewWithContext(closure, context, extension);
context = factory->NewWithContext(
closure, context,
i::ScopeInfo::CreateForWithScope(
isolate, context->IsNativeContext()
? i::Handle<i::ScopeInfo>::null()
: i::Handle<i::ScopeInfo>(context->scope_info())),
extension);
}
i::Handle<i::Object> name_obj;
@ -2138,6 +2215,9 @@ Local<Function> ScriptCompiler::CompileFunctionInContext(
ScriptCompiler::ScriptStreamingTask* ScriptCompiler::StartStreamingScript(
Isolate* v8_isolate, StreamedSource* source, CompileOptions options) {
if (!i::FLAG_script_streaming) {
return nullptr;
}
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
return new i::BackgroundParsingTask(source->impl(), options,
i::FLAG_stack_size, isolate);
@ -2171,17 +2251,19 @@ MaybeLocal<Script> ScriptCompiler::Compile(Local<Context> context,
}
source->info->set_script(script);
source->info->set_context(isolate->native_context());
// Create a canonical handle scope before internalizing parsed values if
// compiling bytecode. This is required for off-thread bytecode generation.
std::unique_ptr<i::CanonicalHandleScope> canonical;
if (i::FLAG_ignition) canonical.reset(new i::CanonicalHandleScope(isolate));
{
// Create a canonical handle scope if compiling ignition bytecode. This is
// required by the constant array builder to de-duplicate objects without
// dereferencing handles.
std::unique_ptr<i::CanonicalHandleScope> canonical;
if (i::FLAG_ignition) canonical.reset(new i::CanonicalHandleScope(isolate));
// Do the parsing tasks which need to be done on the main thread. This will
// also handle parse errors.
source->parser->Internalize(isolate, script,
source->info->literal() == nullptr);
// Do the parsing tasks which need to be done on the main thread. This will
// also handle parse errors.
source->parser->Internalize(isolate, script,
source->info->literal() == nullptr);
}
source->parser->HandleSourceURLComments(isolate, script);
i::Handle<i::SharedFunctionInfo> result;
@ -2192,9 +2274,10 @@ MaybeLocal<Script> ScriptCompiler::Compile(Local<Context> context,
}
has_pending_exception = result.is_null();
if (has_pending_exception) isolate->ReportPendingMessages();
RETURN_ON_FAILED_EXECUTION(Script);
source->info->clear_script(); // because script goes out of scope.
source->Release();
RETURN_ON_FAILED_EXECUTION(Script);
Local<UnboundScript> generic = ToApiHandle<UnboundScript>(result);
if (generic.IsEmpty()) return Local<Script>();
@ -2263,8 +2346,8 @@ v8::TryCatch::TryCatch()
ResetInternal();
// Special handling for simulators which have a separate JS stack.
js_stack_comparable_address_ =
reinterpret_cast<void*>(v8::internal::SimulatorStack::RegisterCTryCatch(
isolate_, v8::internal::GetCurrentStackPosition()));
reinterpret_cast<void*>(i::SimulatorStack::RegisterCTryCatch(
isolate_, i::GetCurrentStackPosition()));
isolate_->RegisterTryCatchHandler(this);
}
@ -2280,8 +2363,8 @@ v8::TryCatch::TryCatch(v8::Isolate* isolate)
ResetInternal();
// Special handling for simulators which have a separate JS stack.
js_stack_comparable_address_ =
reinterpret_cast<void*>(v8::internal::SimulatorStack::RegisterCTryCatch(
isolate_, v8::internal::GetCurrentStackPosition()));
reinterpret_cast<void*>(i::SimulatorStack::RegisterCTryCatch(
isolate_, i::GetCurrentStackPosition()));
isolate_->RegisterTryCatchHandler(this);
}
@ -2300,7 +2383,7 @@ v8::TryCatch::~TryCatch() {
isolate_->RestorePendingMessageFromTryCatch(this);
}
isolate_->UnregisterTryCatchHandler(this);
v8::internal::SimulatorStack::UnregisterCTryCatch(isolate_);
i::SimulatorStack::UnregisterCTryCatch(isolate_);
reinterpret_cast<Isolate*>(isolate_)->ThrowException(exc);
DCHECK(!isolate_->thread_local_top()->rethrowing_message_);
} else {
@ -2311,7 +2394,7 @@ v8::TryCatch::~TryCatch() {
isolate_->CancelScheduledExceptionFromTryCatch(this);
}
isolate_->UnregisterTryCatchHandler(this);
v8::internal::SimulatorStack::UnregisterCTryCatch(isolate_);
i::SimulatorStack::UnregisterCTryCatch(isolate_);
}
}
@ -2832,6 +2915,205 @@ MaybeLocal<String> JSON::Stringify(Local<Context> context,
RETURN_ESCAPED(result);
}
// --- V a l u e S e r i a l i z a t i o n ---
Maybe<bool> ValueSerializer::Delegate::WriteHostObject(Isolate* v8_isolate,
Local<Object> object) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
isolate->ScheduleThrow(*isolate->factory()->NewError(
isolate->error_function(), i::MessageTemplate::kDataCloneError,
Utils::OpenHandle(*object)));
return Nothing<bool>();
}
struct ValueSerializer::PrivateData {
explicit PrivateData(i::Isolate* i, ValueSerializer::Delegate* delegate)
: isolate(i), serializer(i, delegate) {}
i::Isolate* isolate;
i::ValueSerializer serializer;
};
ValueSerializer::ValueSerializer(Isolate* isolate)
: ValueSerializer(isolate, nullptr) {}
ValueSerializer::ValueSerializer(Isolate* isolate, Delegate* delegate)
: private_(
new PrivateData(reinterpret_cast<i::Isolate*>(isolate), delegate)) {}
ValueSerializer::~ValueSerializer() { delete private_; }
void ValueSerializer::WriteHeader() { private_->serializer.WriteHeader(); }
Maybe<bool> ValueSerializer::WriteValue(Local<Context> context,
Local<Value> value) {
PREPARE_FOR_EXECUTION_PRIMITIVE(context, ValueSerializer, WriteValue, bool);
i::Handle<i::Object> object = Utils::OpenHandle(*value);
Maybe<bool> result = private_->serializer.WriteObject(object);
has_pending_exception = result.IsNothing();
RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool);
return result;
}
std::vector<uint8_t> ValueSerializer::ReleaseBuffer() {
return private_->serializer.ReleaseBuffer();
}
void ValueSerializer::TransferArrayBuffer(uint32_t transfer_id,
Local<ArrayBuffer> array_buffer) {
private_->serializer.TransferArrayBuffer(transfer_id,
Utils::OpenHandle(*array_buffer));
}
void ValueSerializer::TransferSharedArrayBuffer(
uint32_t transfer_id, Local<SharedArrayBuffer> shared_array_buffer) {
private_->serializer.TransferArrayBuffer(
transfer_id, Utils::OpenHandle(*shared_array_buffer));
}
void ValueSerializer::WriteUint32(uint32_t value) {
private_->serializer.WriteUint32(value);
}
void ValueSerializer::WriteUint64(uint64_t value) {
private_->serializer.WriteUint64(value);
}
void ValueSerializer::WriteDouble(double value) {
private_->serializer.WriteDouble(value);
}
void ValueSerializer::WriteRawBytes(const void* source, size_t length) {
private_->serializer.WriteRawBytes(source, length);
}
MaybeLocal<Object> ValueDeserializer::Delegate::ReadHostObject(
Isolate* v8_isolate) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(v8_isolate);
isolate->ScheduleThrow(*isolate->factory()->NewError(
isolate->error_function(),
i::MessageTemplate::kDataCloneDeserializationError));
return MaybeLocal<Object>();
}
struct ValueDeserializer::PrivateData {
PrivateData(i::Isolate* i, i::Vector<const uint8_t> data, Delegate* delegate)
: isolate(i), deserializer(i, data, delegate) {}
i::Isolate* isolate;
i::ValueDeserializer deserializer;
bool has_aborted = false;
bool supports_legacy_wire_format = false;
};
ValueDeserializer::ValueDeserializer(Isolate* isolate, const uint8_t* data,
size_t size)
: ValueDeserializer(isolate, data, size, nullptr) {}
ValueDeserializer::ValueDeserializer(Isolate* isolate, const uint8_t* data,
size_t size, Delegate* delegate) {
if (base::IsValueInRangeForNumericType<int>(size)) {
private_ = new PrivateData(
reinterpret_cast<i::Isolate*>(isolate),
i::Vector<const uint8_t>(data, static_cast<int>(size)), delegate);
} else {
private_ = new PrivateData(reinterpret_cast<i::Isolate*>(isolate),
i::Vector<const uint8_t>(nullptr, 0), nullptr);
private_->has_aborted = true;
}
}
ValueDeserializer::~ValueDeserializer() { delete private_; }
Maybe<bool> ValueDeserializer::ReadHeader(Local<Context> context) {
PREPARE_FOR_EXECUTION_PRIMITIVE(context, ValueDeserializer, ReadHeader, bool);
// We could have aborted during the constructor.
// If so, ReadHeader is where we report it.
if (private_->has_aborted) {
isolate->Throw(*isolate->factory()->NewError(
i::MessageTemplate::kDataCloneDeserializationError));
has_pending_exception = true;
RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool);
}
bool read_header = false;
has_pending_exception = !private_->deserializer.ReadHeader().To(&read_header);
RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool);
DCHECK(read_header);
// TODO(jbroman): Today, all wire formats are "legacy". When a more supported
// format is added, compare the version of the internal serializer to the
// minimum non-legacy version number.
if (!private_->supports_legacy_wire_format) {
isolate->Throw(*isolate->factory()->NewError(
i::MessageTemplate::kDataCloneDeserializationVersionError));
has_pending_exception = true;
RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool);
}
return Just(true);
}
Maybe<bool> ValueDeserializer::ReadHeader() {
Isolate* isolate = reinterpret_cast<Isolate*>(private_->isolate);
return ReadHeader(isolate->GetEnteredContext());
}
void ValueDeserializer::SetSupportsLegacyWireFormat(
bool supports_legacy_wire_format) {
private_->supports_legacy_wire_format = supports_legacy_wire_format;
}
uint32_t ValueDeserializer::GetWireFormatVersion() const {
CHECK(!private_->has_aborted);
return private_->deserializer.GetWireFormatVersion();
}
MaybeLocal<Value> ValueDeserializer::ReadValue(Local<Context> context) {
CHECK(!private_->has_aborted);
PREPARE_FOR_EXECUTION(context, ValueDeserializer, ReadValue, Value);
i::MaybeHandle<i::Object> result;
if (GetWireFormatVersion() > 0) {
result = private_->deserializer.ReadObject();
} else {
result =
private_->deserializer.ReadObjectUsingEntireBufferForLegacyFormat();
}
Local<Value> value;
has_pending_exception = !ToLocal(result, &value);
RETURN_ON_FAILED_EXECUTION(Value);
RETURN_ESCAPED(value);
}
void ValueDeserializer::TransferArrayBuffer(uint32_t transfer_id,
Local<ArrayBuffer> array_buffer) {
CHECK(!private_->has_aborted);
private_->deserializer.TransferArrayBuffer(transfer_id,
Utils::OpenHandle(*array_buffer));
}
void ValueDeserializer::TransferSharedArrayBuffer(
uint32_t transfer_id, Local<SharedArrayBuffer> shared_array_buffer) {
CHECK(!private_->has_aborted);
private_->deserializer.TransferArrayBuffer(
transfer_id, Utils::OpenHandle(*shared_array_buffer));
}
bool ValueDeserializer::ReadUint32(uint32_t* value) {
return private_->deserializer.ReadUint32(value);
}
bool ValueDeserializer::ReadUint64(uint64_t* value) {
return private_->deserializer.ReadUint64(value);
}
bool ValueDeserializer::ReadDouble(double* value) {
return private_->deserializer.ReadDouble(value);
}
bool ValueDeserializer::ReadRawBytes(size_t length, const void** data) {
return private_->deserializer.ReadRawBytes(length, data);
}
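// Illustrative sketch only, not part of this patch: round-tripping a value
// through the serializer/deserializer pair above. Since every wire format is
// still treated as "legacy" at this point (see the TODO in ReadHeader), the
// reader has to opt in via SetSupportsLegacyWireFormat. Error handling is
// elided.
static v8::MaybeLocal<v8::Value> CloneViaSerializerSketch(
    v8::Isolate* isolate, v8::Local<v8::Context> context,
    v8::Local<v8::Value> value) {
  v8::ValueSerializer serializer(isolate);
  serializer.WriteHeader();
  if (!serializer.WriteValue(context, value).FromMaybe(false)) {
    return v8::MaybeLocal<v8::Value>();
  }
  std::vector<uint8_t> wire = serializer.ReleaseBuffer();

  v8::ValueDeserializer deserializer(isolate, wire.data(), wire.size());
  deserializer.SetSupportsLegacyWireFormat(true);
  if (!deserializer.ReadHeader(context).FromMaybe(false)) {
    return v8::MaybeLocal<v8::Value>();
  }
  // On success this is a deep copy of `value` made without re-entering JS.
  return deserializer.ReadValue(context);
}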
// --- D a t a ---
bool Value::FullIsUndefined() const {
@ -3019,12 +3301,18 @@ bool Value::IsRegExp() const {
return obj->IsJSRegExp();
}
bool Value::IsAsyncFunction() const {
i::Handle<i::Object> obj = Utils::OpenHandle(this);
if (!obj->IsJSFunction()) return false;
i::Handle<i::JSFunction> func = i::Handle<i::JSFunction>::cast(obj);
return i::IsAsyncFunction(func->shared()->kind());
}
bool Value::IsGeneratorFunction() const {
i::Handle<i::Object> obj = Utils::OpenHandle(this);
if (!obj->IsJSFunction()) return false;
i::Handle<i::JSFunction> func = i::Handle<i::JSFunction>::cast(obj);
return func->shared()->is_generator();
return i::IsGeneratorFunction(func->shared()->kind());
}
@ -3662,6 +3950,98 @@ Maybe<bool> v8::Object::CreateDataProperty(v8::Local<v8::Context> context,
return result;
}
struct v8::PropertyDescriptor::PrivateData {
PrivateData() : desc() {}
i::PropertyDescriptor desc;
};
v8::PropertyDescriptor::PropertyDescriptor() : private_(new PrivateData()) {}
// DataDescriptor
v8::PropertyDescriptor::PropertyDescriptor(v8::Local<v8::Value> value)
: private_(new PrivateData()) {
private_->desc.set_value(Utils::OpenHandle(*value, true));
}
// DataDescriptor with writable field
v8::PropertyDescriptor::PropertyDescriptor(v8::Local<v8::Value> value,
bool writable)
: private_(new PrivateData()) {
private_->desc.set_value(Utils::OpenHandle(*value, true));
private_->desc.set_writable(writable);
}
// AccessorDescriptor
v8::PropertyDescriptor::PropertyDescriptor(v8::Local<v8::Value> get,
v8::Local<v8::Value> set)
: private_(new PrivateData()) {
DCHECK(get.IsEmpty() || get->IsUndefined() || get->IsFunction());
DCHECK(set.IsEmpty() || set->IsUndefined() || set->IsFunction());
private_->desc.set_get(Utils::OpenHandle(*get, true));
private_->desc.set_set(Utils::OpenHandle(*set, true));
}
v8::PropertyDescriptor::~PropertyDescriptor() { delete private_; }
v8::Local<Value> v8::PropertyDescriptor::value() const {
DCHECK(private_->desc.has_value());
return Utils::ToLocal(private_->desc.value());
}
v8::Local<Value> v8::PropertyDescriptor::get() const {
DCHECK(private_->desc.has_get());
return Utils::ToLocal(private_->desc.get());
}
v8::Local<Value> v8::PropertyDescriptor::set() const {
DCHECK(private_->desc.has_set());
return Utils::ToLocal(private_->desc.set());
}
bool v8::PropertyDescriptor::has_value() const {
return private_->desc.has_value();
}
bool v8::PropertyDescriptor::has_get() const {
return private_->desc.has_get();
}
bool v8::PropertyDescriptor::has_set() const {
return private_->desc.has_set();
}
bool v8::PropertyDescriptor::writable() const {
DCHECK(private_->desc.has_writable());
return private_->desc.writable();
}
bool v8::PropertyDescriptor::has_writable() const {
return private_->desc.has_writable();
}
void v8::PropertyDescriptor::set_enumerable(bool enumerable) {
private_->desc.set_enumerable(enumerable);
}
bool v8::PropertyDescriptor::enumerable() const {
DCHECK(private_->desc.has_enumerable());
return private_->desc.enumerable();
}
bool v8::PropertyDescriptor::has_enumerable() const {
return private_->desc.has_enumerable();
}
void v8::PropertyDescriptor::set_configurable(bool configurable) {
private_->desc.set_configurable(configurable);
}
bool v8::PropertyDescriptor::configurable() const {
DCHECK(private_->desc.has_configurable());
return private_->desc.configurable();
}
bool v8::PropertyDescriptor::has_configurable() const {
return private_->desc.has_configurable();
}
Maybe<bool> v8::Object::DefineOwnProperty(v8::Local<v8::Context> context,
v8::Local<Name> key,
@ -3672,13 +4052,6 @@ Maybe<bool> v8::Object::DefineOwnProperty(v8::Local<v8::Context> context,
i::Handle<i::Name> key_obj = Utils::OpenHandle(*key);
i::Handle<i::Object> value_obj = Utils::OpenHandle(*value);
if (self->IsAccessCheckNeeded() &&
!isolate->MayAccess(handle(isolate->context()),
i::Handle<i::JSObject>::cast(self))) {
isolate->ReportFailedAccessCheck(i::Handle<i::JSObject>::cast(self));
return Nothing<bool>();
}
i::PropertyDescriptor desc;
desc.set_writable(!(attributes & v8::ReadOnly));
desc.set_enumerable(!(attributes & v8::DontEnum));
@ -3691,6 +4064,19 @@ Maybe<bool> v8::Object::DefineOwnProperty(v8::Local<v8::Context> context,
return success;
}
Maybe<bool> v8::Object::DefineProperty(v8::Local<v8::Context> context,
v8::Local<Name> key,
PropertyDescriptor& descriptor) {
PREPARE_FOR_EXECUTION_PRIMITIVE(context, Object, DefineProperty, bool);
i::Handle<i::JSReceiver> self = Utils::OpenHandle(this);
i::Handle<i::Name> key_obj = Utils::OpenHandle(*key);
Maybe<bool> success = i::JSReceiver::DefineOwnProperty(
isolate, self, key_obj, &descriptor.get_private()->desc,
i::Object::DONT_THROW);
RETURN_ON_FAILED_EXECUTION_PRIMITIVE(bool);
return success;
}
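// Illustrative sketch only, not part of this patch: defining a read-only,
// enumerable data property through the new PropertyDescriptor overload.
// `object`, `key` and `value` are assumed to be supplied by the embedder.
static bool DefineReadOnlyPropertySketch(v8::Local<v8::Context> context,
                                         v8::Local<v8::Object> object,
                                         v8::Local<v8::Name> key,
                                         v8::Local<v8::Value> value) {
  v8::PropertyDescriptor desc(value, /* writable */ false);
  desc.set_enumerable(true);
  desc.set_configurable(false);
  // DefineProperty routes through i::JSReceiver::DefineOwnProperty above.
  return object->DefineProperty(context, key, desc).FromMaybe(false);
}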
MUST_USE_RESULT
static i::MaybeHandle<i::Object> DefineObjectProperty(
@ -4408,9 +4794,10 @@ bool v8::Object::IsConstructor() {
MaybeLocal<Value> Object::CallAsFunction(Local<Context> context,
Local<Value> recv, int argc,
Local<Value> argv[]) {
PREPARE_FOR_EXECUTION_WITH_CALLBACK(context, Object, CallAsFunction, Value);
PREPARE_FOR_EXECUTION_WITH_CONTEXT_IN_RUNTIME_CALL_STATS_SCOPE(
"v8", "V8.Execute", context, Object, CallAsFunction, MaybeLocal<Value>(),
InternalEscapableScope, true);
i::TimerEventScope<i::TimerEventExecute> timer_scope(isolate);
TRACE_EVENT_CALL_STATS_SCOPED(isolate, "v8", "V8.Execute");
auto self = Utils::OpenHandle(this);
auto recv_obj = Utils::OpenHandle(*recv);
STATIC_ASSERT(sizeof(v8::Local<v8::Value>) == sizeof(i::Object**));
@ -4434,10 +4821,10 @@ Local<v8::Value> Object::CallAsFunction(v8::Local<v8::Value> recv, int argc,
MaybeLocal<Value> Object::CallAsConstructor(Local<Context> context, int argc,
Local<Value> argv[]) {
PREPARE_FOR_EXECUTION_WITH_CALLBACK(context, Object, CallAsConstructor,
Value);
PREPARE_FOR_EXECUTION_WITH_CONTEXT_IN_RUNTIME_CALL_STATS_SCOPE(
"v8", "V8.Execute", context, Object, CallAsConstructor,
MaybeLocal<Value>(), InternalEscapableScope, true);
i::TimerEventScope<i::TimerEventExecute> timer_scope(isolate);
TRACE_EVENT_CALL_STATS_SCOPED(isolate, "v8", "V8.Execute");
auto self = Utils::OpenHandle(this);
STATIC_ASSERT(sizeof(v8::Local<v8::Value>) == sizeof(i::Object**));
i::Handle<i::Object>* args = reinterpret_cast<i::Handle<i::Object>*>(argv);
@ -4485,9 +4872,10 @@ Local<v8::Object> Function::NewInstance() const {
MaybeLocal<Object> Function::NewInstance(Local<Context> context, int argc,
v8::Local<v8::Value> argv[]) const {
PREPARE_FOR_EXECUTION_WITH_CALLBACK(context, Function, NewInstance, Object);
PREPARE_FOR_EXECUTION_WITH_CONTEXT_IN_RUNTIME_CALL_STATS_SCOPE(
"v8", "V8.Execute", context, Function, NewInstance, MaybeLocal<Object>(),
InternalEscapableScope, true);
i::TimerEventScope<i::TimerEventExecute> timer_scope(isolate);
TRACE_EVENT_CALL_STATS_SCOPED(isolate, "v8", "V8.Execute");
auto self = Utils::OpenHandle(this);
STATIC_ASSERT(sizeof(v8::Local<v8::Value>) == sizeof(i::Object**));
i::Handle<i::Object>* args = reinterpret_cast<i::Handle<i::Object>*>(argv);
@ -4509,9 +4897,10 @@ Local<v8::Object> Function::NewInstance(int argc,
MaybeLocal<v8::Value> Function::Call(Local<Context> context,
v8::Local<v8::Value> recv, int argc,
v8::Local<v8::Value> argv[]) {
PREPARE_FOR_EXECUTION_WITH_CALLBACK(context, Function, Call, Value);
PREPARE_FOR_EXECUTION_WITH_CONTEXT_IN_RUNTIME_CALL_STATS_SCOPE(
"v8", "V8.Execute", context, Function, Call, MaybeLocal<Value>(),
InternalEscapableScope, true);
i::TimerEventScope<i::TimerEventExecute> timer_scope(isolate);
TRACE_EVENT_CALL_STATS_SCOPED(isolate, "v8", "V8.Execute");
auto self = Utils::OpenHandle(this);
i::Handle<i::Object> recv_obj = Utils::OpenHandle(*recv);
STATIC_ASSERT(sizeof(v8::Local<v8::Value>) == sizeof(i::Object**));
@ -5708,8 +6097,8 @@ Local<Context> NewContext(v8::Isolate* external_isolate,
v8::MaybeLocal<Value> global_object,
size_t context_snapshot_index) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(external_isolate);
LOG_API(isolate, Context, New);
TRACE_EVENT_CALL_STATS_SCOPED(isolate, "v8", "V8.NewContext");
LOG_API(isolate, Context, New);
i::HandleScope scope(isolate);
ExtensionConfiguration no_extensions;
if (extensions == NULL) extensions = &no_extensions;
@ -6820,8 +7209,9 @@ MaybeLocal<WasmCompiledModule> WasmCompiledModule::Deserialize(
if (!maybe_compiled_part.ToHandle(&compiled_part)) {
return MaybeLocal<WasmCompiledModule>();
}
return Local<WasmCompiledModule>::Cast(Utils::ToLocal(
i::wasm::CreateCompiledModuleObject(i_isolate, compiled_part)));
return Local<WasmCompiledModule>::Cast(
Utils::ToLocal(i::wasm::CreateCompiledModuleObject(
i_isolate, compiled_part, i::wasm::ModuleOrigin::kWasmOrigin)));
}
// static
@ -7234,8 +7624,7 @@ Local<Integer> v8::Integer::NewFromUnsigned(Isolate* isolate, uint32_t value) {
void Isolate::ReportExternalAllocationLimitReached() {
i::Heap* heap = reinterpret_cast<i::Isolate*>(this)->heap();
if (heap->gc_state() != i::Heap::NOT_IN_GC) return;
heap->ReportExternalMemoryPressure(
"external memory allocation limit reached.");
heap->ReportExternalMemoryPressure();
}
@ -7303,27 +7692,24 @@ v8::Local<Value> Isolate::ThrowException(v8::Local<v8::Value> value) {
void Isolate::SetObjectGroupId(internal::Object** object, UniqueId id) {
i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(this);
internal_isolate->global_handles()->SetObjectGroupId(
v8::internal::Handle<v8::internal::Object>(object).location(),
id);
i::Handle<i::Object>(object).location(), id);
}
void Isolate::SetReferenceFromGroup(UniqueId id, internal::Object** object) {
i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(this);
internal_isolate->global_handles()->SetReferenceFromGroup(
id,
v8::internal::Handle<v8::internal::Object>(object).location());
id, i::Handle<i::Object>(object).location());
}
void Isolate::SetReference(internal::Object** parent,
internal::Object** child) {
i::Isolate* internal_isolate = reinterpret_cast<i::Isolate*>(this);
i::Object** parent_location =
v8::internal::Handle<v8::internal::Object>(parent).location();
i::Object** parent_location = i::Handle<i::Object>(parent).location();
internal_isolate->global_handles()->SetReference(
reinterpret_cast<i::HeapObject**>(parent_location),
v8::internal::Handle<v8::internal::Object>(child).location());
i::Handle<i::Object>(child).location());
}
@ -7398,13 +7784,13 @@ void Isolate::RequestGarbageCollectionForTesting(GarbageCollectionType type) {
CHECK(i::FLAG_expose_gc);
if (type == kMinorGarbageCollection) {
reinterpret_cast<i::Isolate*>(this)->heap()->CollectGarbage(
i::NEW_SPACE, "Isolate::RequestGarbageCollection",
i::NEW_SPACE, i::GarbageCollectionReason::kTesting,
kGCCallbackFlagForced);
} else {
DCHECK_EQ(kFullGarbageCollection, type);
reinterpret_cast<i::Isolate*>(this)->heap()->CollectAllGarbage(
i::Heap::kAbortIncrementalMarkingMask,
"Isolate::RequestGarbageCollection", kGCCallbackFlagForced);
i::GarbageCollectionReason::kTesting, kGCCallbackFlagForced);
}
}
@ -7833,7 +8219,8 @@ void Isolate::LowMemoryNotification() {
i::HistogramTimerScope idle_notification_scope(
isolate->counters()->gc_low_memory_notification());
TRACE_EVENT0("v8", "V8.GCLowMemoryNotification");
isolate->heap()->CollectAllAvailableGarbage("low memory notification");
isolate->heap()->CollectAllAvailableGarbage(
i::GarbageCollectionReason::kLowMemoryNotification);
}
}
@ -7857,8 +8244,7 @@ void Isolate::IsolateInBackgroundNotification() {
void Isolate::MemoryPressureNotification(MemoryPressureLevel level) {
i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
return isolate->heap()->MemoryPressureNotification(level,
Locker::IsLocked(this));
isolate->heap()->MemoryPressureNotification(level, Locker::IsLocked(this));
}
void Isolate::SetRAILMode(RAILMode rail_mode) {
@ -8325,6 +8711,10 @@ Local<String> CpuProfileNode::GetFunctionName() const {
}
}
const char* CpuProfileNode::GetFunctionNameStr() const {
const i::ProfileNode* node = reinterpret_cast<const i::ProfileNode*>(this);
return node->entry()->name();
}
int CpuProfileNode::GetScriptId() const {
const i::ProfileNode* node = reinterpret_cast<const i::ProfileNode*>(this);
@ -8332,7 +8722,6 @@ int CpuProfileNode::GetScriptId() const {
return entry->script_id();
}
Local<String> CpuProfileNode::GetScriptResourceName() const {
const i::ProfileNode* node = reinterpret_cast<const i::ProfileNode*>(this);
i::Isolate* isolate = node->isolate();
@ -8340,6 +8729,10 @@ Local<String> CpuProfileNode::GetScriptResourceName() const {
node->entry()->resource_name()));
}
const char* CpuProfileNode::GetScriptResourceNameStr() const {
const i::ProfileNode* node = reinterpret_cast<const i::ProfileNode*>(this);
return node->entry()->resource_name();
}
int CpuProfileNode::GetLineNumber() const {
return reinterpret_cast<const i::ProfileNode*>(this)->entry()->line_number();
@ -8966,9 +9359,6 @@ void InvokeAccessorGetterCallback(
Isolate* isolate = reinterpret_cast<Isolate*>(info.GetIsolate());
RuntimeCallTimerScope timer(isolate,
&RuntimeCallStats::AccessorGetterCallback);
TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
isolate,
&internal::tracing::TraceEventStatsTable::AccessorGetterCallback);
Address getter_address = reinterpret_cast<Address>(reinterpret_cast<intptr_t>(
getter));
VMState<EXTERNAL> state(isolate);
@ -8982,9 +9372,6 @@ void InvokeFunctionCallback(const v8::FunctionCallbackInfo<v8::Value>& info,
Isolate* isolate = reinterpret_cast<Isolate*>(info.GetIsolate());
RuntimeCallTimerScope timer(isolate,
&RuntimeCallStats::InvokeFunctionCallback);
TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
isolate,
&internal::tracing::TraceEventStatsTable::InvokeFunctionCallback);
Address callback_address =
reinterpret_cast<Address>(reinterpret_cast<intptr_t>(callback));
VMState<EXTERNAL> state(isolate);

8
deps/v8/src/api.h (vendored)

@ -69,7 +69,6 @@ class RegisteredExtension {
static RegisteredExtension* first_extension_;
};
#define OPEN_HANDLE_LIST(V) \
V(Template, TemplateInfo) \
V(FunctionTemplate, FunctionTemplateInfo) \
@ -101,6 +100,7 @@ class RegisteredExtension {
V(Symbol, Symbol) \
V(Script, JSFunction) \
V(UnboundScript, SharedFunctionInfo) \
V(Module, Module) \
V(Function, JSReceiver) \
V(Message, JSMessageObject) \
V(Context, Context) \
@ -124,6 +124,8 @@ class Utils {
v8::internal::Handle<v8::internal::Context> obj);
static inline Local<Value> ToLocal(
v8::internal::Handle<v8::internal::Object> obj);
static inline Local<Module> ToLocal(
v8::internal::Handle<v8::internal::Module> obj);
static inline Local<Name> ToLocal(
v8::internal::Handle<v8::internal::Name> obj);
static inline Local<String> ToLocal(
@ -136,6 +138,8 @@ class Utils {
v8::internal::Handle<v8::internal::JSReceiver> obj);
static inline Local<Object> ToLocal(
v8::internal::Handle<v8::internal::JSObject> obj);
static inline Local<Function> ToLocal(
v8::internal::Handle<v8::internal::JSFunction> obj);
static inline Local<Array> ToLocal(
v8::internal::Handle<v8::internal::JSArray> obj);
static inline Local<Map> ToLocal(
@ -284,12 +288,14 @@ inline bool ToLocal(v8::internal::MaybeHandle<v8::internal::Object> maybe,
MAKE_TO_LOCAL(ToLocal, Context, Context)
MAKE_TO_LOCAL(ToLocal, Object, Value)
MAKE_TO_LOCAL(ToLocal, Module, Module)
MAKE_TO_LOCAL(ToLocal, Name, Name)
MAKE_TO_LOCAL(ToLocal, String, String)
MAKE_TO_LOCAL(ToLocal, Symbol, Symbol)
MAKE_TO_LOCAL(ToLocal, JSRegExp, RegExp)
MAKE_TO_LOCAL(ToLocal, JSReceiver, Object)
MAKE_TO_LOCAL(ToLocal, JSObject, Object)
MAKE_TO_LOCAL(ToLocal, JSFunction, Function)
MAKE_TO_LOCAL(ToLocal, JSArray, Array)
MAKE_TO_LOCAL(ToLocal, JSMap, Map)
MAKE_TO_LOCAL(ToLocal, JSSet, Set)

7
deps/v8/src/arguments.h (vendored)

@ -81,21 +81,20 @@ double ClobberDoubleRegisters(double x1, double x2, double x3, double x4);
// TODO(cbruni): add global flag to check whether any tracing events have been
// enabled.
// TODO(cbruni): Convert the IsContext CHECK back to a DCHECK.
#define RUNTIME_FUNCTION_RETURNS_TYPE(Type, Name) \
static INLINE(Type __RT_impl_##Name(Arguments args, Isolate* isolate)); \
\
V8_NOINLINE static Type Stats_##Name(int args_length, Object** args_object, \
Isolate* isolate) { \
RuntimeCallTimerScope timer(isolate, &RuntimeCallStats::Name); \
TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.runtime"), \
"V8.Runtime_" #Name); \
Arguments args(args_length, args_object); \
TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED( \
isolate, &tracing::TraceEventStatsTable::Name); \
return __RT_impl_##Name(args, isolate); \
} \
\
Type Name(int args_length, Object** args_object, Isolate* isolate) { \
CHECK(isolate->context() == nullptr || isolate->context()->IsContext()); \
DCHECK(isolate->context() == nullptr || isolate->context()->IsContext()); \
CLOBBER_DOUBLE_REGISTERS(); \
if (V8_UNLIKELY(TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() || \
FLAG_runtime_call_stats)) { \

1
deps/v8/src/arm/OWNERS (vendored)

@ -1 +0,0 @@
rmcilroy@chromium.org

2
deps/v8/src/arm/assembler-arm-inl.h (vendored)

@ -46,7 +46,7 @@
namespace v8 {
namespace internal {
bool CpuFeatures::SupportsCrankshaft() { return IsSupported(VFP3); }
bool CpuFeatures::SupportsCrankshaft() { return true; }
bool CpuFeatures::SupportsSimd128() { return false; }

580
deps/v8/src/arm/assembler-arm.cc (vendored)

Diff not shown because of its large size.

27
deps/v8/src/arm/assembler-arm.h (vendored)

@ -1022,7 +1022,8 @@ class Assembler : public AssemblerBase {
void bkpt(uint32_t imm16); // v5 and above
void svc(uint32_t imm24, Condition cond = al);
// Synchronization instructions
// Synchronization instructions.
// On ARMv6, an equivalent CP15 operation will be used.
void dmb(BarrierOption option);
void dsb(BarrierOption option);
void isb(BarrierOption option);
@ -1258,6 +1259,19 @@ class Assembler : public AssemblerBase {
void vcmp(const SwVfpRegister src1, const float src2,
const Condition cond = al);
void vmaxnm(const DwVfpRegister dst,
const DwVfpRegister src1,
const DwVfpRegister src2);
void vmaxnm(const SwVfpRegister dst,
const SwVfpRegister src1,
const SwVfpRegister src2);
void vminnm(const DwVfpRegister dst,
const DwVfpRegister src1,
const DwVfpRegister src2);
void vminnm(const SwVfpRegister dst,
const SwVfpRegister src1,
const SwVfpRegister src2);
// VSEL supports cond in {eq, ne, ge, lt, gt, le, vs, vc}.
void vsel(const Condition cond,
const DwVfpRegister dst,
@ -1289,8 +1303,8 @@ class Assembler : public AssemblerBase {
const Condition cond = al);
// Support for NEON.
// All these APIs support D0 to D31 and Q0 to Q15.
// All these APIs support D0 to D31 and Q0 to Q15.
void vld1(NeonSize size,
const NeonListOperand& dst,
const NeonMemOperand& src);
@ -1299,6 +1313,9 @@ class Assembler : public AssemblerBase {
const NeonMemOperand& dst);
void vmovl(NeonDataType dt, QwNeonRegister dst, DwVfpRegister src);
// Currently, vswp supports only D0 to D31.
void vswp(DwVfpRegister srcdst0, DwVfpRegister srcdst1);
// Pseudo instructions
// Different nop operations are used by the code generator to detect certain
@ -1586,6 +1603,12 @@ class Assembler : public AssemblerBase {
(pc_offset() < no_const_pool_before_);
}
bool VfpRegisterIsAvailable(DwVfpRegister reg) {
DCHECK(reg.is_valid());
return IsEnabled(VFP32DREGS) ||
(reg.reg_code < LowDwVfpRegister::kMaxNumLowRegisters);
}
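// Example (illustrative, not part of this patch): with VFP32DREGS disabled
// only the low registers d0-d15 are usable, so VfpRegisterIsAvailable(d7) is
// true while VfpRegisterIsAvailable(d20) is false; enabling VFP32DREGS makes
// all of d0-d31 available.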
private:
int next_buffer_check_; // pc offset of next buffer check

377
deps/v8/src/arm/code-stubs-arm.cc (vendored)

@ -553,17 +553,14 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
// 3) Fall through to both_loaded_as_doubles.
// 4) Jump to lhs_not_nan.
// In cases 3 and 4 we have found out we were dealing with a number-number
// comparison. If VFP3 is supported the double values of the numbers have
// been loaded into d7 and d6. Otherwise, the double values have been loaded
// into r0, r1, r2, and r3.
// comparison. The double values of the numbers have been loaded into d7 (lhs)
// and d6 (rhs).
EmitSmiNonsmiComparison(masm, lhs, rhs, &lhs_not_nan, &slow, strict());
__ bind(&both_loaded_as_doubles);
// The arguments have been converted to doubles and stored in d6 and d7, if
// VFP3 is supported, or in r0, r1, r2, and r3.
// The arguments have been converted to doubles and stored in d6 and d7.
__ bind(&lhs_not_nan);
Label no_nan;
// ARMv7 VFP3 instructions to implement double precision comparison.
__ VFPCompareAndSetFlags(d7, d6);
Label nan;
__ b(vs, &nan);
@ -1646,7 +1643,6 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// r2 : feedback vector
// r3 : slot in feedback vector (Smi)
Label initialize, done, miss, megamorphic, not_array_function;
Label done_initialize_count, done_increment_count;
DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
masm->isolate()->heap()->megamorphic_symbol());
@ -1666,7 +1662,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
Register weak_value = r9;
__ ldr(weak_value, FieldMemOperand(r5, WeakCell::kValueOffset));
__ cmp(r1, weak_value);
__ b(eq, &done_increment_count);
__ b(eq, &done);
__ CompareRoot(r5, Heap::kmegamorphic_symbolRootIndex);
__ b(eq, &done);
__ ldr(feedback_map, FieldMemOperand(r5, HeapObject::kMapOffset));
@ -1689,7 +1685,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
__ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5);
__ cmp(r1, r5);
__ b(ne, &megamorphic);
__ jmp(&done_increment_count);
__ jmp(&done);
__ bind(&miss);
@ -1718,32 +1714,22 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
// slot.
CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub);
__ b(&done_initialize_count);
__ b(&done);
__ bind(&not_array_function);
CreateWeakCellStub weak_cell_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &weak_cell_stub);
__ bind(&done_initialize_count);
// Initialize the call counter.
__ Move(r5, Operand(Smi::FromInt(1)));
__ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
__ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + kPointerSize));
__ b(&done);
__ bind(&done);
__ bind(&done_increment_count);
// Increment the call count for monomorphic function calls.
// Increment the call count for all function calls.
__ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
__ add(r5, r5, Operand(FixedArray::kHeaderSize + kPointerSize));
__ ldr(r4, FieldMemOperand(r5, 0));
__ add(r4, r4, Operand(Smi::FromInt(1)));
__ str(r4, FieldMemOperand(r5, 0));
__ bind(&done);
}
void CallConstructStub::Generate(MacroAssembler* masm) {
// r0 : number of arguments
// r1 : the function to call
@ -1785,6 +1771,17 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
__ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
// Note: feedback_vector and slot are clobbered after the call.
static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
Register slot) {
__ add(feedback_vector, feedback_vector,
Operand::PointerOffsetFromSmiKey(slot));
__ add(feedback_vector, feedback_vector,
Operand(FixedArray::kHeaderSize + kPointerSize));
__ ldr(slot, FieldMemOperand(feedback_vector, 0));
__ add(slot, slot, Operand(Smi::FromInt(1)));
__ str(slot, FieldMemOperand(feedback_vector, 0));
}
void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
// r1 - function
@ -1798,11 +1795,7 @@ void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
__ mov(r0, Operand(arg_count()));
// Increment the call count for monomorphic function calls.
__ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
__ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize));
__ ldr(r3, FieldMemOperand(r2, 0));
__ add(r3, r3, Operand(Smi::FromInt(1)));
__ str(r3, FieldMemOperand(r2, 0));
IncrementCallCount(masm, r2, r3);
__ mov(r2, r4);
__ mov(r3, r1);
@ -1815,7 +1808,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
// r1 - function
// r3 - slot id (Smi)
// r2 - vector
Label extra_checks_or_miss, call, call_function;
Label extra_checks_or_miss, call, call_function, call_count_incremented;
int argc = arg_count();
ParameterCount actual(argc);
@ -1845,14 +1838,11 @@ void CallICStub::Generate(MacroAssembler* masm) {
// convincing us that we have a monomorphic JSFunction.
__ JumpIfSmi(r1, &extra_checks_or_miss);
// Increment the call count for monomorphic function calls.
__ add(r2, r2, Operand::PointerOffsetFromSmiKey(r3));
__ add(r2, r2, Operand(FixedArray::kHeaderSize + kPointerSize));
__ ldr(r3, FieldMemOperand(r2, 0));
__ add(r3, r3, Operand(Smi::FromInt(1)));
__ str(r3, FieldMemOperand(r2, 0));
__ bind(&call_function);
// Increment the call count for monomorphic function calls.
IncrementCallCount(masm, r2, r3);
__ mov(r0, Operand(argc));
__ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
tail_call_mode()),
@ -1893,6 +1883,11 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize));
__ bind(&call);
// Increment the call count for megamorphic function calls.
IncrementCallCount(masm, r2, r3);
__ bind(&call_count_incremented);
__ mov(r0, Operand(argc));
__ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
RelocInfo::CODE_TARGET);
@ -1919,11 +1914,6 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ cmp(r4, ip);
__ b(ne, &miss);
// Initialize the call counter.
__ Move(r5, Operand(Smi::FromInt(1)));
__ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
__ str(r5, FieldMemOperand(r4, FixedArray::kHeaderSize + kPointerSize));
// Store the function. Use a stub since we need a frame for allocation.
// r2 - vector
// r3 - slot
@ -1931,9 +1921,13 @@ void CallICStub::Generate(MacroAssembler* masm) {
{
FrameScope scope(masm, StackFrame::INTERNAL);
CreateWeakCellStub create_stub(masm->isolate());
__ Push(r2);
__ Push(r3);
__ Push(cp, r1);
__ CallStub(&create_stub);
__ Pop(cp, r1);
__ Pop(r3);
__ Pop(r2);
}
__ jmp(&call_function);
@ -1943,7 +1937,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ bind(&miss);
GenerateMiss(masm);
__ jmp(&call);
__ jmp(&call_count_incremented);
}
@ -2131,291 +2125,6 @@ void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
}
void SubStringStub::Generate(MacroAssembler* masm) {
Label runtime;
// Stack frame on entry.
// lr: return address
// sp[0]: to
// sp[4]: from
// sp[8]: string
// This stub is called from the native-call %_SubString(...), so
// nothing can be assumed about the arguments. It is tested that:
// "string" is a sequential string,
// both "from" and "to" are smis, and
// 0 <= from <= to <= string.length.
// If any of these assumptions fail, we call the runtime system.
const int kToOffset = 0 * kPointerSize;
const int kFromOffset = 1 * kPointerSize;
const int kStringOffset = 2 * kPointerSize;
__ Ldrd(r2, r3, MemOperand(sp, kToOffset));
STATIC_ASSERT(kFromOffset == kToOffset + 4);
STATIC_ASSERT(kSmiTag == 0);
STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
// Arithmetic shift right by one un-smi-tags. In this case we rotate right
// instead because we bail out on non-smi values: ROR and ASR are equivalent
// for smis but they set the flags in a way that's easier to optimize.
__ mov(r2, Operand(r2, ROR, 1), SetCC);
__ mov(r3, Operand(r3, ROR, 1), SetCC, cc);
// If either to or from had the smi tag bit set, then C is set now, and N
// has the same value: we rotated by 1, so the bottom bit is now the top bit.
// We want to bailout to runtime here if From is negative. In that case, the
// next instruction is not executed and we fall through to bailing out to
// runtime.
// Executed if both r2 and r3 are untagged integers.
__ sub(r2, r2, Operand(r3), SetCC, cc);
// One of the above un-smis or the above SUB could have set N==1.
__ b(mi, &runtime); // Either "from" or "to" is not an smi, or from > to.
// Make sure first argument is a string.
__ ldr(r0, MemOperand(sp, kStringOffset));
__ JumpIfSmi(r0, &runtime);
Condition is_string = masm->IsObjectStringType(r0, r1);
__ b(NegateCondition(is_string), &runtime);
Label single_char;
__ cmp(r2, Operand(1));
__ b(eq, &single_char);
// Short-cut for the case of trivial substring.
Label return_r0;
// r0: original string
// r2: result string length
__ ldr(r4, FieldMemOperand(r0, String::kLengthOffset));
__ cmp(r2, Operand(r4, ASR, 1));
// Return original string.
__ b(eq, &return_r0);
// Longer than original string's length or negative: unsafe arguments.
__ b(hi, &runtime);
// Shorter than original string's length: an actual substring.
// Deal with different string types: update the index if necessary
// and put the underlying string into r5.
// r0: original string
// r1: instance type
// r2: length
// r3: from index (untagged)
Label underlying_unpacked, sliced_string, seq_or_external_string;
// If the string is not indirect, it can only be sequential or external.
STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
STATIC_ASSERT(kIsIndirectStringMask != 0);
__ tst(r1, Operand(kIsIndirectStringMask));
__ b(eq, &seq_or_external_string);
__ tst(r1, Operand(kSlicedNotConsMask));
__ b(ne, &sliced_string);
// Cons string. Check whether it is flat, then fetch first part.
__ ldr(r5, FieldMemOperand(r0, ConsString::kSecondOffset));
__ CompareRoot(r5, Heap::kempty_stringRootIndex);
__ b(ne, &runtime);
__ ldr(r5, FieldMemOperand(r0, ConsString::kFirstOffset));
// Update instance type.
__ ldr(r1, FieldMemOperand(r5, HeapObject::kMapOffset));
__ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset));
__ jmp(&underlying_unpacked);
__ bind(&sliced_string);
// Sliced string. Fetch parent and correct start index by offset.
__ ldr(r5, FieldMemOperand(r0, SlicedString::kParentOffset));
__ ldr(r4, FieldMemOperand(r0, SlicedString::kOffsetOffset));
__ add(r3, r3, Operand(r4, ASR, 1)); // Add offset to index.
// Update instance type.
__ ldr(r1, FieldMemOperand(r5, HeapObject::kMapOffset));
__ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset));
__ jmp(&underlying_unpacked);
__ bind(&seq_or_external_string);
// Sequential or external string. Just move string to the expected register.
__ mov(r5, r0);
__ bind(&underlying_unpacked);
if (FLAG_string_slices) {
Label copy_routine;
// r5: underlying subject string
// r1: instance type of underlying subject string
// r2: length
// r3: adjusted start index (untagged)
__ cmp(r2, Operand(SlicedString::kMinLength));
// Short slice. Copy instead of slicing.
__ b(lt, &copy_routine);
// Allocate new sliced string. At this point we do not reload the instance
// type including the string encoding because we simply rely on the info
// provided by the original string. It does not matter if the original
// string's encoding is wrong because we always have to recheck encoding of
// the newly created string's parent anyways due to externalized strings.
Label two_byte_slice, set_slice_header;
STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
__ tst(r1, Operand(kStringEncodingMask));
__ b(eq, &two_byte_slice);
__ AllocateOneByteSlicedString(r0, r2, r6, r4, &runtime);
__ jmp(&set_slice_header);
__ bind(&two_byte_slice);
__ AllocateTwoByteSlicedString(r0, r2, r6, r4, &runtime);
__ bind(&set_slice_header);
__ mov(r3, Operand(r3, LSL, 1));
__ str(r5, FieldMemOperand(r0, SlicedString::kParentOffset));
__ str(r3, FieldMemOperand(r0, SlicedString::kOffsetOffset));
__ jmp(&return_r0);
__ bind(&copy_routine);
}
// r5: underlying subject string
// r1: instance type of underlying subject string
// r2: length
// r3: adjusted start index (untagged)
Label two_byte_sequential, sequential_string, allocate_result;
STATIC_ASSERT(kExternalStringTag != 0);
STATIC_ASSERT(kSeqStringTag == 0);
__ tst(r1, Operand(kExternalStringTag));
__ b(eq, &sequential_string);
// Handle external string.
// Rule out short external strings.
STATIC_ASSERT(kShortExternalStringTag != 0);
__ tst(r1, Operand(kShortExternalStringTag));
__ b(ne, &runtime);
__ ldr(r5, FieldMemOperand(r5, ExternalString::kResourceDataOffset));
// r5 already points to the first character of underlying string.
__ jmp(&allocate_result);
__ bind(&sequential_string);
// Locate first character of underlying subject string.
STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
__ add(r5, r5, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
__ bind(&allocate_result);
// Sequential ASCII string. Allocate the result.
STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
__ tst(r1, Operand(kStringEncodingMask));
__ b(eq, &two_byte_sequential);
// Allocate and copy the resulting one-byte string.
__ AllocateOneByteString(r0, r2, r4, r6, r1, &runtime);
// Locate first character of substring to copy.
__ add(r5, r5, r3);
// Locate first character of result.
__ add(r1, r0, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
// r0: result string
// r1: first character of result string
// r2: result string length
// r5: first character of substring to copy
STATIC_ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
StringHelper::GenerateCopyCharacters(
masm, r1, r5, r2, r3, String::ONE_BYTE_ENCODING);
__ jmp(&return_r0);
// Allocate and copy the resulting two-byte string.
__ bind(&two_byte_sequential);
__ AllocateTwoByteString(r0, r2, r4, r6, r1, &runtime);
// Locate first character of substring to copy.
STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
__ add(r5, r5, Operand(r3, LSL, 1));
// Locate first character of result.
__ add(r1, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
// r0: result string.
// r1: first character of result.
// r2: result length.
// r5: first character of substring to copy.
STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
StringHelper::GenerateCopyCharacters(
masm, r1, r5, r2, r3, String::TWO_BYTE_ENCODING);
__ bind(&return_r0);
Counters* counters = isolate()->counters();
__ IncrementCounter(counters->sub_string_native(), 1, r3, r4);
__ Drop(3);
__ Ret();
// Just jump to runtime to create the sub string.
__ bind(&runtime);
__ TailCallRuntime(Runtime::kSubString);
__ bind(&single_char);
// r0: original string
// r1: instance type
// r2: length
// r3: from index (untagged)
__ SmiTag(r3, r3);
StringCharAtGenerator generator(r0, r3, r2, r0, &runtime, &runtime, &runtime,
RECEIVER_IS_STRING);
generator.GenerateFast(masm);
__ Drop(3);
__ Ret();
generator.SkipSlow(masm, &runtime);
}
void ToStringStub::Generate(MacroAssembler* masm) {
// The ToString stub takes one argument in r0.
Label is_number;
__ JumpIfSmi(r0, &is_number);
__ CompareObjectType(r0, r1, r1, FIRST_NONSTRING_TYPE);
// r0: receiver
// r1: receiver instance type
__ Ret(lo);
Label not_heap_number;
__ cmp(r1, Operand(HEAP_NUMBER_TYPE));
__ b(ne, &not_heap_number);
__ bind(&is_number);
NumberToStringStub stub(isolate());
__ TailCallStub(&stub);
__ bind(&not_heap_number);
Label not_oddball;
__ cmp(r1, Operand(ODDBALL_TYPE));
__ b(ne, &not_oddball);
__ ldr(r0, FieldMemOperand(r0, Oddball::kToStringOffset));
__ Ret();
__ bind(&not_oddball);
__ push(r0); // Push argument.
__ TailCallRuntime(Runtime::kToString);
}
void ToNameStub::Generate(MacroAssembler* masm) {
// The ToName stub takes one argument in r0.
Label is_number;
__ JumpIfSmi(r0, &is_number);
STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
__ CompareObjectType(r0, r1, r1, LAST_NAME_TYPE);
// r0: receiver
// r1: receiver instance type
__ Ret(ls);
Label not_heap_number;
__ cmp(r1, Operand(HEAP_NUMBER_TYPE));
__ b(ne, &not_heap_number);
__ bind(&is_number);
NumberToStringStub stub(isolate());
__ TailCallStub(&stub);
__ bind(&not_heap_number);
Label not_oddball;
__ cmp(r1, Operand(ODDBALL_TYPE));
__ b(ne, &not_oddball);
__ ldr(r0, FieldMemOperand(r0, Oddball::kToStringOffset));
__ Ret();
__ bind(&not_oddball);
__ push(r0); // Push argument.
__ TailCallRuntime(Runtime::kToName);
}
void StringHelper::GenerateFlatOneByteStringEquals(
MacroAssembler* masm, Register left, Register right, Register scratch1,
Register scratch2, Register scratch3) {
@ -3275,16 +2984,6 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
Label need_incremental;
Label need_incremental_pop_scratch;
__ and_(regs_.scratch0(), regs_.object(), Operand(~Page::kPageAlignmentMask));
__ ldr(regs_.scratch1(),
MemOperand(regs_.scratch0(),
MemoryChunk::kWriteBarrierCounterOffset));
__ sub(regs_.scratch1(), regs_.scratch1(), Operand(1), SetCC);
__ str(regs_.scratch1(),
MemOperand(regs_.scratch0(),
MemoryChunk::kWriteBarrierCounterOffset));
__ b(mi, &need_incremental);
// Let's look at the color of the object: If it is not black we don't have
// to inform the incremental marker.
__ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
@ -3712,7 +3411,7 @@ static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback,
__ ldr(receiver_map, MemOperand(pointer_reg, kPointerSize * 2));
// Load the map into the correct register.
DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister()));
DCHECK(feedback.is(StoreTransitionDescriptor::MapRegister()));
__ mov(feedback, too_far);
__ add(pc, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag));
@ -4425,7 +4124,7 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate);
__ cmp(r6, Operand(Page::kMaxRegularHeapObjectSize));
__ cmp(r6, Operand(kMaxRegularHeapObjectSize));
__ b(gt, &too_big_for_new_space);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
@ -4763,7 +4462,7 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
// Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ bind(&allocate);
__ cmp(r6, Operand(Page::kMaxRegularHeapObjectSize));
__ cmp(r6, Operand(kMaxRegularHeapObjectSize));
__ b(gt, &too_big_for_new_space);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

2
deps/v8/src/arm/codegen-arm.cc (vendored)

@ -39,6 +39,7 @@ MemCopyUint8Function CreateMemCopyUint8Function(Isolate* isolate,
Label less_4;
if (CpuFeatures::IsSupported(NEON)) {
CpuFeatureScope scope(&masm, NEON);
Label loop, less_256, less_128, less_64, less_32, _16_or_less, _8_or_less;
Label size_less_than_8;
__ pld(MemOperand(src, 0));
@ -193,6 +194,7 @@ MemCopyUint16Uint8Function CreateMemCopyUint16Uint8Function(
Register src = r1;
Register chars = r2;
if (CpuFeatures::IsSupported(NEON)) {
CpuFeatureScope scope(&masm, NEON);
Register temp = r3;
Label loop;

24
deps/v8/src/arm/constants-arm.h (vendored)

@ -477,40 +477,42 @@ class Instruction {
*reinterpret_cast<Instr*>(this) = value;
}
// Read one particular bit out of the instruction bits.
// Extract a single bit from the instruction bits and return it as bit 0 in
// the result.
inline int Bit(int nr) const {
return (InstructionBits() >> nr) & 1;
}
// Read a bit field's value out of the instruction bits.
// Extract a bit field <hi:lo> from the instruction bits and return it in the
// least-significant bits of the result.
inline int Bits(int hi, int lo) const {
return (InstructionBits() >> lo) & ((2 << (hi - lo)) - 1);
}
// Read a bit field out of the instruction bits.
// Read a bit field <hi:lo>, leaving its position unchanged in the result.
inline int BitField(int hi, int lo) const {
return InstructionBits() & (((2 << (hi - lo)) - 1) << lo);
}
// Static support.
// Read one particular bit out of the instruction bits.
// Extract a single bit from the instruction bits and return it as bit 0 in
// the result.
static inline int Bit(Instr instr, int nr) {
return (instr >> nr) & 1;
}
// Read the value of a bit field out of the instruction bits.
// Extract a bit field <hi:lo> from the instruction bits and return it in the
// least-significant bits of the result.
static inline int Bits(Instr instr, int hi, int lo) {
return (instr >> lo) & ((2 << (hi - lo)) - 1);
}
// Read a bit field out of the instruction bits.
// Read a bit field <hi:lo>, leaving its position unchanged in the result.
static inline int BitField(Instr instr, int hi, int lo) {
return instr & (((2 << (hi - lo)) - 1) << lo);
}
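// Worked example (illustrative, not part of this patch): for the instruction
// 0xE3A01005 (mov r1, #5), Bits(instr, 15, 12) evaluates to
// (0xE3A01005 >> 12) & ((2 << 3) - 1) = 0x1, i.e. the Rd field shifted down,
// while BitField(instr, 15, 12) keeps the field in place and yields 0x1000.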
// Accessors for the different named fields used in the ARM encoding.
// The naming of these accessor corresponds to figure A3-1.
//
@ -525,13 +527,11 @@ class Instruction {
// Generally applicable fields
inline Condition ConditionValue() const {
return static_cast<Condition>(Bits(31, 28));
}
inline int ConditionValue() const { return Bits(31, 28); }
inline Condition ConditionField() const {
return static_cast<Condition>(BitField(31, 28));
}
DECLARE_STATIC_TYPED_ACCESSOR(Condition, ConditionValue);
DECLARE_STATIC_TYPED_ACCESSOR(int, ConditionValue);
DECLARE_STATIC_TYPED_ACCESSOR(Condition, ConditionField);
inline int TypeValue() const { return Bits(27, 25); }

23
deps/v8/src/arm/deoptimizer-arm.cc (vendored)

@ -119,14 +119,20 @@ void Deoptimizer::TableEntryGenerator::Generate() {
DCHECK(kDoubleRegZero.code() == 14);
DCHECK(kScratchDoubleReg.code() == 15);
// Check CPU flags for number of registers, setting the Z condition flag.
__ CheckFor32DRegs(ip);
{
// We use a run-time check for VFP32DREGS.
CpuFeatureScope scope(masm(), VFP32DREGS,
CpuFeatureScope::kDontCheckSupported);
// Push registers d0-d15, and possibly d16-d31, on the stack.
// If d16-d31 are not pushed, decrease the stack pointer instead.
__ vstm(db_w, sp, d16, d31, ne);
__ sub(sp, sp, Operand(16 * kDoubleSize), LeaveCC, eq);
__ vstm(db_w, sp, d0, d15);
// Check CPU flags for number of registers, setting the Z condition flag.
__ CheckFor32DRegs(ip);
// Push registers d0-d15, and possibly d16-d31, on the stack.
// If d16-d31 are not pushed, decrease the stack pointer instead.
__ vstm(db_w, sp, d16, d31, ne);
__ sub(sp, sp, Operand(16 * kDoubleSize), LeaveCC, eq);
__ vstm(db_w, sp, d0, d15);
}
// Push all 16 registers (needed to populate FrameDescription::registers_).
// TODO(1588) Note that using pc with stm is deprecated, so we should perhaps
@ -259,9 +265,6 @@ void Deoptimizer::TableEntryGenerator::Generate() {
__ cmp(r4, r1);
__ b(lt, &outer_push_loop);
// Check CPU flags for number of registers, setting the Z condition flag.
__ CheckFor32DRegs(ip);
__ ldr(r1, MemOperand(r0, Deoptimizer::input_offset()));
for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
int code = config->GetAllocatableDoubleCode(i);

86
deps/v8/src/arm/disasm-arm.cc (vendored)

@ -105,6 +105,8 @@ class Decoder {
void DecodeType6(Instruction* instr);
// Type 7 includes special Debugger instructions.
int DecodeType7(Instruction* instr);
// CP15 coprocessor instructions.
void DecodeTypeCP15(Instruction* instr);
// For VFP support.
void DecodeTypeVFP(Instruction* instr);
void DecodeType6CoprocessorIns(Instruction* instr);
@ -1279,18 +1281,16 @@ void Decoder::DecodeType3(Instruction* instr) {
break;
}
}
if (FLAG_enable_sudiv) {
if (instr->Bits(5, 4) == 0x1) {
if ((instr->Bit(22) == 0x0) && (instr->Bit(20) == 0x1)) {
if (instr->Bit(21) == 0x1) {
// UDIV (in V8 notation matching ARM ISA format) rn = rm/rs
Format(instr, "udiv'cond'b 'rn, 'rm, 'rs");
} else {
// SDIV (in V8 notation matching ARM ISA format) rn = rm/rs
Format(instr, "sdiv'cond'b 'rn, 'rm, 'rs");
}
break;
if (instr->Bits(5, 4) == 0x1) {
if ((instr->Bit(22) == 0x0) && (instr->Bit(20) == 0x1)) {
if (instr->Bit(21) == 0x1) {
// UDIV (in V8 notation matching ARM ISA format) rn = rm/rs
Format(instr, "udiv'cond'b 'rn, 'rm, 'rs");
} else {
// SDIV (in V8 notation matching ARM ISA format) rn = rm/rs
Format(instr, "sdiv'cond'b 'rn, 'rm, 'rs");
}
break;
}
}
Format(instr, "'memop'cond'b 'rd, ['rn, -'shift_rm]'w");
@ -1374,7 +1374,18 @@ int Decoder::DecodeType7(Instruction* instr) {
Format(instr, "svc'cond 'svc");
}
} else {
DecodeTypeVFP(instr);
switch (instr->CoprocessorValue()) {
case 10: // Fall through.
case 11:
DecodeTypeVFP(instr);
break;
case 15:
DecodeTypeCP15(instr);
break;
default:
Unknown(instr);
break;
}
}
return Instruction::kInstrSize;
}
@ -1556,6 +1567,34 @@ void Decoder::DecodeTypeVFP(Instruction* instr) {
}
}
void Decoder::DecodeTypeCP15(Instruction* instr) {
VERIFY((instr->TypeValue() == 7) && (instr->Bit(24) == 0x0));
VERIFY(instr->CoprocessorValue() == 15);
if (instr->Bit(4) == 1) {
int crn = instr->Bits(19, 16);
int crm = instr->Bits(3, 0);
int opc1 = instr->Bits(23, 21);
int opc2 = instr->Bits(7, 5);
if ((opc1 == 0) && (crn == 7)) {
// ARMv6 memory barrier operations.
// Details available in ARM DDI 0406C.b, B3-1750.
if ((crm == 10) && (opc2 == 5)) {
Format(instr, "mcr'cond (CP15DMB)");
} else if ((crm == 10) && (opc2 == 4)) {
Format(instr, "mcr'cond (CP15DSB)");
} else if ((crm == 5) && (opc2 == 4)) {
Format(instr, "mcr'cond (CP15ISB)");
} else {
Unknown(instr);
}
} else {
Unknown(instr);
}
} else {
Unknown(instr);
}
}
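// Example encodings this accepts (illustrative, not part of this patch): the
// ARMv6 barrier sequences "mcr p15, 0, rN, c7, c10, 5" (DMB),
// "mcr p15, 0, rN, c7, c10, 4" (DSB) and "mcr p15, 0, rN, c7, c5, 4" (ISB),
// matching the crm/opc2 checks above.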
void Decoder::DecodeVMOVBetweenCoreAndSinglePrecisionRegisters(
Instruction* instr) {
@ -1786,6 +1825,13 @@ void Decoder::DecodeSpecialCondition(Instruction* instr) {
int imm3 = instr->Bits(21, 19);
out_buffer_pos_ += SNPrintF(out_buffer_ + out_buffer_pos_,
"vmovl.u%d q%d, d%d", imm3*8, Vd, Vm);
} else if ((instr->Bits(21, 16) == 0x32) && (instr->Bits(11, 7) == 0) &&
(instr->Bit(4) == 0)) {
int Vd = instr->VFPDRegValue(kDoublePrecision);
int Vm = instr->VFPMRegValue(kDoublePrecision);
char rtype = (instr->Bit(6) == 0) ? 'd' : 'q';
out_buffer_pos_ += SNPrintF(out_buffer_ + out_buffer_pos_,
"vswp %c%d, %c%d", rtype, Vd, rtype, Vm);
} else {
Unknown(instr);
}
@ -1898,6 +1944,22 @@ void Decoder::DecodeSpecialCondition(Instruction* instr) {
UNREACHABLE(); // Case analysis is exhaustive.
break;
}
} else if ((instr->Opc1Value() == 0x4) && (instr->Bits(11, 9) == 0x5) &&
(instr->Bit(4) == 0x0)) {
// VMAXNM, VMINNM (floating-point)
if (instr->SzValue() == 0x1) {
if (instr->Bit(6) == 0x1) {
Format(instr, "vminnm.f64 'Dd, 'Dn, 'Dm");
} else {
Format(instr, "vmaxnm.f64 'Dd, 'Dn, 'Dm");
}
} else {
if (instr->Bit(6) == 0x1) {
Format(instr, "vminnm.f32 'Sd, 'Sn, 'Sm");
} else {
Format(instr, "vmaxnm.f32 'Sd, 'Sn, 'Sm");
}
}
} else {
Unknown(instr);
}

26
deps/v8/src/arm/interface-descriptors-arm.cc (vendored)

@ -42,13 +42,9 @@ const Register StoreDescriptor::SlotRegister() { return r4; }
const Register StoreWithVectorDescriptor::VectorRegister() { return r3; }
const Register VectorStoreTransitionDescriptor::SlotRegister() { return r4; }
const Register VectorStoreTransitionDescriptor::VectorRegister() { return r3; }
const Register VectorStoreTransitionDescriptor::MapRegister() { return r5; }
const Register StoreTransitionDescriptor::MapRegister() { return r3; }
const Register StoreTransitionDescriptor::SlotRegister() { return r4; }
const Register StoreTransitionDescriptor::VectorRegister() { return r3; }
const Register StoreTransitionDescriptor::MapRegister() { return r5; }
const Register StoreGlobalViaContextDescriptor::SlotRegister() { return r2; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return r0; }
@ -375,7 +371,7 @@ void ArgumentAdaptorDescriptor::InitializePlatformSpecific(
&default_descriptor);
}
void ApiCallbackDescriptorBase::InitializePlatformSpecific(
void ApiCallbackDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
static PlatformInterfaceDescriptor default_descriptor =
PlatformInterfaceDescriptor(CAN_INLINE_TARGET_ADDRESS);
@ -414,7 +410,19 @@ void InterpreterPushArgsAndConstructDescriptor::InitializePlatformSpecific(
r0, // argument count (not including receiver)
r3, // new target
r1, // constructor to call
r2 // address of the first argument
r2, // allocation site feedback if available, undefined otherwise
r4 // address of the first argument
};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void InterpreterPushArgsAndConstructArrayDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {
r0, // argument count (not including receiver)
r1, // target to call checked to be Array function
r2, // allocation site feedback if available, undefined otherwise
r3 // address of the first argument
};
data->InitializePlatformSpecific(arraysize(registers), registers);
}

185
deps/v8/src/arm/macro-assembler-arm.cc (vendored)

@ -250,15 +250,17 @@ void MacroAssembler::Move(Register dst, Register src, Condition cond) {
}
}
void MacroAssembler::Move(SwVfpRegister dst, SwVfpRegister src) {
void MacroAssembler::Move(SwVfpRegister dst, SwVfpRegister src,
Condition cond) {
if (!dst.is(src)) {
vmov(dst, src);
vmov(dst, src, cond);
}
}
void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src) {
void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src,
Condition cond) {
if (!dst.is(src)) {
vmov(dst, src);
vmov(dst, src, cond);
}
}
@ -285,6 +287,7 @@ void MacroAssembler::And(Register dst, Register src1, const Operand& src2,
!src2.must_output_reloc_info(this) &&
CpuFeatures::IsSupported(ARMv7) &&
base::bits::IsPowerOfTwo32(src2.immediate() + 1)) {
CpuFeatureScope scope(this, ARMv7);
ubfx(dst, src1, 0,
WhichPowerOf2(static_cast<uint32_t>(src2.immediate()) + 1), cond);
} else {
@ -303,6 +306,7 @@ void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width,
mov(dst, Operand(dst, LSR, lsb), LeaveCC, cond);
}
} else {
CpuFeatureScope scope(this, ARMv7);
ubfx(dst, src1, lsb, width, cond);
}
}
@ -323,6 +327,7 @@ void MacroAssembler::Sbfx(Register dst, Register src1, int lsb, int width,
mov(dst, Operand(dst, ASR, shift_down), LeaveCC, cond);
}
} else {
CpuFeatureScope scope(this, ARMv7);
sbfx(dst, src1, lsb, width, cond);
}
}
@ -346,6 +351,7 @@ void MacroAssembler::Bfi(Register dst,
mov(scratch, Operand(scratch, LSL, lsb));
orr(dst, dst, scratch);
} else {
CpuFeatureScope scope(this, ARMv7);
bfi(dst, src, lsb, width, cond);
}
}
@ -358,6 +364,7 @@ void MacroAssembler::Bfc(Register dst, Register src, int lsb, int width,
int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
bic(dst, src, Operand(mask));
} else {
CpuFeatureScope scope(this, ARMv7);
Move(dst, src, cond);
bfc(dst, lsb, width, cond);
}
@ -404,15 +411,6 @@ void MacroAssembler::Store(Register src,
void MacroAssembler::LoadRoot(Register destination,
Heap::RootListIndex index,
Condition cond) {
if (CpuFeatures::IsSupported(MOVW_MOVT_IMMEDIATE_LOADS) &&
isolate()->heap()->RootCanBeTreatedAsConstant(index) &&
!predictable_code_size()) {
// The CPU supports fast immediate values, and this root will never
// change. We will load it as a relocatable immediate value.
Handle<Object> root = isolate()->heap()->root_handle(index);
mov(destination, Operand(root), LeaveCC, cond);
return;
}
ldr(destination, MemOperand(kRootRegister, index << kPointerSizeLog2), cond);
}
@ -430,9 +428,7 @@ void MacroAssembler::InNewSpace(Register object,
Condition cond,
Label* branch) {
DCHECK(cond == eq || cond == ne);
const int mask =
(1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
CheckPageFlag(object, scratch, mask, cond, branch);
CheckPageFlag(object, scratch, MemoryChunk::kIsInNewSpaceMask, cond, branch);
}
@ -1054,6 +1050,7 @@ void MacroAssembler::VmovLow(DwVfpRegister dst, Register src) {
vmov(dst, VmovIndexLo, src);
}
}
void MacroAssembler::LslPair(Register dst_low, Register dst_high,
Register src_low, Register src_high,
Register scratch, Register shift) {
@ -1971,7 +1968,7 @@ void MacroAssembler::Allocate(int object_size,
Register scratch2,
Label* gc_required,
AllocationFlags flags) {
DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
DCHECK(object_size <= kMaxRegularHeapObjectSize);
DCHECK((flags & ALLOCATION_FOLDED) == 0);
if (!FLAG_inline_new) {
if (emit_debug_code()) {
@ -2049,7 +2046,6 @@ void MacroAssembler::Allocate(int object_size,
// point, so we cannot just use add().
DCHECK(object_size > 0);
Register source = result;
Condition cond = al;
int shift = 0;
while (object_size != 0) {
if (((object_size >> shift) & 0x03) == 0) {
@ -2060,9 +2056,8 @@ void MacroAssembler::Allocate(int object_size,
shift += 8;
Operand bits_operand(bits);
DCHECK(bits_operand.instructions_required(this) == 1);
add(result_end, source, bits_operand, LeaveCC, cond);
add(result_end, source, bits_operand);
source = result_end;
cond = cc;
}
}
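The chunking loop above (unchanged context around the simplified add) peels the constant size into 8-bit pieces that start at even bit positions, so each piece is a valid ARM rotated immediate and each add costs exactly one instruction, as the DCHECK asserts. A standalone sketch of that splitting, assuming a positive size (the function DCHECKs object_size > 0); the helper name is illustrative:

// Standalone sketch of the immediate chunking used by Allocate/FastAllocate.
#include <cstdio>
#include <vector>

std::vector<int> SplitIntoArmImmediates(int value) {  // assumes value > 0
  std::vector<int> chunks;
  int shift = 0;
  while (value != 0) {
    if (((value >> shift) & 0x03) == 0) {
      shift += 2;  // keep each chunk aligned to an even bit position
    } else {
      int bits = value & (0xff << shift);  // one 8-bit rotated immediate
      chunks.push_back(bits);
      value -= bits;
      shift += 8;
    }
  }
  return chunks;
}

int main() {
  // 0x12345 splits into 0x45, 0x2300 and 0x10000: three single-instruction adds.
  for (int chunk : SplitIntoArmImmediates(0x12345)) std::printf("0x%x\n", chunk);
  return 0;
}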
@ -2226,7 +2221,7 @@ void MacroAssembler::FastAllocate(Register object_size, Register result,
void MacroAssembler::FastAllocate(int object_size, Register result,
Register scratch1, Register scratch2,
AllocationFlags flags) {
DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
DCHECK(object_size <= kMaxRegularHeapObjectSize);
DCHECK(!AreAliased(result, scratch1, scratch2, ip));
// Make object size into bytes.
@ -2261,7 +2256,6 @@ void MacroAssembler::FastAllocate(int object_size, Register result,
// this point, so we cannot just use add().
DCHECK(object_size > 0);
Register source = result;
Condition cond = al;
int shift = 0;
while (object_size != 0) {
if (((object_size >> shift) & 0x03) == 0) {
@ -2272,9 +2266,8 @@ void MacroAssembler::FastAllocate(int object_size, Register result,
shift += 8;
Operand bits_operand(bits);
DCHECK(bits_operand.instructions_required(this) == 1);
add(result_end, source, bits_operand, LeaveCC, cond);
add(result_end, source, bits_operand);
source = result_end;
cond = cc;
}
}
@ -2650,7 +2643,8 @@ void MacroAssembler::IndexFromHash(Register hash, Register index) {
void MacroAssembler::SmiToDouble(LowDwVfpRegister value, Register smi) {
if (CpuFeatures::IsSupported(VFP3)) {
if (CpuFeatures::IsSupported(VFPv3)) {
CpuFeatureScope scope(this, VFPv3);
vmov(value.low(), smi);
vcvt_f64_s32(value, 1);
} else {
@ -2807,6 +2801,7 @@ void MacroAssembler::GetLeastBitsFromSmi(Register dst,
Register src,
int num_least_bits) {
if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size()) {
CpuFeatureScope scope(this, ARMv7);
ubfx(dst, src, kSmiTagSize, num_least_bits);
} else {
SmiUntag(dst, src);
@ -3416,6 +3411,7 @@ void MacroAssembler::CheckFor32DRegs(Register scratch) {
void MacroAssembler::SaveFPRegs(Register location, Register scratch) {
CpuFeatureScope scope(this, VFP32DREGS, CpuFeatureScope::kDontCheckSupported);
CheckFor32DRegs(scratch);
vstm(db_w, location, d16, d31, ne);
sub(location, location, Operand(16 * kDoubleSize), LeaveCC, eq);
@ -3424,12 +3420,151 @@ void MacroAssembler::SaveFPRegs(Register location, Register scratch) {
void MacroAssembler::RestoreFPRegs(Register location, Register scratch) {
CpuFeatureScope scope(this, VFP32DREGS, CpuFeatureScope::kDontCheckSupported);
CheckFor32DRegs(scratch);
vldm(ia_w, location, d0, d15);
vldm(ia_w, location, d16, d31, ne);
add(location, location, Operand(16 * kDoubleSize), LeaveCC, eq);
}
template <typename T>
void MacroAssembler::FloatMaxHelper(T result, T left, T right,
Label* out_of_line) {
// This trivial case is caught sooner, so that the out-of-line code can be
// completely avoided.
DCHECK(!left.is(right));
if (CpuFeatures::IsSupported(ARMv8)) {
CpuFeatureScope scope(this, ARMv8);
VFPCompareAndSetFlags(left, right);
b(vs, out_of_line);
vmaxnm(result, left, right);
} else {
Label done;
VFPCompareAndSetFlags(left, right);
b(vs, out_of_line);
// Avoid a conditional instruction if the result register is unique.
bool aliased_result_reg = result.is(left) || result.is(right);
Move(result, right, aliased_result_reg ? mi : al);
Move(result, left, gt);
b(ne, &done);
// Left and right are equal, but check for +/-0.
VFPCompareAndSetFlags(left, 0.0);
b(eq, out_of_line);
// The arguments are equal and not zero, so it doesn't matter which input we
// pick. We have already moved one input into the result (if it didn't
// already alias) so there's nothing more to do.
bind(&done);
}
}
template <typename T>
void MacroAssembler::FloatMaxOutOfLineHelper(T result, T left, T right) {
DCHECK(!left.is(right));
// ARMv8: At least one of left and right is a NaN.
// Anything else: At least one of left and right is a NaN, or both left and
// right are zeroes with unknown sign.
// If left and right are +/-0, select the one with the most positive sign.
// If left or right are NaN, vadd propagates the appropriate one.
vadd(result, left, right);
}
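The out-of-line max case leans on two IEEE-754 addition facts: a NaN operand propagates, and the sum of two signed zeros is -0 only when both are -0, which is exactly the maximum of the pair. A quick standalone check of that claim (plain C++, not V8 code):

// The out-of-line max case reduces to an addition; verify the +/-0 and NaN rules.
#include <cmath>
#include <cstdio>

int main() {
  std::printf("%d\n", std::signbit(0.0 + -0.0));   // 0: max(+0, -0) is +0
  std::printf("%d\n", std::signbit(-0.0 + -0.0));  // 1: max(-0, -0) is -0
  std::printf("%d\n", std::isnan(NAN + 1.0));      // 1: a NaN operand propagates
  return 0;
}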
template <typename T>
void MacroAssembler::FloatMinHelper(T result, T left, T right,
Label* out_of_line) {
// This trivial case is caught sooner, so that the out-of-line code can be
// completely avoided.
DCHECK(!left.is(right));
if (CpuFeatures::IsSupported(ARMv8)) {
CpuFeatureScope scope(this, ARMv8);
VFPCompareAndSetFlags(left, right);
b(vs, out_of_line);
vminnm(result, left, right);
} else {
Label done;
VFPCompareAndSetFlags(left, right);
b(vs, out_of_line);
// Avoid a conditional instruction if the result register is unique.
bool aliased_result_reg = result.is(left) || result.is(right);
Move(result, left, aliased_result_reg ? mi : al);
Move(result, right, gt);
b(ne, &done);
// Left and right are equal, but check for +/-0.
VFPCompareAndSetFlags(left, 0.0);
// If the arguments are equal and not zero, it doesn't matter which input we
// pick. We have already moved one input into the result (if it didn't
// already alias) so there's nothing more to do.
b(ne, &done);
// At this point, both left and right are either 0 or -0.
// We could use a single 'vorr' instruction here if we had NEON support.
// The algorithm used is -((-L) + (-R)), which is most efficiently expressed
// as -((-L) - R).
if (left.is(result)) {
DCHECK(!right.is(result));
vneg(result, left);
vsub(result, result, right);
vneg(result, result);
} else {
DCHECK(!left.is(result));
vneg(result, right);
vsub(result, result, left);
vneg(result, result);
}
bind(&done);
}
}
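The -((-L) - R) identity is the subtle part: negating the zero operands before adding flips IEEE addition's tie-break from most-positive to most-negative sign, which is what a minimum needs, and it fits in vneg/vsub/vneg with only the two live registers. A standalone check (plain C++; ZeroAwareMin is an illustrative name, not from the patch):

// min(L, R) for +/-0 inputs, computed as -((-L) - R) == -((-L) + (-R)).
#include <cmath>
#include <cstdio>

double ZeroAwareMin(double l, double r) { return -((-l) - r); }

int main() {
  std::printf("%d\n", std::signbit(ZeroAwareMin(0.0, 0.0)));    // 0: +0
  std::printf("%d\n", std::signbit(ZeroAwareMin(0.0, -0.0)));   // 1: -0
  std::printf("%d\n", std::signbit(ZeroAwareMin(-0.0, 0.0)));   // 1: -0
  std::printf("%d\n", std::signbit(ZeroAwareMin(-0.0, -0.0)));  // 1: -0
  return 0;
}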
template <typename T>
void MacroAssembler::FloatMinOutOfLineHelper(T result, T left, T right) {
DCHECK(!left.is(right));
// At least one of left and right is a NaN. Use vadd to propagate the NaN
// appropriately. +/-0 is handled inline.
vadd(result, left, right);
}
void MacroAssembler::FloatMax(SwVfpRegister result, SwVfpRegister left,
SwVfpRegister right, Label* out_of_line) {
FloatMaxHelper(result, left, right, out_of_line);
}
void MacroAssembler::FloatMin(SwVfpRegister result, SwVfpRegister left,
SwVfpRegister right, Label* out_of_line) {
FloatMinHelper(result, left, right, out_of_line);
}
void MacroAssembler::FloatMax(DwVfpRegister result, DwVfpRegister left,
DwVfpRegister right, Label* out_of_line) {
FloatMaxHelper(result, left, right, out_of_line);
}
void MacroAssembler::FloatMin(DwVfpRegister result, DwVfpRegister left,
DwVfpRegister right, Label* out_of_line) {
FloatMinHelper(result, left, right, out_of_line);
}
void MacroAssembler::FloatMaxOutOfLine(SwVfpRegister result, SwVfpRegister left,
SwVfpRegister right) {
FloatMaxOutOfLineHelper(result, left, right);
}
void MacroAssembler::FloatMinOutOfLine(SwVfpRegister result, SwVfpRegister left,
SwVfpRegister right) {
FloatMinOutOfLineHelper(result, left, right);
}
void MacroAssembler::FloatMaxOutOfLine(DwVfpRegister result, DwVfpRegister left,
DwVfpRegister right) {
FloatMaxOutOfLineHelper(result, left, right);
}
void MacroAssembler::FloatMinOutOfLine(DwVfpRegister result, DwVfpRegister left,
DwVfpRegister right) {
FloatMinOutOfLineHelper(result, left, right);
}
void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
Register first, Register second, Register scratch1, Register scratch2,

52
deps/v8/src/arm/macro-assembler-arm.h (vendored)

@ -123,6 +123,18 @@ class MacroAssembler: public Assembler {
void CallDeoptimizer(Address target);
static int CallDeoptimizerSize();
// Emit code that loads |parameter_index|'th parameter from the stack to
// the register according to the CallInterfaceDescriptor definition.
// |sp_to_caller_sp_offset_in_words| specifies the number of words pushed
// below the caller's sp.
template <class Descriptor>
void LoadParameterFromStack(
Register reg, typename Descriptor::ParameterIndices parameter_index,
int sp_to_ra_offset_in_words = 0) {
DCHECK(Descriptor::kPassLastArgsOnStack);
UNIMPLEMENTED();
}
// Emit code to discard a non-negative number of pointer-sized elements
// from the stack, clobbering only the sp register.
void Drop(int count, Condition cond = al);
@ -170,8 +182,8 @@ class MacroAssembler: public Assembler {
mov(dst, src, sbit, cond);
}
}
void Move(SwVfpRegister dst, SwVfpRegister src);
void Move(DwVfpRegister dst, DwVfpRegister src);
void Move(SwVfpRegister dst, SwVfpRegister src, Condition cond = al);
void Move(DwVfpRegister dst, DwVfpRegister src, Condition cond = al);
void Load(Register dst, const MemOperand& src, Representation r);
void Store(Register src, const MemOperand& dst, Representation r);
@ -1082,6 +1094,32 @@ class MacroAssembler: public Assembler {
// values to location, restoring [d0..(d15|d31)].
void RestoreFPRegs(Register location, Register scratch);
// Perform a floating-point min or max operation with the
// (IEEE-754-compatible) semantics of ARM64's fmin/fmax. Some cases, typically
// NaNs or +/-0.0, are expected to be rare and are handled in out-of-line
// code. The specific behaviour depends on supported instructions.
//
// These functions assume (and assert) that !left.is(right). It is permitted
// for the result to alias either input register.
void FloatMax(SwVfpRegister result, SwVfpRegister left, SwVfpRegister right,
Label* out_of_line);
void FloatMin(SwVfpRegister result, SwVfpRegister left, SwVfpRegister right,
Label* out_of_line);
void FloatMax(DwVfpRegister result, DwVfpRegister left, DwVfpRegister right,
Label* out_of_line);
void FloatMin(DwVfpRegister result, DwVfpRegister left, DwVfpRegister right,
Label* out_of_line);
// Generate out-of-line cases for the macros above.
void FloatMaxOutOfLine(SwVfpRegister result, SwVfpRegister left,
SwVfpRegister right);
void FloatMinOutOfLine(SwVfpRegister result, SwVfpRegister left,
SwVfpRegister right);
void FloatMaxOutOfLine(DwVfpRegister result, DwVfpRegister left,
DwVfpRegister right);
void FloatMinOutOfLine(DwVfpRegister result, DwVfpRegister left,
DwVfpRegister right);
// ---------------------------------------------------------------------------
// Runtime calls
@ -1513,6 +1551,16 @@ class MacroAssembler: public Assembler {
MemOperand SafepointRegisterSlot(Register reg);
MemOperand SafepointRegistersAndDoublesSlot(Register reg);
// Implementation helpers for FloatMin and FloatMax.
template <typename T>
void FloatMaxHelper(T result, T left, T right, Label* out_of_line);
template <typename T>
void FloatMinHelper(T result, T left, T right, Label* out_of_line);
template <typename T>
void FloatMaxOutOfLineHelper(T result, T left, T right);
template <typename T>
void FloatMinOutOfLineHelper(T result, T left, T right);
bool generating_stub_;
bool has_frame_;
// This handle will be patched with the code object on installation.

170
deps/v8/src/arm/simulator-arm.cc (vendored)

@ -575,8 +575,8 @@ void Simulator::set_last_debugger_input(char* input) {
last_debugger_input_ = input;
}
void Simulator::FlushICache(base::HashMap* i_cache, void* start_addr,
size_t size) {
void Simulator::FlushICache(base::CustomMatcherHashMap* i_cache,
void* start_addr, size_t size) {
intptr_t start = reinterpret_cast<intptr_t>(start_addr);
int intra_line = (start & CachePage::kLineMask);
start -= intra_line;
@ -596,7 +596,8 @@ void Simulator::FlushICache(base::HashMap* i_cache, void* start_addr,
}
}
CachePage* Simulator::GetCachePage(base::HashMap* i_cache, void* page) {
CachePage* Simulator::GetCachePage(base::CustomMatcherHashMap* i_cache,
void* page) {
base::HashMap::Entry* entry = i_cache->LookupOrInsert(page, ICacheHash(page));
if (entry->value == NULL) {
CachePage* new_page = new CachePage();
@ -607,7 +608,8 @@ CachePage* Simulator::GetCachePage(base::HashMap* i_cache, void* page) {
// Flush from start up to and not including start + size.
void Simulator::FlushOnePage(base::HashMap* i_cache, intptr_t start, int size) {
void Simulator::FlushOnePage(base::CustomMatcherHashMap* i_cache,
intptr_t start, int size) {
DCHECK(size <= CachePage::kPageSize);
DCHECK(AllOnOnePage(start, size - 1));
DCHECK((start & CachePage::kLineMask) == 0);
@ -619,7 +621,8 @@ void Simulator::FlushOnePage(base::HashMap* i_cache, intptr_t start, int size) {
memset(valid_bytemap, CachePage::LINE_INVALID, size >> CachePage::kLineShift);
}
void Simulator::CheckICache(base::HashMap* i_cache, Instruction* instr) {
void Simulator::CheckICache(base::CustomMatcherHashMap* i_cache,
Instruction* instr) {
intptr_t address = reinterpret_cast<intptr_t>(instr);
void* page = reinterpret_cast<void*>(address & (~CachePage::kPageMask));
void* line = reinterpret_cast<void*>(address & (~CachePage::kLineMask));
@ -652,7 +655,7 @@ void Simulator::Initialize(Isolate* isolate) {
Simulator::Simulator(Isolate* isolate) : isolate_(isolate) {
i_cache_ = isolate_->simulator_i_cache();
if (i_cache_ == NULL) {
i_cache_ = new base::HashMap(&ICacheMatch);
i_cache_ = new base::CustomMatcherHashMap(&ICacheMatch);
isolate_->set_simulator_i_cache(i_cache_);
}
Initialize(isolate);
@ -783,7 +786,8 @@ class Redirection {
// static
void Simulator::TearDown(base::HashMap* i_cache, Redirection* first) {
void Simulator::TearDown(base::CustomMatcherHashMap* i_cache,
Redirection* first) {
Redirection::DeleteChain(first);
if (i_cache != nullptr) {
for (base::HashMap::Entry* entry = i_cache->Start(); entry != nullptr;
@ -2886,26 +2890,24 @@ void Simulator::DecodeType3(Instruction* instr) {
return;
}
}
if (FLAG_enable_sudiv) {
if (instr->Bits(5, 4) == 0x1) {
if ((instr->Bit(22) == 0x0) && (instr->Bit(20) == 0x1)) {
// (s/u)div (in V8 notation matching ARM ISA format) rn = rm/rs
// Format(instr, "'(s/u)div'cond'b 'rn, 'rm, 'rs);
int rm = instr->RmValue();
int32_t rm_val = get_register(rm);
int rs = instr->RsValue();
int32_t rs_val = get_register(rs);
int32_t ret_val = 0;
// udiv
if (instr->Bit(21) == 0x1) {
ret_val = bit_cast<int32_t>(base::bits::UnsignedDiv32(
bit_cast<uint32_t>(rm_val), bit_cast<uint32_t>(rs_val)));
} else {
ret_val = base::bits::SignedDiv32(rm_val, rs_val);
}
set_register(rn, ret_val);
return;
if (instr->Bits(5, 4) == 0x1) {
if ((instr->Bit(22) == 0x0) && (instr->Bit(20) == 0x1)) {
// (s/u)div (in V8 notation matching ARM ISA format) rn = rm/rs
// Format(instr, "'(s/u)div'cond'b 'rn, 'rm, 'rs);
int rm = instr->RmValue();
int32_t rm_val = get_register(rm);
int rs = instr->RsValue();
int32_t rs_val = get_register(rs);
int32_t ret_val = 0;
// udiv
if (instr->Bit(21) == 0x1) {
ret_val = bit_cast<int32_t>(base::bits::UnsignedDiv32(
bit_cast<uint32_t>(rm_val), bit_cast<uint32_t>(rs_val)));
} else {
ret_val = base::bits::SignedDiv32(rm_val, rs_val);
}
set_register(rn, ret_val);
return;
}
}
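With the FLAG_enable_sudiv guard dropped, the simulator always routes sdiv/udiv through base::bits::SignedDiv32/UnsignedDiv32, whose contract mirrors the ARM instructions: division by zero yields 0 and INT_MIN / -1 truncates back to INT_MIN rather than trapping. A standalone sketch of those semantics (plain C++, not the V8 helpers; function names are illustrative):

#include <cstdint>
#include <cstdio>
#include <limits>

int32_t ArmSdiv(int32_t rm, int32_t rs) {
  if (rs == 0) return 0;  // sdiv by zero produces 0, no fault
  if (rm == std::numeric_limits<int32_t>::min() && rs == -1) return rm;
  return rm / rs;  // C++ division truncates toward zero, like sdiv
}

uint32_t ArmUdiv(uint32_t rm, uint32_t rs) { return rs == 0 ? 0 : rm / rs; }

int main() {
  std::printf("%d\n", ArmSdiv(7, -2));                                   // -3
  std::printf("%d\n", ArmSdiv(std::numeric_limits<int32_t>::min(), -1)); // INT_MIN
  std::printf("%u\n", ArmUdiv(10, 0));                                   // 0
  return 0;
}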
// Format(instr, "'memop'cond'b 'rd, ['rn, -'shift_rm]'w");
@ -3026,7 +3028,17 @@ void Simulator::DecodeType7(Instruction* instr) {
if (instr->Bit(24) == 1) {
SoftwareInterrupt(instr);
} else {
DecodeTypeVFP(instr);
switch (instr->CoprocessorValue()) {
case 10: // Fall through.
case 11:
DecodeTypeVFP(instr);
break;
case 15:
DecodeTypeCP15(instr);
break;
default:
UNIMPLEMENTED();
}
}
}
@ -3335,6 +3347,31 @@ void Simulator::DecodeTypeVFP(Instruction* instr) {
}
}
void Simulator::DecodeTypeCP15(Instruction* instr) {
DCHECK((instr->TypeValue() == 7) && (instr->Bit(24) == 0x0));
DCHECK(instr->CoprocessorValue() == 15);
if (instr->Bit(4) == 1) {
// mcr
int crn = instr->Bits(19, 16);
int crm = instr->Bits(3, 0);
int opc1 = instr->Bits(23, 21);
int opc2 = instr->Bits(7, 5);
if ((opc1 == 0) && (crn == 7)) {
// ARMv6 memory barrier operations.
// Details available in ARM DDI 0406C.b, B3-1750.
if (((crm == 10) && (opc2 == 5)) || // CP15DMB
((crm == 10) && (opc2 == 4)) || // CP15DSB
((crm == 5) && (opc2 == 4))) { // CP15ISB
// These are ignored by the simulator for now.
} else {
UNIMPLEMENTED();
}
}
} else {
UNIMPLEMENTED();
}
}
void Simulator::DecodeVMOVBetweenCoreAndSinglePrecisionRegisters(
Instruction* instr) {
@ -3750,6 +3787,21 @@ void Simulator::DecodeSpecialCondition(Instruction* instr) {
e++;
}
set_q_register(Vd, reinterpret_cast<uint64_t*>(to));
} else if ((instr->Bits(21, 16) == 0x32) && (instr->Bits(11, 7) == 0) &&
(instr->Bit(4) == 0)) {
int vd = instr->VFPDRegValue(kDoublePrecision);
int vm = instr->VFPMRegValue(kDoublePrecision);
if (instr->Bit(6) == 0) {
// vswp Dd, Dm.
uint64_t dval, mval;
get_d_register(vd, &dval);
get_d_register(vm, &mval);
set_d_register(vm, &dval);
set_d_register(vd, &mval);
} else {
// Q register vswp unimplemented.
UNIMPLEMENTED();
}
} else {
UNIMPLEMENTED();
}
@ -3848,6 +3900,7 @@ void Simulator::DecodeSpecialCondition(Instruction* instr) {
} else if (instr->SpecialValue() == 0xA && instr->Bits(22, 20) == 7) {
// dsb, dmb, isb: ignore instruction for now.
// TODO(binji): implement
// Also refer to the ARMv6 CP15 equivalents in DecodeTypeCP15.
} else {
UNIMPLEMENTED();
}
@ -3908,6 +3961,69 @@ void Simulator::DecodeSpecialCondition(Instruction* instr) {
sd_value = canonicalizeNaN(sd_value);
set_s_register_from_float(d, sd_value);
}
} else if ((instr->Opc1Value() == 0x4) && (instr->Bits(11, 9) == 0x5) &&
(instr->Bit(4) == 0x0)) {
if (instr->SzValue() == 0x1) {
int m = instr->VFPMRegValue(kDoublePrecision);
int n = instr->VFPNRegValue(kDoublePrecision);
int d = instr->VFPDRegValue(kDoublePrecision);
double dn_value = get_double_from_d_register(n);
double dm_value = get_double_from_d_register(m);
double dd_value;
if (instr->Bit(6) == 0x1) { // vminnm
if ((dn_value < dm_value) || std::isnan(dm_value)) {
dd_value = dn_value;
} else if ((dm_value < dn_value) || std::isnan(dn_value)) {
dd_value = dm_value;
} else {
DCHECK_EQ(dn_value, dm_value);
// Make sure that we pick the most negative sign for +/-0.
dd_value = std::signbit(dn_value) ? dn_value : dm_value;
}
} else { // vmaxnm
if ((dn_value > dm_value) || std::isnan(dm_value)) {
dd_value = dn_value;
} else if ((dm_value > dn_value) || std::isnan(dn_value)) {
dd_value = dm_value;
} else {
DCHECK_EQ(dn_value, dm_value);
// Make sure that we pick the most positive sign for +/-0.
dd_value = std::signbit(dn_value) ? dm_value : dn_value;
}
}
dd_value = canonicalizeNaN(dd_value);
set_d_register_from_double(d, dd_value);
} else {
int m = instr->VFPMRegValue(kSinglePrecision);
int n = instr->VFPNRegValue(kSinglePrecision);
int d = instr->VFPDRegValue(kSinglePrecision);
float sn_value = get_float_from_s_register(n);
float sm_value = get_float_from_s_register(m);
float sd_value;
if (instr->Bit(6) == 0x1) { // vminnm
if ((sn_value < sm_value) || std::isnan(sm_value)) {
sd_value = sn_value;
} else if ((sm_value < sn_value) || std::isnan(sn_value)) {
sd_value = sm_value;
} else {
DCHECK_EQ(sn_value, sm_value);
// Make sure that we pick the most negative sign for +/-0.
sd_value = std::signbit(sn_value) ? sn_value : sm_value;
}
} else { // vmaxnm
if ((sn_value > sm_value) || std::isnan(sm_value)) {
sd_value = sn_value;
} else if ((sm_value > sn_value) || std::isnan(sn_value)) {
sd_value = sm_value;
} else {
DCHECK_EQ(sn_value, sm_value);
// Make sure that we pick the most positive sign for +/-0.
sd_value = std::signbit(sn_value) ? sm_value : sn_value;
}
}
sd_value = canonicalizeNaN(sd_value);
set_s_register_from_float(d, sd_value);
}
} else {
UNIMPLEMENTED();
}
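The selection logic above mirrors vmaxnm/vminnm: when exactly one operand is NaN the other wins (the result is then canonicalized), and when the operands compare equal the sign bit breaks the +/-0 tie. A standalone model of the same rules for doubles (plain C++; canonicalization omitted, names illustrative):

#include <cmath>
#include <cstdio>

double SimMinNM(double n, double m) {
  if (n < m || std::isnan(m)) return n;
  if (m < n || std::isnan(n)) return m;
  return std::signbit(n) ? n : m;  // equal: prefer the most negative zero
}

double SimMaxNM(double n, double m) {
  if (n > m || std::isnan(m)) return n;
  if (m > n || std::isnan(n)) return m;
  return std::signbit(n) ? m : n;  // equal: prefer the most positive zero
}

int main() {
  std::printf("%g %g\n", SimMinNM(-0.0, 0.0), SimMaxNM(-0.0, 0.0));  // -0 0
  std::printf("%g %g\n", SimMinNM(NAN, 1.0), SimMaxNM(1.0, NAN));    // 1 1
  return 0;
}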

19
deps/v8/src/arm/simulator-arm.h (vendored)

@ -200,7 +200,7 @@ class Simulator {
// Call on program start.
static void Initialize(Isolate* isolate);
static void TearDown(base::HashMap* i_cache, Redirection* first);
static void TearDown(base::CustomMatcherHashMap* i_cache, Redirection* first);
// V8 generally calls into generated JS code with 5 parameters and into
// generated RegExp code with 7 parameters. This is a convenience function,
@ -222,7 +222,8 @@ class Simulator {
char* last_debugger_input() { return last_debugger_input_; }
// ICache checking.
static void FlushICache(base::HashMap* i_cache, void* start, size_t size);
static void FlushICache(base::CustomMatcherHashMap* i_cache, void* start,
size_t size);
// Returns true if pc register contains one of the 'special_values' defined
// below (bad_lr, end_sim_pc).
@ -327,6 +328,9 @@ class Simulator {
void DecodeType6(Instruction* instr);
void DecodeType7(Instruction* instr);
// CP15 coprocessor instructions.
void DecodeTypeCP15(Instruction* instr);
// Support for VFP.
void DecodeTypeVFP(Instruction* instr);
void DecodeType6CoprocessorIns(Instruction* instr);
@ -341,9 +345,12 @@ class Simulator {
void InstructionDecode(Instruction* instr);
// ICache.
static void CheckICache(base::HashMap* i_cache, Instruction* instr);
static void FlushOnePage(base::HashMap* i_cache, intptr_t start, int size);
static CachePage* GetCachePage(base::HashMap* i_cache, void* page);
static void CheckICache(base::CustomMatcherHashMap* i_cache,
Instruction* instr);
static void FlushOnePage(base::CustomMatcherHashMap* i_cache, intptr_t start,
int size);
static CachePage* GetCachePage(base::CustomMatcherHashMap* i_cache,
void* page);
// Runtime call support.
static void* RedirectExternalReference(
@ -403,7 +410,7 @@ class Simulator {
char* last_debugger_input_;
// Icache simulation
base::HashMap* i_cache_;
base::CustomMatcherHashMap* i_cache_;
// Registered breakpoints.
Instruction* break_pc_;

1
deps/v8/src/arm64/OWNERS (vendored)

@ -1 +0,0 @@
rmcilroy@chromium.org

407
deps/v8/src/arm64/code-stubs-arm64.cc (vendored)

@ -1089,6 +1089,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
__ Ldr(cp, MemOperand(cp));
__ Mov(jssp, Operand(pending_handler_sp_address));
__ Ldr(jssp, MemOperand(jssp));
__ Mov(csp, jssp);
__ Mov(fp, Operand(pending_handler_fp_address));
__ Ldr(fp, MemOperand(fp));
@ -1845,7 +1846,6 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc,
// feedback_vector : the feedback vector
// index : slot in feedback vector (smi)
Label initialize, done, miss, megamorphic, not_array_function;
Label done_initialize_count, done_increment_count;
DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
masm->isolate()->heap()->megamorphic_symbol());
@ -1868,7 +1868,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc,
Label check_allocation_site;
__ Ldr(feedback_value, FieldMemOperand(feedback, WeakCell::kValueOffset));
__ Cmp(function, feedback_value);
__ B(eq, &done_increment_count);
__ B(eq, &done);
__ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
__ B(eq, &done);
__ Ldr(feedback_map, FieldMemOperand(feedback, HeapObject::kMapOffset));
@ -1890,7 +1890,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc,
__ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, scratch1);
__ Cmp(function, scratch1);
__ B(ne, &megamorphic);
__ B(&done_increment_count);
__ B(&done);
__ Bind(&miss);
@ -1921,33 +1921,22 @@ static void GenerateRecordCallTarget(MacroAssembler* masm, Register argc,
CreateAllocationSiteStub create_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &create_stub, argc, function,
feedback_vector, index, new_target);
__ B(&done_initialize_count);
__ B(&done);
__ Bind(&not_array_function);
CreateWeakCellStub weak_cell_stub(masm->isolate());
CallStubInRecordCallTarget(masm, &weak_cell_stub, argc, function,
feedback_vector, index, new_target);
__ bind(&done_initialize_count);
// Initialize the call counter.
__ Mov(scratch1, Operand(Smi::FromInt(1)));
__ Adds(scratch2, feedback_vector,
Operand::UntagSmiAndScale(index, kPointerSizeLog2));
__ Str(scratch1,
FieldMemOperand(scratch2, FixedArray::kHeaderSize + kPointerSize));
__ b(&done);
__ Bind(&done);
__ bind(&done_increment_count);
// Increment the call count for monomorphic function calls.
// Increment the call count for all function calls.
__ Add(scratch1, feedback_vector,
Operand::UntagSmiAndScale(index, kPointerSizeLog2));
__ Add(scratch1, scratch1, Operand(FixedArray::kHeaderSize + kPointerSize));
__ Ldr(scratch2, FieldMemOperand(scratch1, 0));
__ Add(scratch2, scratch2, Operand(Smi::FromInt(1)));
__ Str(scratch2, FieldMemOperand(scratch1, 0));
__ Bind(&done);
}
@ -1995,6 +1984,17 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
__ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
// Note: feedback_vector and slot are clobbered after the call.
static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
Register slot) {
__ Add(feedback_vector, feedback_vector,
Operand::UntagSmiAndScale(slot, kPointerSizeLog2));
__ Add(feedback_vector, feedback_vector,
Operand(FixedArray::kHeaderSize + kPointerSize));
__ Ldr(slot, FieldMemOperand(feedback_vector, 0));
__ Add(slot, slot, Operand(Smi::FromInt(1)));
__ Str(slot, FieldMemOperand(feedback_vector, 0));
}
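IncrementCallCount bumps the counter stored one element past the feedback slot, and it never untags: adding Smi(1) to a tagged Smi is an ordinary tagged addition. A rough model (not V8 code), assuming the 64-bit Smi layout with the 32-bit payload in the upper half of the word; all names are illustrative:

#include <cstdint>
#include <cstdio>
#include <vector>

int64_t SmiFromInt(int32_t value) { return static_cast<int64_t>(value) << 32; }
int32_t SmiToInt(int64_t smi) { return static_cast<int32_t>(smi >> 32); }

void IncrementCallCountModel(std::vector<int64_t>* feedback, size_t slot) {
  // The counter sits at slot + 1, one pointer past the feedback entry.
  (*feedback)[slot + 1] += SmiFromInt(1);
}

int main() {
  std::vector<int64_t> feedback(4, SmiFromInt(0));
  IncrementCallCountModel(&feedback, 2);
  std::printf("%d\n", SmiToInt(feedback[3]));  // 1
  return 0;
}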
void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
// x1 - function
@ -2014,13 +2014,7 @@ void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
__ Mov(x0, Operand(arg_count()));
// Increment the call count for monomorphic function calls.
__ Add(feedback_vector, feedback_vector,
Operand::UntagSmiAndScale(index, kPointerSizeLog2));
__ Add(feedback_vector, feedback_vector,
Operand(FixedArray::kHeaderSize + kPointerSize));
__ Ldr(index, FieldMemOperand(feedback_vector, 0));
__ Add(index, index, Operand(Smi::FromInt(1)));
__ Str(index, FieldMemOperand(feedback_vector, 0));
IncrementCallCount(masm, feedback_vector, index);
// Set up arguments for the array constructor stub.
Register allocation_site_arg = feedback_vector;
@ -2038,7 +2032,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
// x1 - function
// x3 - slot id (Smi)
// x2 - vector
Label extra_checks_or_miss, call, call_function;
Label extra_checks_or_miss, call, call_function, call_count_incremented;
int argc = arg_count();
ParameterCount actual(argc);
@ -2073,16 +2067,11 @@ void CallICStub::Generate(MacroAssembler* masm) {
// convincing us that we have a monomorphic JSFunction.
__ JumpIfSmi(function, &extra_checks_or_miss);
// Increment the call count for monomorphic function calls.
__ Add(feedback_vector, feedback_vector,
Operand::UntagSmiAndScale(index, kPointerSizeLog2));
__ Add(feedback_vector, feedback_vector,
Operand(FixedArray::kHeaderSize + kPointerSize));
__ Ldr(index, FieldMemOperand(feedback_vector, 0));
__ Add(index, index, Operand(Smi::FromInt(1)));
__ Str(index, FieldMemOperand(feedback_vector, 0));
__ Bind(&call_function);
// Increment the call count for monomorphic function calls.
IncrementCallCount(masm, feedback_vector, index);
__ Mov(x0, argc);
__ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
tail_call_mode()),
@ -2106,6 +2095,7 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ jmp(&miss);
}
// TODO(mvstanton): the code below is effectively disabled. Investigate.
__ JumpIfRoot(x4, Heap::kuninitialized_symbolRootIndex, &miss);
// We are going megamorphic. If the feedback is a JSFunction, it is fine
@ -2118,6 +2108,11 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ Str(x5, FieldMemOperand(x4, FixedArray::kHeaderSize));
__ Bind(&call);
// Increment the call count for megamorphic function calls.
IncrementCallCount(masm, feedback_vector, index);
__ Bind(&call_count_incremented);
__ Mov(x0, argc);
__ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
RelocInfo::CODE_TARGET);
@ -2143,12 +2138,6 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ Cmp(x4, x5);
__ B(ne, &miss);
// Initialize the call counter.
__ Mov(x5, Smi::FromInt(1));
__ Adds(x4, feedback_vector,
Operand::UntagSmiAndScale(index, kPointerSizeLog2));
__ Str(x5, FieldMemOperand(x4, FixedArray::kHeaderSize + kPointerSize));
// Store the function. Use a stub since we need a frame for allocation.
// x2 - vector
// x3 - slot
@ -2156,9 +2145,13 @@ void CallICStub::Generate(MacroAssembler* masm) {
{
FrameScope scope(masm, StackFrame::INTERNAL);
CreateWeakCellStub create_stub(masm->isolate());
__ Push(feedback_vector, index);
__ Push(cp, function);
__ CallStub(&create_stub);
__ Pop(cp, function);
__ Pop(feedback_vector, index);
}
__ B(&call_function);
@ -2168,7 +2161,8 @@ void CallICStub::Generate(MacroAssembler* masm) {
__ bind(&miss);
GenerateMiss(masm);
__ B(&call);
// The runtime increments the call count in the vector for us.
__ B(&call_count_incremented);
}
@ -2681,321 +2675,6 @@ void CompareICStub::GenerateMiss(MacroAssembler* masm) {
}
void SubStringStub::Generate(MacroAssembler* masm) {
ASM_LOCATION("SubStringStub::Generate");
Label runtime;
// Stack frame on entry.
// lr: return address
// jssp[0]: substring "to" offset
// jssp[8]: substring "from" offset
// jssp[16]: pointer to string object
// This stub is called from the native-call %_SubString(...), so
// nothing can be assumed about the arguments. It is tested that:
// "string" is a sequential string,
// both "from" and "to" are smis, and
// 0 <= from <= to <= string.length (in debug mode.)
// If any of these assumptions fail, we call the runtime system.
static const int kToOffset = 0 * kPointerSize;
static const int kFromOffset = 1 * kPointerSize;
static const int kStringOffset = 2 * kPointerSize;
Register to = x0;
Register from = x15;
Register input_string = x10;
Register input_length = x11;
Register input_type = x12;
Register result_string = x0;
Register result_length = x1;
Register temp = x3;
__ Peek(to, kToOffset);
__ Peek(from, kFromOffset);
// Check that both from and to are smis. If not, jump to runtime.
__ JumpIfEitherNotSmi(from, to, &runtime);
__ SmiUntag(from);
__ SmiUntag(to);
// Calculate difference between from and to. If to < from, branch to runtime.
__ Subs(result_length, to, from);
__ B(mi, &runtime);
// Check from is positive.
__ Tbnz(from, kWSignBit, &runtime);
// Make sure first argument is a string.
__ Peek(input_string, kStringOffset);
__ JumpIfSmi(input_string, &runtime);
__ IsObjectJSStringType(input_string, input_type, &runtime);
Label single_char;
__ Cmp(result_length, 1);
__ B(eq, &single_char);
// Short-cut for the case of trivial substring.
Label return_x0;
__ Ldrsw(input_length,
UntagSmiFieldMemOperand(input_string, String::kLengthOffset));
__ Cmp(result_length, input_length);
__ CmovX(x0, input_string, eq);
// Return original string.
__ B(eq, &return_x0);
// Longer than original string's length or negative: unsafe arguments.
__ B(hi, &runtime);
// Shorter than original string's length: an actual substring.
// x0 to substring end character offset
// x1 result_length length of substring result
// x10 input_string pointer to input string object
// x10 unpacked_string pointer to unpacked string object
// x11 input_length length of input string
// x12 input_type instance type of input string
// x15 from substring start character offset
// Deal with different string types: update the index if necessary and put
// the underlying string into register unpacked_string.
Label underlying_unpacked, sliced_string, seq_or_external_string;
Label update_instance_type;
// If the string is not indirect, it can only be sequential or external.
STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
STATIC_ASSERT(kIsIndirectStringMask != 0);
// Test for string types, and branch/fall through to appropriate unpacking
// code.
__ Tst(input_type, kIsIndirectStringMask);
__ B(eq, &seq_or_external_string);
__ Tst(input_type, kSlicedNotConsMask);
__ B(ne, &sliced_string);
Register unpacked_string = input_string;
// Cons string. Check whether it is flat, then fetch first part.
__ Ldr(temp, FieldMemOperand(input_string, ConsString::kSecondOffset));
__ JumpIfNotRoot(temp, Heap::kempty_stringRootIndex, &runtime);
__ Ldr(unpacked_string,
FieldMemOperand(input_string, ConsString::kFirstOffset));
__ B(&update_instance_type);
__ Bind(&sliced_string);
// Sliced string. Fetch parent and correct start index by offset.
__ Ldrsw(temp,
UntagSmiFieldMemOperand(input_string, SlicedString::kOffsetOffset));
__ Add(from, from, temp);
__ Ldr(unpacked_string,
FieldMemOperand(input_string, SlicedString::kParentOffset));
__ Bind(&update_instance_type);
__ Ldr(temp, FieldMemOperand(unpacked_string, HeapObject::kMapOffset));
__ Ldrb(input_type, FieldMemOperand(temp, Map::kInstanceTypeOffset));
// Now control must go to &underlying_unpacked. Since the no code is generated
// before then we fall through instead of generating a useless branch.
__ Bind(&seq_or_external_string);
// Sequential or external string. Registers unpacked_string and input_string
// alias, so there's nothing to do here.
// Note that if code is added here, the above code must be updated.
// x0 result_string pointer to result string object (uninit)
// x1 result_length length of substring result
// x10 unpacked_string pointer to unpacked string object
// x11 input_length length of input string
// x12 input_type instance type of input string
// x15 from substring start character offset
__ Bind(&underlying_unpacked);
if (FLAG_string_slices) {
Label copy_routine;
__ Cmp(result_length, SlicedString::kMinLength);
// Short slice. Copy instead of slicing.
__ B(lt, &copy_routine);
// Allocate new sliced string. At this point we do not reload the instance
// type including the string encoding because we simply rely on the info
// provided by the original string. It does not matter if the original
// string's encoding is wrong because we always have to recheck encoding of
// the newly created string's parent anyway due to externalized strings.
Label two_byte_slice, set_slice_header;
STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
__ Tbz(input_type, MaskToBit(kStringEncodingMask), &two_byte_slice);
__ AllocateOneByteSlicedString(result_string, result_length, x3, x4,
&runtime);
__ B(&set_slice_header);
__ Bind(&two_byte_slice);
__ AllocateTwoByteSlicedString(result_string, result_length, x3, x4,
&runtime);
__ Bind(&set_slice_header);
__ SmiTag(from);
__ Str(from, FieldMemOperand(result_string, SlicedString::kOffsetOffset));
__ Str(unpacked_string,
FieldMemOperand(result_string, SlicedString::kParentOffset));
__ B(&return_x0);
__ Bind(&copy_routine);
}
// x0 result_string pointer to result string object (uninit)
// x1 result_length length of substring result
// x10 unpacked_string pointer to unpacked string object
// x11 input_length length of input string
// x12 input_type instance type of input string
// x13 unpacked_char0 pointer to first char of unpacked string (uninit)
// x13 substring_char0 pointer to first char of substring (uninit)
// x14 result_char0 pointer to first char of result (uninit)
// x15 from substring start character offset
Register unpacked_char0 = x13;
Register substring_char0 = x13;
Register result_char0 = x14;
Label two_byte_sequential, sequential_string, allocate_result;
STATIC_ASSERT(kExternalStringTag != 0);
STATIC_ASSERT(kSeqStringTag == 0);
__ Tst(input_type, kExternalStringTag);
__ B(eq, &sequential_string);
__ Tst(input_type, kShortExternalStringTag);
__ B(ne, &runtime);
__ Ldr(unpacked_char0,
FieldMemOperand(unpacked_string, ExternalString::kResourceDataOffset));
// unpacked_char0 points to the first character of the underlying string.
__ B(&allocate_result);
__ Bind(&sequential_string);
// Locate first character of underlying subject string.
STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
__ Add(unpacked_char0, unpacked_string,
SeqOneByteString::kHeaderSize - kHeapObjectTag);
__ Bind(&allocate_result);
// Sequential one-byte string. Allocate the result.
STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
__ Tbz(input_type, MaskToBit(kStringEncodingMask), &two_byte_sequential);
// Allocate and copy the resulting one-byte string.
__ AllocateOneByteString(result_string, result_length, x3, x4, x5, &runtime);
// Locate first character of substring to copy.
__ Add(substring_char0, unpacked_char0, from);
// Locate first character of result.
__ Add(result_char0, result_string,
SeqOneByteString::kHeaderSize - kHeapObjectTag);
STATIC_ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
__ CopyBytes(result_char0, substring_char0, result_length, x3, kCopyLong);
__ B(&return_x0);
// Allocate and copy the resulting two-byte string.
__ Bind(&two_byte_sequential);
__ AllocateTwoByteString(result_string, result_length, x3, x4, x5, &runtime);
// Locate first character of substring to copy.
__ Add(substring_char0, unpacked_char0, Operand(from, LSL, 1));
// Locate first character of result.
__ Add(result_char0, result_string,
SeqTwoByteString::kHeaderSize - kHeapObjectTag);
STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
__ Add(result_length, result_length, result_length);
__ CopyBytes(result_char0, substring_char0, result_length, x3, kCopyLong);
__ Bind(&return_x0);
Counters* counters = isolate()->counters();
__ IncrementCounter(counters->sub_string_native(), 1, x3, x4);
__ Drop(3);
__ Ret();
__ Bind(&runtime);
__ TailCallRuntime(Runtime::kSubString);
__ bind(&single_char);
// x1: result_length
// x10: input_string
// x12: input_type
// x15: from (untagged)
__ SmiTag(from);
StringCharAtGenerator generator(input_string, from, result_length, x0,
&runtime, &runtime, &runtime,
RECEIVER_IS_STRING);
generator.GenerateFast(masm);
__ Drop(3);
__ Ret();
generator.SkipSlow(masm, &runtime);
}
void ToStringStub::Generate(MacroAssembler* masm) {
// The ToString stub takes one argument in x0.
Label is_number;
__ JumpIfSmi(x0, &is_number);
Label not_string;
__ JumpIfObjectType(x0, x1, x1, FIRST_NONSTRING_TYPE, &not_string, hs);
// x0: receiver
// x1: receiver instance type
__ Ret();
__ Bind(&not_string);
Label not_heap_number;
__ Cmp(x1, HEAP_NUMBER_TYPE);
__ B(ne, &not_heap_number);
__ Bind(&is_number);
NumberToStringStub stub(isolate());
__ TailCallStub(&stub);
__ Bind(&not_heap_number);
Label not_oddball;
__ Cmp(x1, ODDBALL_TYPE);
__ B(ne, &not_oddball);
__ Ldr(x0, FieldMemOperand(x0, Oddball::kToStringOffset));
__ Ret();
__ Bind(&not_oddball);
__ Push(x0); // Push argument.
__ TailCallRuntime(Runtime::kToString);
}
void ToNameStub::Generate(MacroAssembler* masm) {
// The ToName stub takes one argument in x0.
Label is_number;
__ JumpIfSmi(x0, &is_number);
Label not_name;
STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
__ JumpIfObjectType(x0, x1, x1, LAST_NAME_TYPE, &not_name, hi);
// x0: receiver
// x1: receiver instance type
__ Ret();
__ Bind(&not_name);
Label not_heap_number;
__ Cmp(x1, HEAP_NUMBER_TYPE);
__ B(ne, &not_heap_number);
__ Bind(&is_number);
NumberToStringStub stub(isolate());
__ TailCallStub(&stub);
__ Bind(&not_heap_number);
Label not_oddball;
__ Cmp(x1, ODDBALL_TYPE);
__ B(ne, &not_oddball);
__ Ldr(x0, FieldMemOperand(x0, Oddball::kToStringOffset));
__ Ret();
__ Bind(&not_oddball);
__ Push(x0); // Push argument.
__ TailCallRuntime(Runtime::kToName);
}
void StringHelper::GenerateFlatOneByteStringEquals(
MacroAssembler* masm, Register left, Register right, Register scratch1,
Register scratch2, Register scratch3) {
@ -3195,16 +2874,6 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
Label need_incremental;
Label need_incremental_pop_scratch;
Register mem_chunk = regs_.scratch0();
Register counter = regs_.scratch1();
__ Bic(mem_chunk, regs_.object(), Page::kPageAlignmentMask);
__ Ldr(counter,
MemOperand(mem_chunk, MemoryChunk::kWriteBarrierCounterOffset));
__ Subs(counter, counter, 1);
__ Str(counter,
MemOperand(mem_chunk, MemoryChunk::kWriteBarrierCounterOffset));
__ B(mi, &need_incremental);
// If the object is not black we don't have to inform the incremental marker.
__ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black);
@ -3655,7 +3324,7 @@ static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback,
__ Ldr(receiver_map, MemOperand(pointer_reg, kPointerSize * 2));
// Load the map into the correct register.
DCHECK(feedback.is(VectorStoreTransitionDescriptor::MapRegister()));
DCHECK(feedback.is(StoreTransitionDescriptor::MapRegister()));
__ mov(feedback, too_far);
__ Add(receiver_map, receiver_map, Code::kHeaderSize - kHeapObjectTag);
__ Jump(receiver_map);
@ -4673,7 +4342,7 @@ void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ Bind(&allocate);
__ Cmp(x6, Operand(Page::kMaxRegularHeapObjectSize));
__ Cmp(x6, Operand(kMaxRegularHeapObjectSize));
__ B(gt, &too_big_for_new_space);
{
FrameScope scope(masm, StackFrame::INTERNAL);
@ -5093,7 +4762,7 @@ void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
// Fall back to %AllocateInNewSpace (if not too big).
Label too_big_for_new_space;
__ Bind(&allocate);
__ Cmp(x6, Operand(Page::kMaxRegularHeapObjectSize));
__ Cmp(x6, Operand(kMaxRegularHeapObjectSize));
__ B(gt, &too_big_for_new_space);
{
FrameScope scope(masm, StackFrame::INTERNAL);

deps/v8/src/arm64/interface-descriptors-arm64.cc (vendored)

@ -42,13 +42,9 @@ const Register StoreDescriptor::SlotRegister() { return x4; }
const Register StoreWithVectorDescriptor::VectorRegister() { return x3; }
const Register VectorStoreTransitionDescriptor::SlotRegister() { return x4; }
const Register VectorStoreTransitionDescriptor::VectorRegister() { return x3; }
const Register VectorStoreTransitionDescriptor::MapRegister() { return x5; }
const Register StoreTransitionDescriptor::MapRegister() { return x3; }
const Register StoreTransitionDescriptor::SlotRegister() { return x4; }
const Register StoreTransitionDescriptor::VectorRegister() { return x3; }
const Register StoreTransitionDescriptor::MapRegister() { return x5; }
const Register StoreGlobalViaContextDescriptor::SlotRegister() { return x2; }
const Register StoreGlobalViaContextDescriptor::ValueRegister() { return x0; }
@ -407,7 +403,7 @@ void ArgumentAdaptorDescriptor::InitializePlatformSpecific(
&default_descriptor);
}
void ApiCallbackDescriptorBase::InitializePlatformSpecific(
void ApiCallbackDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
static PlatformInterfaceDescriptor default_descriptor =
PlatformInterfaceDescriptor(CAN_INLINE_TARGET_ADDRESS);
@ -446,7 +442,19 @@ void InterpreterPushArgsAndConstructDescriptor::InitializePlatformSpecific(
x0, // argument count (not including receiver)
x3, // new target
x1, // constructor to call
x2 // address of the first argument
x2, // allocation site feedback if available, undefined otherwise
x4 // address of the first argument
};
data->InitializePlatformSpecific(arraysize(registers), registers);
}
void InterpreterPushArgsAndConstructArrayDescriptor::InitializePlatformSpecific(
CallInterfaceDescriptorData* data) {
Register registers[] = {
x0, // argument count (not including receiver)
x1, // target to call checked to be Array function
x2, // allocation site feedback if available, undefined otherwise
x3 // address of the first argument
};
data->InitializePlatformSpecific(arraysize(registers), registers);
}

9
deps/v8/src/arm64/macro-assembler-arm64.cc (vendored)

@ -1571,9 +1571,8 @@ void MacroAssembler::InNewSpace(Register object,
Label* branch) {
DCHECK(cond == eq || cond == ne);
UseScratchRegisterScope temps(this);
const int mask =
(1 << MemoryChunk::IN_FROM_SPACE) | (1 << MemoryChunk::IN_TO_SPACE);
CheckPageFlag(object, temps.AcquireSameSizeAs(object), mask, cond, branch);
CheckPageFlag(object, temps.AcquireSameSizeAs(object),
MemoryChunk::kIsInNewSpaceMask, cond, branch);
}
@ -3037,7 +3036,7 @@ void MacroAssembler::Allocate(int object_size,
Register scratch2,
Label* gc_required,
AllocationFlags flags) {
DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
DCHECK(object_size <= kMaxRegularHeapObjectSize);
DCHECK((flags & ALLOCATION_FOLDED) == 0);
if (!FLAG_inline_new) {
if (emit_debug_code()) {
@ -3196,7 +3195,7 @@ void MacroAssembler::Allocate(Register object_size, Register result,
void MacroAssembler::FastAllocate(int object_size, Register result,
Register scratch1, Register scratch2,
AllocationFlags flags) {
DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
DCHECK(object_size <= kMaxRegularHeapObjectSize);
DCHECK(!AreAliased(result, scratch1, scratch2));
DCHECK(result.Is64Bits() && scratch1.Is64Bits() && scratch2.Is64Bits());

12
deps/v8/src/arm64/macro-assembler-arm64.h (vendored)

@ -742,6 +742,18 @@ class MacroAssembler : public Assembler {
// csp must be aligned to 16 bytes.
void PeekPair(const CPURegister& dst1, const CPURegister& dst2, int offset);
// Emit code that loads |parameter_index|'th parameter from the stack to
// the register according to the CallInterfaceDescriptor definition.
// |sp_to_caller_sp_offset_in_words| specifies the number of words pushed
// below the caller's sp.
template <class Descriptor>
void LoadParameterFromStack(
Register reg, typename Descriptor::ParameterIndices parameter_index,
int sp_to_ra_offset_in_words = 0) {
DCHECK(Descriptor::kPassLastArgsOnStack);
UNIMPLEMENTED();
}
// Claim or drop stack space without actually accessing memory.
//
// In debug mode, both of these will write invalid data into the claimed or

3
deps/v8/src/arm64/simulator-arm64.cc (vendored)

@ -524,7 +524,8 @@ class Redirection {
// static
void Simulator::TearDown(base::HashMap* i_cache, Redirection* first) {
void Simulator::TearDown(base::CustomMatcherHashMap* i_cache,
Redirection* first) {
Redirection::DeleteChain(first);
}

5
deps/v8/src/arm64/simulator-arm64.h (vendored)

@ -151,7 +151,8 @@ typedef SimRegisterBase SimFPRegister; // v0-v31
class Simulator : public DecoderVisitor {
public:
static void FlushICache(base::HashMap* i_cache, void* start, size_t size) {
static void FlushICache(base::CustomMatcherHashMap* i_cache, void* start,
size_t size) {
USE(i_cache);
USE(start);
USE(size);
@ -167,7 +168,7 @@ class Simulator : public DecoderVisitor {
static void Initialize(Isolate* isolate);
static void TearDown(base::HashMap* i_cache, Redirection* first);
static void TearDown(base::CustomMatcherHashMap* i_cache, Redirection* first);
static Simulator* current(v8::internal::Isolate* isolate);

54
deps/v8/src/asmjs/asm-js.cc (vendored)

@ -16,9 +16,9 @@
#include "src/objects.h"
#include "src/parsing/parse-info.h"
#include "src/wasm/encoder.h"
#include "src/wasm/module-decoder.h"
#include "src/wasm/wasm-js.h"
#include "src/wasm/wasm-module-builder.h"
#include "src/wasm/wasm-module.h"
#include "src/wasm/wasm-result.h"
@ -30,29 +30,6 @@ namespace v8 {
namespace internal {
namespace {
i::MaybeHandle<i::FixedArray> CompileModule(
i::Isolate* isolate, const byte* start, const byte* end,
ErrorThrower* thrower,
internal::wasm::ModuleOrigin origin = i::wasm::kWasmOrigin) {
// Decode but avoid a redundant pass over function bodies for verification.
// Verification will happen during compilation.
i::Zone zone(isolate->allocator());
internal::wasm::ModuleResult result = internal::wasm::DecodeWasmModule(
isolate, &zone, start, end, false, origin);
i::MaybeHandle<i::FixedArray> compiled_module;
if (result.failed() && origin == internal::wasm::kAsmJsOrigin) {
thrower->Error("Asm.js converted module failed to decode");
} else if (result.failed()) {
thrower->Failed("", result);
} else {
compiled_module = result.val->CompileFunctions(isolate, thrower);
}
if (result.val) delete result.val;
return compiled_module;
}
Handle<i::Object> StdlibMathMember(i::Isolate* isolate,
Handle<JSReceiver> stdlib,
Handle<Name> name) {
@ -187,9 +164,9 @@ MaybeHandle<FixedArray> AsmJs::ConvertAsmToWasm(ParseInfo* info) {
i::Handle<i::FixedArray> foreign_globals;
auto module = builder.Run(&foreign_globals);
i::MaybeHandle<i::FixedArray> compiled =
CompileModule(info->isolate(), module->begin(), module->end(), &thrower,
internal::wasm::kAsmJsOrigin);
i::MaybeHandle<i::JSObject> compiled = wasm::CreateModuleObjectFromBytes(
info->isolate(), module->begin(), module->end(), &thrower,
internal::wasm::kAsmJsOrigin);
DCHECK(!compiled.is_null());
wasm::AsmTyper::StdlibSet uses = typer.StdlibUses();
@ -223,24 +200,25 @@ MaybeHandle<Object> AsmJs::InstantiateAsmWasm(i::Isolate* isolate,
Handle<FixedArray> wasm_data,
Handle<JSArrayBuffer> memory,
Handle<JSReceiver> foreign) {
i::Handle<i::FixedArray> compiled(i::FixedArray::cast(wasm_data->get(0)));
i::Handle<i::JSObject> module(i::JSObject::cast(wasm_data->get(0)));
i::Handle<i::FixedArray> foreign_globals(
i::FixedArray::cast(wasm_data->get(1)));
ErrorThrower thrower(isolate, "Asm.js -> WebAssembly instantiation");
i::MaybeHandle<i::JSObject> maybe_module_object =
i::wasm::WasmModule::Instantiate(isolate, compiled, foreign, memory);
i::wasm::WasmModule::Instantiate(isolate, &thrower, module, foreign,
memory);
if (maybe_module_object.is_null()) {
return MaybeHandle<Object>();
}
i::Handle<i::Name> name(isolate->factory()->InternalizeOneByteString(
STATIC_CHAR_VECTOR("__foreign_init__")));
i::Handle<i::Name> init_name(isolate->factory()->InternalizeUtf8String(
wasm::AsmWasmBuilder::foreign_init_name));
i::Handle<i::Object> module_object = maybe_module_object.ToHandleChecked();
i::MaybeHandle<i::Object> maybe_init =
i::Object::GetProperty(module_object, name);
i::Object::GetProperty(module_object, init_name);
DCHECK(!maybe_init.is_null());
i::Handle<i::Object> init = maybe_init.ToHandleChecked();
@ -265,10 +243,18 @@ MaybeHandle<Object> AsmJs::InstantiateAsmWasm(i::Isolate* isolate,
i::MaybeHandle<i::Object> retval = i::Execution::Call(
isolate, init, undefined, foreign_globals->length(), foreign_args_array);
delete[] foreign_args_array;
DCHECK(!retval.is_null());
return maybe_module_object;
i::Handle<i::Name> single_function_name(
isolate->factory()->InternalizeUtf8String(
wasm::AsmWasmBuilder::single_function_name));
i::MaybeHandle<i::Object> single_function =
i::Object::GetProperty(module_object, single_function_name);
if (!single_function.is_null() &&
!single_function.ToHandleChecked()->IsUndefined(isolate)) {
return single_function;
}
return module_object;
}
} // namespace internal

19
deps/v8/src/asmjs/asm-js.h (vendored)

@ -5,24 +5,21 @@
#ifndef V8_ASMJS_ASM_JS_H_
#define V8_ASMJS_ASM_JS_H_
#ifndef V8_SHARED
#include "src/allocation.h"
#include "src/base/hashmap.h"
#else
#include "include/v8.h"
#include "src/base/compiler-specific.h"
#endif // !V8_SHARED
#include "src/parsing/parser.h"
#include "src/globals.h"
namespace v8 {
namespace internal {
class JSArrayBuffer;
class ParseInfo;
// Interface to compile and instantiate for asmjs.
class AsmJs {
public:
static MaybeHandle<FixedArray> ConvertAsmToWasm(i::ParseInfo* info);
static bool IsStdlibValid(i::Isolate* isolate, Handle<FixedArray> wasm_data,
static MaybeHandle<FixedArray> ConvertAsmToWasm(ParseInfo* info);
static bool IsStdlibValid(Isolate* isolate, Handle<FixedArray> wasm_data,
Handle<JSReceiver> stdlib);
static MaybeHandle<Object> InstantiateAsmWasm(i::Isolate* isolate,
static MaybeHandle<Object> InstantiateAsmWasm(Isolate* isolate,
Handle<FixedArray> wasm_data,
Handle<JSArrayBuffer> memory,
Handle<JSReceiver> foreign);

69
deps/v8/src/asmjs/asm-typer.cc (vendored)

@ -17,7 +17,6 @@
#include "src/base/bits.h"
#include "src/codegen.h"
#include "src/globals.h"
#include "src/type-cache.h"
#include "src/utils.h"
#define FAIL(node, msg) \
@ -129,14 +128,13 @@ AsmTyper::AsmTyper(Isolate* isolate, Zone* zone, Script* script,
script_(script),
root_(root),
forward_definitions_(zone),
ffi_use_signatures_(zone),
stdlib_types_(zone),
stdlib_math_types_(zone),
module_info_(VariableInfo::ForSpecialSymbol(zone_, kModule)),
global_scope_(ZoneHashMap::PointersMatch,
ZoneHashMap::kDefaultHashMapCapacity,
global_scope_(ZoneHashMap::kDefaultHashMapCapacity,
ZoneAllocationPolicy(zone)),
local_scope_(ZoneHashMap::PointersMatch,
ZoneHashMap::kDefaultHashMapCapacity,
local_scope_(ZoneHashMap::kDefaultHashMapCapacity,
ZoneAllocationPolicy(zone)),
stack_limit_(isolate->stack_guard()->real_climit()),
node_types_(zone_),
@ -330,8 +328,8 @@ AsmTyper::VariableInfo* AsmTyper::ImportLookup(Property* import) {
return i->second;
}
AsmTyper::VariableInfo* AsmTyper::Lookup(Variable* variable) {
ZoneHashMap* scope = in_function_ ? &local_scope_ : &global_scope_;
AsmTyper::VariableInfo* AsmTyper::Lookup(Variable* variable) const {
const ZoneHashMap* scope = in_function_ ? &local_scope_ : &global_scope_;
ZoneHashMap::Entry* entry =
scope->Lookup(variable, ComputePointerHash(variable));
if (entry == nullptr && in_function_) {
@ -424,6 +422,8 @@ AsmType* AsmTyper::TypeOf(AstNode* node) const {
return AsmType::None();
}
AsmType* AsmTyper::TypeOf(Variable* v) const { return Lookup(v)->type(); }
AsmTyper::StandardMember AsmTyper::VariableAsStandardMember(Variable* var) {
auto* var_info = Lookup(var);
if (var_info == nullptr) {
@ -606,8 +606,10 @@ AsmType* AsmTyper::ValidateModule(FunctionLiteral* fun) {
if (estatement != nullptr) {
Assignment* assignment = estatement->expression()->AsAssignment();
if (assignment != nullptr && assignment->target()->IsVariableProxy() &&
assignment->target()->AsVariableProxy()->var()->mode() ==
CONST_LEGACY) {
assignment->target()
->AsVariableProxy()
->var()
->is_sloppy_function_name()) {
use_asm_directive = iter.Next();
}
}
@ -760,7 +762,7 @@ AsmType* AsmTyper::ValidateGlobalDeclaration(Assignment* assign) {
bool global_variable = false;
if (value->IsLiteral() || value->IsCall()) {
AsmType* type = nullptr;
RECURSE(type = VariableTypeAnnotations(value));
RECURSE(type = VariableTypeAnnotations(value, true));
target_info = new (zone_) VariableInfo(type);
target_info->set_mutability(VariableInfo::kMutableGlobal);
global_variable = true;
@ -1509,7 +1511,7 @@ AsmType* AsmTyper::ValidateCompareOperation(CompareOperation* cmp) {
}
namespace {
bool IsNegate(BinaryOperation* binop) {
bool IsInvert(BinaryOperation* binop) {
if (binop->op() != Token::BIT_XOR) {
return false;
}
@ -1524,7 +1526,7 @@ bool IsNegate(BinaryOperation* binop) {
}
bool IsUnaryMinus(BinaryOperation* binop) {
// *VIOLATION* The parser replaces uses of +x with x*1.0.
// *VIOLATION* The parser replaces uses of -x with x*-1.
if (binop->op() != Token::MUL) {
return false;
}
@ -1570,7 +1572,7 @@ AsmType* AsmTyper::ValidateBinaryOperation(BinaryOperation* expr) {
}
if (IsUnaryMinus(expr)) {
// *VIOLATION* the parser converts -x to x * -1.0.
// *VIOLATION* the parser converts -x to x * -1.
AsmType* left_type;
RECURSE(left_type = ValidateExpression(expr->left()));
SetTypeOf(expr->right(), left_type);
@ -1595,11 +1597,11 @@ AsmType* AsmTyper::ValidateBinaryOperation(BinaryOperation* expr) {
case Token::BIT_AND:
return ValidateBitwiseANDExpression(expr);
case Token::BIT_XOR:
if (IsNegate(expr)) {
if (IsInvert(expr)) {
auto* left = expr->left();
auto* left_as_binop = left->AsBinaryOperation();
if (left_as_binop != nullptr && IsNegate(left_as_binop)) {
if (left_as_binop != nullptr && IsInvert(left_as_binop)) {
// This is the special ~~ operator.
AsmType* left_type;
RECURSE(left_type = ValidateExpression(left_as_binop->left()));
@ -1660,6 +1662,12 @@ AsmType* AsmTyper::ValidateNumericLiteral(Literal* literal) {
return AsmType::Double();
}
// The parser collapses expressions like !0 and !123 to true/false.
// We therefore need to permit these as alternate versions of 0 / 1.
if (literal->raw_value()->IsTrue() || literal->raw_value()->IsFalse()) {
return AsmType::Int();
}
uint32_t value;
if (!literal->value()->ToUint32(&value)) {
int32_t value;
@ -2305,9 +2313,20 @@ AsmType* AsmTyper::ValidateCall(AsmType* return_type, Call* call) {
FAIL(call, "Calling something that's not a function.");
}
if (callee_type->AsFFIType() != nullptr &&
return_type == AsmType::Float()) {
FAIL(call, "Foreign functions can't return float.");
if (callee_type->AsFFIType() != nullptr) {
if (return_type == AsmType::Float()) {
FAIL(call, "Foreign functions can't return float.");
}
// Record FFI use signature, since the asm->wasm translator must know
// all uses up-front.
ffi_use_signatures_.emplace_back(
FFIUseSignature(call_var_proxy->var(), zone_));
FFIUseSignature* sig = &ffi_use_signatures_.back();
sig->return_type_ = return_type;
sig->arg_types_.reserve(args.size());
for (size_t i = 0; i < args.size(); ++i) {
sig->arg_types_.emplace_back(args[i]);
}
}
if (!callee_type->CanBeInvokedWith(return_type, args)) {
@ -2662,7 +2681,8 @@ AsmType* AsmTyper::ReturnTypeAnnotations(ReturnStatement* statement) {
// 5.4 VariableTypeAnnotations
// Also used for 5.5 GlobalVariableTypeAnnotations
AsmType* AsmTyper::VariableTypeAnnotations(Expression* initializer) {
AsmType* AsmTyper::VariableTypeAnnotations(Expression* initializer,
bool global) {
if (auto* literal = initializer->AsLiteral()) {
if (literal->raw_value()->ContainsDot()) {
SetTypeOf(initializer, AsmType::Double());
@ -2703,10 +2723,13 @@ AsmType* AsmTyper::VariableTypeAnnotations(Expression* initializer) {
"to fround.");
}
if (!src_expr->raw_value()->ContainsDot()) {
FAIL(initializer,
"Invalid float type annotation - expected literal argument to be a "
"floating point literal.");
// Float constants must contain dots in local, but not in globals.
if (!global) {
if (!src_expr->raw_value()->ContainsDot()) {
FAIL(initializer,
"Invalid float type annotation - expected literal argument to be a "
"floating point literal.");
}
}
return AsmType::Float();
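
The ffi_use_signatures_ list introduced above exists because, as the new comment in ValidateCall puts it, the asm-to-wasm translator must know every foreign-function use up front, so each FFI call site records its argument and return types during validation. A rough standalone model of that bookkeeping, with invented names (FfiUse, RecordFfiUse) and strings standing in for AsmType*:

// Sketch only: record one (callee, arg types, return type) tuple per FFI call
// site so a later lowering pass can emit one import per use signature.
#include <iostream>
#include <string>
#include <vector>

struct FfiUse {
  std::string callee;
  std::vector<std::string> arg_types;
  std::string return_type;
};

std::vector<FfiUse> ffi_uses;  // grows while the module is being validated

void RecordFfiUse(const std::string& callee,
                  std::vector<std::string> arg_types,
                  std::string return_type) {
  ffi_uses.push_back({callee, std::move(arg_types), std::move(return_type)});
}

int main() {
  // e.g. validating the call sites "log(x|0)|0" and "log(+y)" against an
  // imported 'log' records two distinct use signatures.
  RecordFfiUse("log", {"int"}, "int");
  RecordFfiUse("log", {"double"}, "void");
  for (const auto& use : ffi_uses)
    std::cout << use.callee << ": " << use.arg_types.size() << " arg(s) -> "
              << use.return_type << "\n";
}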

deps/v8/src/asmjs/asm-typer.h (vendored, 26 changed lines)

@ -12,12 +12,12 @@
#include "src/allocation.h"
#include "src/asmjs/asm-types.h"
#include "src/ast/ast-type-bounds.h"
#include "src/ast/ast-types.h"
#include "src/ast/ast.h"
#include "src/effects.h"
#include "src/type-info.h"
#include "src/types.h"
#include "src/zone-containers.h"
#include "src/zone.h"
#include "src/zone/zone-containers.h"
#include "src/zone/zone.h"
namespace v8 {
namespace internal {
@ -73,12 +73,26 @@ class AsmTyper final {
const char* error_message() const { return error_message_; }
AsmType* TypeOf(AstNode* node) const;
AsmType* TypeOf(Variable* v) const;
StandardMember VariableAsStandardMember(Variable* var);
typedef std::unordered_set<StandardMember, std::hash<int> > StdlibSet;
StdlibSet StdlibUses() const { return stdlib_uses_; }
// Each FFI import has a usage-site signature associated with it.
struct FFIUseSignature {
Variable* var;
ZoneVector<AsmType*> arg_types_;
AsmType* return_type_;
FFIUseSignature(Variable* v, Zone* zone)
: var(v), arg_types_(zone), return_type_(nullptr) {}
};
const ZoneVector<FFIUseSignature>& FFIUseSignatures() {
return ffi_use_signatures_;
}
private:
friend class v8::internal::wasm::AsmTyperHarnessBuilder;
@ -192,7 +206,7 @@ class AsmTyper final {
// Lookup(Delta, Gamma, x)
//
// Delta is the global_scope_ member, and Gamma, local_scope_.
VariableInfo* Lookup(Variable* variable);
VariableInfo* Lookup(Variable* variable) const;
// All of the ValidateXXX methods below return AsmType::None() in case of
// validation failure.
@ -292,8 +306,9 @@ class AsmTyper final {
// 5.2 ReturnTypeAnnotations
AsmType* ReturnTypeAnnotations(ReturnStatement* statement);
// 5.4 VariableTypeAnnotations
AsmType* VariableTypeAnnotations(Expression* initializer);
// 5.5 GlobalVariableTypeAnnotations
AsmType* VariableTypeAnnotations(Expression* initializer,
bool global = false);
AsmType* ImportExpression(Property* import);
AsmType* NewHeapView(CallNew* new_heap_view);
@ -306,6 +321,7 @@ class AsmTyper final {
AsmType* return_type_ = nullptr;
ZoneVector<VariableInfo*> forward_definitions_;
ZoneVector<FFIUseSignature> ffi_use_signatures_;
ObjectTypeMap stdlib_types_;
ObjectTypeMap stdlib_math_types_;

deps/v8/src/asmjs/asm-types.h (vendored, 4 changed lines)

@ -8,8 +8,8 @@
#include <string>
#include "src/base/macros.h"
#include "src/zone-containers.h"
#include "src/zone.h"
#include "src/zone/zone-containers.h"
#include "src/zone/zone.h"
namespace v8 {
namespace internal {

deps/v8/src/asmjs/asm-wasm-builder.cc (vendored, 452 changed lines)

@ -32,6 +32,7 @@ namespace wasm {
} while (false)
enum AsmScope { kModuleScope, kInitScope, kFuncScope, kExportScope };
enum ValueFate { kDrop, kLeaveOnStack };
struct ForeignVariable {
Handle<Name> name;
@ -43,14 +44,11 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
public:
AsmWasmBuilderImpl(Isolate* isolate, Zone* zone, FunctionLiteral* literal,
AsmTyper* typer)
: local_variables_(base::HashMap::PointersMatch,
ZoneHashMap::kDefaultHashMapCapacity,
: local_variables_(ZoneHashMap::kDefaultHashMapCapacity,
ZoneAllocationPolicy(zone)),
functions_(base::HashMap::PointersMatch,
ZoneHashMap::kDefaultHashMapCapacity,
functions_(ZoneHashMap::kDefaultHashMapCapacity,
ZoneAllocationPolicy(zone)),
global_variables_(base::HashMap::PointersMatch,
ZoneHashMap::kDefaultHashMapCapacity,
global_variables_(ZoneHashMap::kDefaultHashMapCapacity,
ZoneAllocationPolicy(zone)),
scope_(kModuleScope),
builder_(new (zone) WasmModuleBuilder(zone)),
@ -61,46 +59,43 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
typer_(typer),
breakable_blocks_(zone),
foreign_variables_(zone),
init_function_index_(0),
foreign_init_function_index_(0),
init_function_(nullptr),
foreign_init_function_(nullptr),
next_table_index_(0),
function_tables_(base::HashMap::PointersMatch,
ZoneHashMap::kDefaultHashMapCapacity,
function_tables_(ZoneHashMap::kDefaultHashMapCapacity,
ZoneAllocationPolicy(zone)),
imported_function_table_(this) {
InitializeAstVisitor(isolate);
}
void InitializeInitFunction() {
init_function_index_ = builder_->AddFunction();
FunctionSig::Builder b(zone(), 0, 0);
current_function_builder_ = builder_->FunctionAt(init_function_index_);
current_function_builder_->SetSignature(b.Build());
builder_->MarkStartFunction(init_function_index_);
current_function_builder_ = nullptr;
init_function_ = builder_->AddFunction(b.Build());
builder_->MarkStartFunction(init_function_);
}
void BuildForeignInitFunction() {
foreign_init_function_index_ = builder_->AddFunction();
foreign_init_function_ = builder_->AddFunction();
FunctionSig::Builder b(zone(), 0, foreign_variables_.size());
for (auto i = foreign_variables_.begin(); i != foreign_variables_.end();
++i) {
b.AddParam(i->type);
}
current_function_builder_ =
builder_->FunctionAt(foreign_init_function_index_);
current_function_builder_->SetExported();
foreign_init_function_->SetExported();
std::string raw_name = "__foreign_init__";
current_function_builder_->SetName(raw_name.data(),
static_cast<int>(raw_name.size()));
current_function_builder_->SetSignature(b.Build());
foreign_init_function_->SetName(
AsmWasmBuilder::foreign_init_name,
static_cast<int>(strlen(AsmWasmBuilder::foreign_init_name)));
foreign_init_function_->SetName(raw_name.data(),
static_cast<int>(raw_name.size()));
foreign_init_function_->SetSignature(b.Build());
for (size_t pos = 0; pos < foreign_variables_.size(); ++pos) {
current_function_builder_->EmitGetLocal(static_cast<uint32_t>(pos));
foreign_init_function_->EmitGetLocal(static_cast<uint32_t>(pos));
ForeignVariable* fv = &foreign_variables_[pos];
uint32_t index = LookupOrInsertGlobal(fv->var, fv->type);
current_function_builder_->EmitWithVarInt(kExprSetGlobal, index);
foreign_init_function_->EmitWithVarInt(kExprSetGlobal, index);
}
current_function_builder_ = nullptr;
}
i::Handle<i::FixedArray> GetForeignArgs() {
@ -124,8 +119,7 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
void VisitFunctionDeclaration(FunctionDeclaration* decl) {
DCHECK_EQ(kModuleScope, scope_);
DCHECK_NULL(current_function_builder_);
uint32_t index = LookupOrInsertFunction(decl->proxy()->var());
current_function_builder_ = builder_->FunctionAt(index);
current_function_builder_ = LookupOrInsertFunction(decl->proxy()->var());
scope_ = kFuncScope;
RECURSE(Visit(decl->fun()));
scope_ = kModuleScope;
@ -157,8 +151,7 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
}
}
if (scope_ == kFuncScope) {
BlockVisitor visitor(this, stmt->AsBreakableStatement(), kExprBlock,
false);
BlockVisitor visitor(this, stmt->AsBreakableStatement(), kExprBlock);
RECURSE(VisitStatements(stmt->statements()));
} else {
RECURSE(VisitStatements(stmt->statements()));
@ -171,10 +164,12 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
public:
BlockVisitor(AsmWasmBuilderImpl* builder, BreakableStatement* stmt,
WasmOpcode opcode, bool is_loop)
WasmOpcode opcode)
: builder_(builder) {
builder_->breakable_blocks_.push_back(std::make_pair(stmt, is_loop));
builder_->current_function_builder_->Emit(opcode);
builder_->breakable_blocks_.push_back(
std::make_pair(stmt, opcode == kExprLoop));
// block and loops have a type immediate.
builder_->current_function_builder_->EmitWithU8(opcode, kLocalVoid);
}
~BlockVisitor() {
builder_->current_function_builder_->Emit(kExprEnd);
@ -183,7 +178,32 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
};
void VisitExpressionStatement(ExpressionStatement* stmt) {
RECURSE(Visit(stmt->expression()));
VisitForEffect(stmt->expression());
}
void VisitForEffect(Expression* expr) {
if (expr->IsAssignment()) {
// Don't emit drops for assignments. Instead use SetLocal/GetLocal.
VisitAssignment(expr->AsAssignment(), kDrop);
return;
}
if (expr->IsCall()) {
// Only emit a drop if the call has a non-void return value.
if (VisitCallExpression(expr->AsCall()) && scope_ == kFuncScope) {
current_function_builder_->Emit(kExprDrop);
}
return;
}
if (expr->IsBinaryOperation()) {
BinaryOperation* binop = expr->AsBinaryOperation();
if (binop->op() == Token::COMMA) {
VisitForEffect(binop->left());
VisitForEffect(binop->right());
return;
}
}
RECURSE(Visit(expr));
if (scope_ == kFuncScope) current_function_builder_->Emit(kExprDrop);
}
void VisitEmptyStatement(EmptyStatement* stmt) {}
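
VisitForEffect above is the other half of the new ValueFate plumbing: expression statements evaluate their subexpression for effect only, so assignments are lowered with kDrop, calls only receive a drop when they actually produce a value, and comma expressions visit each operand for effect. A small sketch of the two assignment lowerings selected by ValueFate; the opcode strings and the EmitAssignment helper here are illustrative, not the wasm encoding.

// Sketch: how an assignment is emitted depending on whether its value is
// still needed by an enclosing expression.
#include <iostream>
#include <string>
#include <vector>

enum class ValueFate { kDrop, kLeaveOnStack };

void EmitAssignment(std::vector<std::string>* code, ValueFate fate) {
  if (fate == ValueFate::kLeaveOnStack) {
    code->push_back("tee_local 0");  // store the value and keep it on the stack
  } else {
    code->push_back("set_local 0");  // store the value and consume it
  }
}

int main() {
  std::vector<std::string> as_statement, as_subexpression;
  EmitAssignment(&as_statement, ValueFate::kDrop);              // "x = f();"
  EmitAssignment(&as_subexpression, ValueFate::kLeaveOnStack);  // "y = (x = f()) + 1;"
  std::cout << as_statement[0] << "\n" << as_subexpression[0] << "\n";
}
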
@ -193,7 +213,7 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
void VisitIfStatement(IfStatement* stmt) {
DCHECK_EQ(kFuncScope, scope_);
RECURSE(Visit(stmt->condition()));
current_function_builder_->Emit(kExprIf);
current_function_builder_->EmitWithU8(kExprIf, kLocalVoid);
// WASM ifs come with implicit blocks for both arms.
breakable_blocks_.push_back(std::make_pair(nullptr, false));
if (stmt->HasThenStatement()) {
@ -207,48 +227,26 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
breakable_blocks_.pop_back();
}
void VisitContinueStatement(ContinueStatement* stmt) {
void DoBreakOrContinue(BreakableStatement* target, bool is_continue) {
DCHECK_EQ(kFuncScope, scope_);
DCHECK_NOT_NULL(stmt->target());
int i = static_cast<int>(breakable_blocks_.size()) - 1;
int block_distance = 0;
for (; i >= 0; i--) {
for (int i = static_cast<int>(breakable_blocks_.size()) - 1; i >= 0; --i) {
auto elem = breakable_blocks_.at(i);
if (elem.first == stmt->target()) {
DCHECK(elem.second);
break;
} else if (elem.second) {
block_distance += 2;
} else {
block_distance += 1;
if (elem.first == target && elem.second == is_continue) {
int block_distance = static_cast<int>(breakable_blocks_.size() - i - 1);
current_function_builder_->Emit(kExprBr);
current_function_builder_->EmitVarInt(block_distance);
return;
}
}
DCHECK(i >= 0);
current_function_builder_->EmitWithU8(kExprBr, ARITY_0);
current_function_builder_->EmitVarInt(block_distance);
UNREACHABLE(); // statement not found
}
void VisitContinueStatement(ContinueStatement* stmt) {
DoBreakOrContinue(stmt->target(), true);
}
void VisitBreakStatement(BreakStatement* stmt) {
DCHECK_EQ(kFuncScope, scope_);
DCHECK_NOT_NULL(stmt->target());
int i = static_cast<int>(breakable_blocks_.size()) - 1;
int block_distance = 0;
for (; i >= 0; i--) {
auto elem = breakable_blocks_.at(i);
if (elem.first == stmt->target()) {
if (elem.second) {
block_distance++;
}
break;
} else if (elem.second) {
block_distance += 2;
} else {
block_distance += 1;
}
}
DCHECK(i >= 0);
current_function_builder_->EmitWithU8(kExprBr, ARITY_0);
current_function_builder_->EmitVarInt(block_distance);
DoBreakOrContinue(stmt->target(), false);
}
void VisitReturnStatement(ReturnStatement* stmt) {
@ -258,9 +256,7 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
scope_ = kModuleScope;
} else if (scope_ == kFuncScope) {
RECURSE(Visit(stmt->expression()));
uint8_t arity =
TypeOf(stmt->expression()) == kAstStmt ? ARITY_0 : ARITY_1;
current_function_builder_->EmitWithU8(kExprReturn, arity);
current_function_builder_->Emit(kExprReturn);
} else {
UNREACHABLE();
}
@ -276,7 +272,7 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
VisitVariableProxy(tag);
current_function_builder_->EmitI32Const(node->begin);
current_function_builder_->Emit(kExprI32LtS);
current_function_builder_->Emit(kExprIf);
current_function_builder_->EmitWithU8(kExprIf, kLocalVoid);
if_depth++;
breakable_blocks_.push_back(std::make_pair(nullptr, false));
HandleCase(node->left, case_to_block, tag, default_block, if_depth);
@ -286,7 +282,7 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
VisitVariableProxy(tag);
current_function_builder_->EmitI32Const(node->end);
current_function_builder_->Emit(kExprI32GtS);
current_function_builder_->Emit(kExprIf);
current_function_builder_->EmitWithU8(kExprIf, kLocalVoid);
if_depth++;
breakable_blocks_.push_back(std::make_pair(nullptr, false));
HandleCase(node->right, case_to_block, tag, default_block, if_depth);
@ -296,9 +292,9 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
VisitVariableProxy(tag);
current_function_builder_->EmitI32Const(node->begin);
current_function_builder_->Emit(kExprI32Eq);
current_function_builder_->Emit(kExprIf);
current_function_builder_->EmitWithU8(kExprIf, kLocalVoid);
DCHECK(case_to_block.find(node->begin) != case_to_block.end());
current_function_builder_->EmitWithU8(kExprBr, ARITY_0);
current_function_builder_->Emit(kExprBr);
current_function_builder_->EmitVarInt(1 + if_depth +
case_to_block[node->begin]);
current_function_builder_->Emit(kExprEnd);
@ -310,22 +306,22 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
} else {
VisitVariableProxy(tag);
}
current_function_builder_->EmitWithU8(kExprBrTable, ARITY_0);
current_function_builder_->Emit(kExprBrTable);
current_function_builder_->EmitVarInt(node->end - node->begin + 1);
for (int v = node->begin; v <= node->end; v++) {
for (int v = node->begin; v <= node->end; ++v) {
if (case_to_block.find(v) != case_to_block.end()) {
byte break_code[] = {BR_TARGET(if_depth + case_to_block[v])};
current_function_builder_->EmitCode(break_code, sizeof(break_code));
uint32_t target = if_depth + case_to_block[v];
current_function_builder_->EmitVarInt(target);
} else {
byte break_code[] = {BR_TARGET(if_depth + default_block)};
current_function_builder_->EmitCode(break_code, sizeof(break_code));
uint32_t target = if_depth + default_block;
current_function_builder_->EmitVarInt(target);
}
if (v == kMaxInt) {
break;
}
}
byte break_code[] = {BR_TARGET(if_depth + default_block)};
current_function_builder_->EmitCode(break_code, sizeof(break_code));
uint32_t target = if_depth + default_block;
current_function_builder_->EmitVarInt(target);
}
while (if_depth-- != prev_if_depth) {
@ -342,14 +338,14 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
if (case_count == 0) {
return;
}
BlockVisitor visitor(this, stmt->AsBreakableStatement(), kExprBlock, false);
BlockVisitor visitor(this, stmt->AsBreakableStatement(), kExprBlock);
ZoneVector<BlockVisitor*> blocks(zone_);
ZoneVector<int32_t> cases(zone_);
ZoneMap<int, unsigned int> case_to_block(zone_);
bool has_default = false;
for (int i = case_count - 1; i >= 0; i--) {
for (int i = case_count - 1; i >= 0; --i) {
CaseClause* clause = clauses->at(i);
blocks.push_back(new BlockVisitor(this, nullptr, kExprBlock, false));
blocks.push_back(new BlockVisitor(this, nullptr, kExprBlock));
if (!clause->is_default()) {
Literal* label = clause->label()->AsLiteral();
Handle<Object> value = label->value();
@ -366,12 +362,12 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
}
if (!has_default || case_count > 1) {
int default_block = has_default ? case_count - 1 : case_count;
BlockVisitor switch_logic_block(this, nullptr, kExprBlock, false);
BlockVisitor switch_logic_block(this, nullptr, kExprBlock);
CaseNode* root = OrderCases(&cases, zone_);
HandleCase(root, case_to_block, tag, default_block, 0);
if (root->left != nullptr || root->right != nullptr ||
root->begin == root->end) {
current_function_builder_->EmitWithU8(kExprBr, ARITY_0);
current_function_builder_->Emit(kExprBr);
current_function_builder_->EmitVarInt(default_block);
}
}
@ -388,22 +384,24 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
void VisitDoWhileStatement(DoWhileStatement* stmt) {
DCHECK_EQ(kFuncScope, scope_);
BlockVisitor visitor(this, stmt->AsBreakableStatement(), kExprLoop, true);
BlockVisitor block(this, stmt->AsBreakableStatement(), kExprBlock);
BlockVisitor loop(this, stmt->AsBreakableStatement(), kExprLoop);
RECURSE(Visit(stmt->body()));
RECURSE(Visit(stmt->cond()));
current_function_builder_->Emit(kExprIf);
current_function_builder_->EmitWithU8U8(kExprBr, ARITY_0, 1);
current_function_builder_->EmitWithU8(kExprIf, kLocalVoid);
current_function_builder_->EmitWithU8(kExprBr, 1);
current_function_builder_->Emit(kExprEnd);
}
void VisitWhileStatement(WhileStatement* stmt) {
DCHECK_EQ(kFuncScope, scope_);
BlockVisitor visitor(this, stmt->AsBreakableStatement(), kExprLoop, true);
BlockVisitor block(this, stmt->AsBreakableStatement(), kExprBlock);
BlockVisitor loop(this, stmt->AsBreakableStatement(), kExprLoop);
RECURSE(Visit(stmt->cond()));
breakable_blocks_.push_back(std::make_pair(nullptr, false));
current_function_builder_->Emit(kExprIf);
current_function_builder_->EmitWithU8(kExprIf, kLocalVoid);
RECURSE(Visit(stmt->body()));
current_function_builder_->EmitWithU8U8(kExprBr, ARITY_0, 1);
current_function_builder_->EmitWithU8(kExprBr, 1);
current_function_builder_->Emit(kExprEnd);
breakable_blocks_.pop_back();
}
@ -413,13 +411,13 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
if (stmt->init() != nullptr) {
RECURSE(Visit(stmt->init()));
}
BlockVisitor visitor(this, stmt->AsBreakableStatement(), kExprLoop, true);
BlockVisitor block(this, stmt->AsBreakableStatement(), kExprBlock);
BlockVisitor loop(this, stmt->AsBreakableStatement(), kExprLoop);
if (stmt->cond() != nullptr) {
RECURSE(Visit(stmt->cond()));
current_function_builder_->Emit(kExprI32Eqz);
current_function_builder_->Emit(kExprIf);
current_function_builder_->Emit(kExprNop);
current_function_builder_->EmitWithU8U8(kExprBr, ARITY_0, 2);
current_function_builder_->EmitWithU8(kExprIf, kLocalVoid);
current_function_builder_->EmitWithU8(kExprBr, 2);
current_function_builder_->Emit(kExprEnd);
}
if (stmt->body() != nullptr) {
@ -428,8 +426,7 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
if (stmt->next() != nullptr) {
RECURSE(Visit(stmt->next()));
}
current_function_builder_->Emit(kExprNop);
current_function_builder_->EmitWithU8U8(kExprBr, ARITY_0, 0);
current_function_builder_->EmitWithU8(kExprBr, 0);
}
void VisitForInStatement(ForInStatement* stmt) { UNREACHABLE(); }
@ -446,19 +443,13 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
DeclarationScope* scope = expr->scope();
if (scope_ == kFuncScope) {
if (auto* func_type = typer_->TypeOf(expr)->AsFunctionType()) {
// Build the signature for the function.
LocalType return_type = TypeFrom(func_type->ReturnType());
// Add the parameters for the function.
const auto& arguments = func_type->Arguments();
FunctionSig::Builder b(zone(), return_type == kAstStmt ? 0 : 1,
arguments.size());
if (return_type != kAstStmt) b.AddReturn(return_type);
for (int i = 0; i < expr->parameter_count(); ++i) {
LocalType type = TypeFrom(arguments[i]);
DCHECK_NE(kAstStmt, type);
b.AddParam(type);
InsertParameter(scope->parameter(i), type, i);
}
current_function_builder_->SetSignature(b.Build());
} else {
UNREACHABLE();
}
@ -476,7 +467,24 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
RECURSE(Visit(expr->condition()));
// WASM ifs come with implicit blocks for both arms.
breakable_blocks_.push_back(std::make_pair(nullptr, false));
current_function_builder_->Emit(kExprIf);
LocalTypeCode type;
switch (TypeOf(expr)) {
case kAstI32:
type = kLocalI32;
break;
case kAstI64:
type = kLocalI64;
break;
case kAstF32:
type = kLocalF32;
break;
case kAstF64:
type = kLocalF64;
break;
default:
UNREACHABLE();
}
current_function_builder_->EmitWithU8(kExprIf, type);
RECURSE(Visit(expr->then_expression()));
current_function_builder_->Emit(kExprElse);
RECURSE(Visit(expr->else_expression()));
@ -551,12 +559,22 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
current_function_builder_->EmitGetLocal(
LookupOrInsertLocal(var, var_type));
}
} else if (scope_ == kExportScope) {
Variable* var = expr->var();
DCHECK(var->is_function());
WasmFunctionBuilder* function = LookupOrInsertFunction(var);
function->SetExported();
function->SetName(
AsmWasmBuilder::single_function_name,
static_cast<int>(strlen(AsmWasmBuilder::single_function_name)));
}
}
void VisitLiteral(Literal* expr) {
Handle<Object> value = expr->value();
if (!value->IsNumber() || (scope_ != kFuncScope && scope_ != kInitScope)) {
if (!(value->IsNumber() || expr->raw_value()->IsTrue() ||
expr->raw_value()->IsFalse()) ||
(scope_ != kFuncScope && scope_ != kInitScope)) {
return;
}
AsmType* type = typer_->TypeOf(expr);
@ -577,10 +595,40 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
int32_t i = static_cast<int32_t>(u);
byte code[] = {WASM_I32V(i)};
current_function_builder_->EmitCode(code, sizeof(code));
} else if (type->IsA(AsmType::Int())) {
// The parser can collapse !0, !1 etc to true / false.
// Allow these as int literals.
if (expr->raw_value()->IsTrue()) {
byte code[] = {WASM_I32V(1)};
current_function_builder_->EmitCode(code, sizeof(code));
} else if (expr->raw_value()->IsFalse()) {
byte code[] = {WASM_I32V(0)};
current_function_builder_->EmitCode(code, sizeof(code));
} else if (expr->raw_value()->IsNumber()) {
// This can happen when -x becomes x * -1 (due to the parser).
int32_t i = 0;
if (!value->ToInt32(&i) || i != -1) {
UNREACHABLE();
}
byte code[] = {WASM_I32V(i)};
current_function_builder_->EmitCode(code, sizeof(code));
} else {
UNREACHABLE();
}
} else if (type->IsA(AsmType::Double())) {
// TODO(bradnelson): Pattern match the case where negation occurs and
// emit f64.neg instead.
double val = expr->raw_value()->AsNumber();
byte code[] = {WASM_F64(val)};
current_function_builder_->EmitCode(code, sizeof(code));
} else if (type->IsA(AsmType::Float())) {
// This can happen when -fround(x) becomes fround(x) * 1.0[float]
// (due to the parser).
// TODO(bradnelson): Pattern match this and emit f32.neg instead.
double val = expr->raw_value()->AsNumber();
DCHECK_EQ(-1.0, val);
byte code[] = {WASM_F32(val)};
current_function_builder_->EmitCode(code, sizeof(code));
} else {
UNREACHABLE();
}
@ -601,11 +649,10 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
DCHECK(name->IsPropertyName());
const AstRawString* raw_name = name->AsRawPropertyName();
if (var->is_function()) {
uint32_t index = LookupOrInsertFunction(var);
builder_->FunctionAt(index)->SetExported();
builder_->FunctionAt(index)->SetName(
reinterpret_cast<const char*>(raw_name->raw_data()),
raw_name->length());
WasmFunctionBuilder* function = LookupOrInsertFunction(var);
function->SetExported();
function->SetName(reinterpret_cast<const char*>(raw_name->raw_data()),
raw_name->length());
}
}
}
@ -613,7 +660,7 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
void VisitArrayLiteral(ArrayLiteral* expr) { UNREACHABLE(); }
void LoadInitFunction() {
current_function_builder_ = builder_->FunctionAt(init_function_index_);
current_function_builder_ = init_function_;
scope_ = kInitScope;
}
@ -642,7 +689,8 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
for (int i = 0; i < funcs->values()->length(); ++i) {
VariableProxy* func = funcs->values()->at(i)->AsVariableProxy();
DCHECK_NOT_NULL(func);
builder_->AddIndirectFunction(LookupOrInsertFunction(func->var()));
builder_->AddIndirectFunction(
LookupOrInsertFunction(func->var())->func_index());
}
}
@ -684,20 +732,20 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
public:
explicit ImportedFunctionTable(AsmWasmBuilderImpl* builder)
: table_(base::HashMap::PointersMatch,
ZoneHashMap::kDefaultHashMapCapacity,
: table_(ZoneHashMap::kDefaultHashMapCapacity,
ZoneAllocationPolicy(builder->zone())),
builder_(builder) {}
void AddImport(Variable* v, const char* name, int name_length) {
ImportedFunctionIndices* indices = new (builder_->zone())
ImportedFunctionIndices(name, name_length, builder_->zone());
ZoneHashMap::Entry* entry = table_.LookupOrInsert(
auto* entry = table_.LookupOrInsert(
v, ComputePointerHash(v), ZoneAllocationPolicy(builder_->zone()));
entry->value = indices;
}
uint32_t GetFunctionIndex(Variable* v, FunctionSig* sig) {
// Get a function's index (or allocate if new).
uint32_t LookupOrInsertImport(Variable* v, FunctionSig* sig) {
ZoneHashMap::Entry* entry = table_.Lookup(v, ComputePointerHash(v));
DCHECK_NOT_NULL(entry);
ImportedFunctionIndices* indices =
@ -774,7 +822,7 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
RECURSE(Visit(value));
}
void EmitAssignment(Assignment* expr, MachineType type) {
void EmitAssignment(Assignment* expr, MachineType type, ValueFate fate) {
// Match the left hand side of the assignment.
VariableProxy* target_var = expr->target()->AsVariableProxy();
if (target_var != nullptr) {
@ -783,11 +831,19 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
LocalType var_type = TypeOf(expr);
DCHECK_NE(kAstStmt, var_type);
if (var->IsContextSlot()) {
current_function_builder_->EmitWithVarInt(
kExprSetGlobal, LookupOrInsertGlobal(var, var_type));
uint32_t index = LookupOrInsertGlobal(var, var_type);
current_function_builder_->EmitWithVarInt(kExprSetGlobal, index);
if (fate == kLeaveOnStack) {
current_function_builder_->EmitWithVarInt(kExprGetGlobal, index);
}
} else {
current_function_builder_->EmitSetLocal(
LookupOrInsertLocal(var, var_type));
if (fate == kDrop) {
current_function_builder_->EmitSetLocal(
LookupOrInsertLocal(var, var_type));
} else {
current_function_builder_->EmitTeeLocal(
LookupOrInsertLocal(var, var_type));
}
}
}
@ -799,6 +855,7 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
->IsA(AsmType::Float32Array())) {
current_function_builder_->Emit(kExprF32ConvertF64);
}
// Note that unlike StoreMem, AsmjsStoreMem ignores out-of-bounds writes.
WasmOpcode opcode;
if (type == MachineType::Int8()) {
opcode = kExprI32AsmjsStoreMem8;
@ -820,6 +877,10 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
UNREACHABLE();
}
current_function_builder_->Emit(opcode);
if (fate == kDrop) {
// Asm.js stores to memory leave their result on the stack.
current_function_builder_->Emit(kExprDrop);
}
}
if (target_var == nullptr && target_prop == nullptr) {
@ -828,12 +889,16 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
}
void VisitAssignment(Assignment* expr) {
VisitAssignment(expr, kLeaveOnStack);
}
void VisitAssignment(Assignment* expr, ValueFate fate) {
bool as_init = false;
if (scope_ == kModuleScope) {
// Skip extra assignment inserted by the parser when in this form:
// (function Module(a, b, c) {... })
if (expr->target()->IsVariableProxy() &&
expr->target()->AsVariableProxy()->var()->mode() == CONST_LEGACY) {
expr->target()->AsVariableProxy()->var()->is_sloppy_function_name()) {
return;
}
Property* prop = expr->value()->AsProperty();
@ -873,12 +938,12 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
}
if (as_init) LoadInitFunction();
MachineType mtype;
MachineType mtype = MachineType::None();
bool is_nop = false;
EmitAssignmentLhs(expr->target(), &mtype);
EmitAssignmentRhs(expr->target(), expr->value(), &is_nop);
if (!is_nop) {
EmitAssignment(expr, mtype);
EmitAssignment(expr, mtype, fate);
}
if (as_init) UnLoadInitFunction();
}
@ -1099,24 +1164,24 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
}
case AsmTyper::kMathAbs: {
if (call_type == kAstI32) {
uint32_t tmp = current_function_builder_->AddLocal(kAstI32);
WasmTemporary tmp(current_function_builder_, kAstI32);
// if set_local(tmp, x) < 0
Visit(call->arguments()->at(0));
current_function_builder_->EmitSetLocal(tmp);
current_function_builder_->EmitTeeLocal(tmp.index());
byte code[] = {WASM_I8(0)};
current_function_builder_->EmitCode(code, sizeof(code));
current_function_builder_->Emit(kExprI32LtS);
current_function_builder_->Emit(kExprIf);
current_function_builder_->EmitWithU8(kExprIf, kLocalI32);
// then (0 - tmp)
current_function_builder_->EmitCode(code, sizeof(code));
current_function_builder_->EmitGetLocal(tmp);
current_function_builder_->EmitGetLocal(tmp.index());
current_function_builder_->Emit(kExprI32Sub);
// else tmp
current_function_builder_->Emit(kExprElse);
current_function_builder_->EmitGetLocal(tmp);
current_function_builder_->EmitGetLocal(tmp.index());
// end
current_function_builder_->Emit(kExprEnd);
@ -1134,25 +1199,25 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
case AsmTyper::kMathMin: {
// TODO(bradnelson): Change wasm to match Math.min in asm.js mode.
if (call_type == kAstI32) {
uint32_t tmp_x = current_function_builder_->AddLocal(kAstI32);
uint32_t tmp_y = current_function_builder_->AddLocal(kAstI32);
WasmTemporary tmp_x(current_function_builder_, kAstI32);
WasmTemporary tmp_y(current_function_builder_, kAstI32);
// if set_local(tmp_x, x) < set_local(tmp_y, y)
Visit(call->arguments()->at(0));
current_function_builder_->EmitSetLocal(tmp_x);
current_function_builder_->EmitTeeLocal(tmp_x.index());
Visit(call->arguments()->at(1));
current_function_builder_->EmitSetLocal(tmp_y);
current_function_builder_->EmitTeeLocal(tmp_y.index());
current_function_builder_->Emit(kExprI32LeS);
current_function_builder_->Emit(kExprIf);
current_function_builder_->EmitWithU8(kExprIf, kLocalI32);
// then tmp_x
current_function_builder_->EmitGetLocal(tmp_x);
current_function_builder_->EmitGetLocal(tmp_x.index());
// else tmp_y
current_function_builder_->Emit(kExprElse);
current_function_builder_->EmitGetLocal(tmp_y);
current_function_builder_->EmitGetLocal(tmp_y.index());
current_function_builder_->Emit(kExprEnd);
} else if (call_type == kAstF32) {
@ -1169,26 +1234,26 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
case AsmTyper::kMathMax: {
// TODO(bradnelson): Change wasm to match Math.max in asm.js mode.
if (call_type == kAstI32) {
uint32_t tmp_x = current_function_builder_->AddLocal(kAstI32);
uint32_t tmp_y = current_function_builder_->AddLocal(kAstI32);
WasmTemporary tmp_x(current_function_builder_, kAstI32);
WasmTemporary tmp_y(current_function_builder_, kAstI32);
// if set_local(tmp_x, x) < set_local(tmp_y, y)
Visit(call->arguments()->at(0));
current_function_builder_->EmitSetLocal(tmp_x);
current_function_builder_->EmitTeeLocal(tmp_x.index());
Visit(call->arguments()->at(1));
current_function_builder_->EmitSetLocal(tmp_y);
current_function_builder_->EmitTeeLocal(tmp_y.index());
current_function_builder_->Emit(kExprI32LeS);
current_function_builder_->Emit(kExprIf);
current_function_builder_->EmitWithU8(kExprIf, kLocalI32);
// then tmp_y
current_function_builder_->EmitGetLocal(tmp_y);
current_function_builder_->EmitGetLocal(tmp_y.index());
// else tmp_x
current_function_builder_->Emit(kExprElse);
current_function_builder_->EmitGetLocal(tmp_x);
current_function_builder_->EmitGetLocal(tmp_x.index());
current_function_builder_->Emit(kExprEnd);
} else if (call_type == kAstF32) {
@ -1267,18 +1332,23 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
}
}
void VisitCall(Call* expr) {
void VisitCall(Call* expr) { VisitCallExpression(expr); }
bool VisitCallExpression(Call* expr) {
Call::CallType call_type = expr->GetCallType();
bool returns_value = true;
switch (call_type) {
case Call::OTHER_CALL: {
DCHECK_EQ(kFuncScope, scope_);
VariableProxy* proxy = expr->expression()->AsVariableProxy();
if (proxy != nullptr) {
DCHECK(kFuncScope == scope_ ||
typer_->VariableAsStandardMember(proxy->var()) ==
AsmTyper::kMathFround);
if (VisitStdlibFunction(expr, proxy)) {
return;
return true;
}
}
uint32_t index;
DCHECK(kFuncScope == scope_);
VariableProxy* vp = expr->expression()->AsVariableProxy();
DCHECK_NOT_NULL(vp);
if (typer_->TypeOf(vp)->AsFFIType() != nullptr) {
@ -1288,22 +1358,24 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
args->length());
if (return_type != kAstStmt) {
sig.AddReturn(return_type);
} else {
returns_value = false;
}
for (int i = 0; i < args->length(); ++i) {
sig.AddParam(TypeOf(args->at(i)));
}
index =
imported_function_table_.GetFunctionIndex(vp->var(), sig.Build());
VisitCallArgs(expr);
current_function_builder_->Emit(kExprCallImport);
current_function_builder_->EmitVarInt(expr->arguments()->length());
current_function_builder_->EmitVarInt(index);
} else {
index = LookupOrInsertFunction(vp->var());
uint32_t index = imported_function_table_.LookupOrInsertImport(
vp->var(), sig.Build());
VisitCallArgs(expr);
current_function_builder_->Emit(kExprCallFunction);
current_function_builder_->EmitVarInt(expr->arguments()->length());
current_function_builder_->EmitVarInt(index);
} else {
WasmFunctionBuilder* function = LookupOrInsertFunction(vp->var());
VisitCallArgs(expr);
current_function_builder_->Emit(kExprCallFunction);
current_function_builder_->EmitDirectCallIndex(
function->func_index());
returns_value = function->signature()->return_count() > 0;
}
break;
}
@ -1314,18 +1386,28 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
VariableProxy* var = p->obj()->AsVariableProxy();
DCHECK_NOT_NULL(var);
FunctionTableIndices* indices = LookupFunctionTable(var->var());
RECURSE(Visit(p->key()));
Visit(p->key()); // TODO(titzer): should use RECURSE()
// We have to use a temporary for the correct order of evaluation.
current_function_builder_->EmitI32Const(indices->start_index);
current_function_builder_->Emit(kExprI32Add);
WasmTemporary tmp(current_function_builder_, kAstI32);
current_function_builder_->EmitSetLocal(tmp.index());
VisitCallArgs(expr);
current_function_builder_->EmitGetLocal(tmp.index());
current_function_builder_->Emit(kExprCallIndirect);
current_function_builder_->EmitVarInt(expr->arguments()->length());
current_function_builder_->EmitVarInt(indices->signature_index);
returns_value =
builder_->GetSignature(indices->signature_index)->return_count() >
0;
break;
}
default:
UNREACHABLE();
}
return returns_value;
}
void VisitCallNew(CallNew* expr) { UNREACHABLE(); }
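
The indirect-call hunk above evaluates the table index into a WasmTemporary before the arguments and only re-pushes it at the end, because JavaScript evaluates the callee expression first while call_indirect wants the index on top of the stack after the arguments. A standalone sketch of that reordering; EvalIndexExpr, EvalArg and the printed trace are invented for the example.

// Sketch: keep source evaluation order (index before arguments) while still
// pushing the index last, as call_indirect expects.
#include <iostream>
#include <vector>

std::vector<int> stack;          // the value stack
int EvalIndexExpr() { std::cout << "eval index\n"; return 3; }
int EvalArg(int i)  { std::cout << "eval arg " << i << "\n"; return i * 10; }

int main() {
  int tmp = EvalIndexExpr();     // evaluate the callee index first (JS order)
  stack.push_back(EvalArg(0));   // then the arguments, left to right
  stack.push_back(EvalArg(1));
  stack.push_back(tmp);          // finally re-push the saved index for the call
  std::cout << "call_indirect with index " << stack.back() << "\n";
}
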
@ -1511,16 +1593,13 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
RECURSE(Visit(GetLeft(expr)));
} else {
if (expr->op() == Token::COMMA) {
current_function_builder_->Emit(kExprBlock);
RECURSE(VisitForEffect(expr->left()));
RECURSE(Visit(expr->right()));
return;
}
RECURSE(Visit(expr->left()));
RECURSE(Visit(expr->right()));
if (expr->op() == Token::COMMA) {
current_function_builder_->Emit(kExprEnd);
}
switch (expr->op()) {
BINOP_CASE(Token::ADD, Add, NON_SIGNED_BINOP, true);
BINOP_CASE(Token::SUB, Sub, NON_SIGNED_BINOP, true);
@ -1720,18 +1799,33 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
return (reinterpret_cast<IndexContainer*>(entry->value))->index;
}
uint32_t LookupOrInsertFunction(Variable* v) {
WasmFunctionBuilder* LookupOrInsertFunction(Variable* v) {
DCHECK_NOT_NULL(builder_);
ZoneHashMap::Entry* entry = functions_.Lookup(v, ComputePointerHash(v));
if (entry == nullptr) {
uint32_t index = builder_->AddFunction();
IndexContainer* container = new (zone()) IndexContainer();
container->index = index;
auto* func_type = typer_->TypeOf(v)->AsFunctionType();
DCHECK_NOT_NULL(func_type);
// Build the signature for the function.
LocalType return_type = TypeFrom(func_type->ReturnType());
const auto& arguments = func_type->Arguments();
FunctionSig::Builder b(zone(), return_type == kAstStmt ? 0 : 1,
arguments.size());
if (return_type != kAstStmt) b.AddReturn(return_type);
for (int i = 0; i < static_cast<int>(arguments.size()); ++i) {
LocalType type = TypeFrom(arguments[i]);
DCHECK_NE(kAstStmt, type);
b.AddParam(type);
}
WasmFunctionBuilder* function = builder_->AddFunction(b.Build());
entry = functions_.LookupOrInsert(v, ComputePointerHash(v),
ZoneAllocationPolicy(zone()));
entry->value = container;
function->SetName(
reinterpret_cast<const char*>(v->raw_name()->raw_data()),
v->raw_name()->length());
entry->value = function;
}
return (reinterpret_cast<IndexContainer*>(entry->value))->index;
return (reinterpret_cast<WasmFunctionBuilder*>(entry->value));
}
LocalType TypeOf(Expression* expr) { return TypeFrom(typer_->TypeOf(expr)); }
@ -1766,8 +1860,8 @@ class AsmWasmBuilderImpl final : public AstVisitor<AsmWasmBuilderImpl> {
AsmTyper* typer_;
ZoneVector<std::pair<BreakableStatement*, bool>> breakable_blocks_;
ZoneVector<ForeignVariable> foreign_variables_;
uint32_t init_function_index_;
uint32_t foreign_init_function_index_;
WasmFunctionBuilder* init_function_;
WasmFunctionBuilder* foreign_init_function_;
uint32_t next_table_index_;
ZoneHashMap function_tables_;
ImportedFunctionTable imported_function_table_;
@ -1792,6 +1886,10 @@ ZoneBuffer* AsmWasmBuilder::Run(i::Handle<i::FixedArray>* foreign_args) {
impl.builder_->WriteTo(*buffer);
return buffer;
}
const char* AsmWasmBuilder::foreign_init_name = "__foreign_init__";
const char* AsmWasmBuilder::single_function_name = "__single_function__";
} // namespace wasm
} // namespace internal
} // namespace v8
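
One theme running through the control-flow hunks above: every source loop now opens an outer block plus an inner loop, both recorded on breakable_blocks_, and DoBreakOrContinue simply counts how many entries sit above the matching one to get the br depth (break matches the block entry, continue the loop entry). The following self-contained sketch reproduces that depth computation; BlockEntry and BranchDepth are names made up for the example.

// Sketch of DoBreakOrContinue's depth search over a stack of open constructs.
#include <cassert>
#include <iostream>
#include <string>
#include <vector>

struct BlockEntry {
  std::string target;   // statement the construct belongs to
  bool is_continue;     // true for the inner loop entry, false for the block
};

// Depth of the br that leaves (break) or restarts (continue) 'target'.
int BranchDepth(const std::vector<BlockEntry>& open, const std::string& target,
                bool is_continue) {
  for (int i = static_cast<int>(open.size()) - 1; i >= 0; --i) {
    if (open[i].target == target && open[i].is_continue == is_continue) {
      return static_cast<int>(open.size()) - i - 1;
    }
  }
  assert(false && "statement not found");
  return -1;
}

int main() {
  // while (...) { if (...) break; }  -- a block and a loop for the while
  // statement, plus the implicit block of the if.
  std::vector<BlockEntry> open = {
      {"while", false}, {"while", true}, {"if", false}};
  std::cout << "break:    br " << BranchDepth(open, "while", false) << "\n";  // 2
  std::cout << "continue: br " << BranchDepth(open, "while", true) << "\n";   // 1
}

Pairing a block with each loop is what lets a plain br express both break (branch out of the block) and continue (branch back to the loop header) without the old +1/+2 depth bookkeeping.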

deps/v8/src/asmjs/asm-wasm-builder.h (vendored, 7 changed lines)

@ -8,8 +8,8 @@
#include "src/allocation.h"
#include "src/asmjs/asm-typer.h"
#include "src/objects.h"
#include "src/wasm/encoder.h"
#include "src/zone.h"
#include "src/wasm/wasm-module-builder.h"
#include "src/zone/zone.h"
namespace v8 {
namespace internal {
@ -24,6 +24,9 @@ class AsmWasmBuilder {
AsmTyper* typer);
ZoneBuffer* Run(Handle<FixedArray>* foreign_args);
static const char* foreign_init_name;
static const char* single_function_name;
private:
Isolate* isolate_;
Zone* zone_;

deps/v8/src/assembler.cc (vendored, 49 changed lines)

@ -120,7 +120,7 @@ double min_int;
double one_half;
double minus_one_half;
double negative_infinity;
double the_hole_nan;
uint64_t the_hole_nan;
double uint32_bias;
};
@ -190,6 +190,7 @@ void AssemblerBase::FlushICache(Isolate* isolate, void* start, size_t size) {
if (size == 0) return;
#if defined(USE_SIMULATOR)
base::LockGuard<base::Mutex> lock_guard(isolate->simulator_i_cache_mutex());
Simulator::FlushICache(isolate->simulator_i_cache(), start, size);
#else
CpuFeatures::FlushICache(start, size);
@ -233,22 +234,14 @@ PredictableCodeSizeScope::~PredictableCodeSizeScope() {
// Implementation of CpuFeatureScope
#ifdef DEBUG
CpuFeatureScope::CpuFeatureScope(AssemblerBase* assembler, CpuFeature f)
CpuFeatureScope::CpuFeatureScope(AssemblerBase* assembler, CpuFeature f,
CheckPolicy check)
: assembler_(assembler) {
DCHECK(CpuFeatures::IsSupported(f));
DCHECK_IMPLIES(check == kCheckSupported, CpuFeatures::IsSupported(f));
old_enabled_ = assembler_->enabled_cpu_features();
uint64_t mask = static_cast<uint64_t>(1) << f;
// TODO(svenpanne) This special case below doesn't belong here!
#if V8_TARGET_ARCH_ARM
// ARMv7 is implied by VFP3.
if (f == VFP3) {
mask |= static_cast<uint64_t>(1) << ARMv7;
}
#endif
assembler_->set_enabled_cpu_features(old_enabled_ | mask);
assembler_->EnableCpuFeature(f);
}
CpuFeatureScope::~CpuFeatureScope() {
assembler_->set_enabled_cpu_features(old_enabled_);
}
@ -350,19 +343,18 @@ void RelocInfo::update_wasm_memory_reference(
DCHECK(IsWasmMemoryReference(rmode_) || IsWasmMemorySizeReference(rmode_));
if (IsWasmMemoryReference(rmode_)) {
Address updated_reference;
DCHECK(old_size == 0 || Memory::IsAddressInRange(
old_base, wasm_memory_reference(), old_size));
DCHECK_GE(wasm_memory_reference(), old_base);
updated_reference = new_base + (wasm_memory_reference() - old_base);
DCHECK(new_size == 0 ||
Memory::IsAddressInRange(new_base, updated_reference, new_size));
// The reference is not checked here but at runtime. Validity of references
// may change over time.
unchecked_update_wasm_memory_reference(updated_reference,
icache_flush_mode);
} else if (IsWasmMemorySizeReference(rmode_)) {
uint32_t updated_size_reference;
DCHECK(old_size == 0 || wasm_memory_size_reference() <= old_size);
updated_size_reference =
new_size + (wasm_memory_size_reference() - old_size);
DCHECK(updated_size_reference <= new_size);
uint32_t current_size_reference = wasm_memory_size_reference();
DCHECK(old_size == 0 || current_size_reference <= old_size);
uint32_t offset = old_size - current_size_reference;
DCHECK_GE(new_size, offset);
uint32_t updated_size_reference = new_size - offset;
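// Worked example with made-up numbers: if old_size is 0x1000 and the embedded
// size reference was 0xFF0, the offset from the end is 0x10; after growing to
// new_size = 0x2000 the updated reference is 0x2000 - 0x10 = 0x1FF0, keeping
// the same distance from the end of the memory.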
unchecked_update_wasm_memory_size(updated_size_reference,
icache_flush_mode);
} else {
@ -930,7 +922,7 @@ void ExternalReference::SetUp() {
double_constants.min_int = kMinInt;
double_constants.one_half = 0.5;
double_constants.minus_one_half = -0.5;
double_constants.the_hole_nan = bit_cast<double>(kHoleNanInt64);
double_constants.the_hole_nan = kHoleNanInt64;
double_constants.negative_infinity = -V8_INFINITY;
double_constants.uint32_bias =
static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;
@ -1601,17 +1593,6 @@ ExternalReference ExternalReference::debug_after_break_target_address(
}
ExternalReference ExternalReference::virtual_handler_register(
Isolate* isolate) {
return ExternalReference(isolate->virtual_handler_register_address());
}
ExternalReference ExternalReference::virtual_slot_register(Isolate* isolate) {
return ExternalReference(isolate->virtual_slot_register_address());
}
ExternalReference ExternalReference::runtime_function_table_address(
Isolate* isolate) {
return ExternalReference(

deps/v8/src/assembler.h (vendored, 19 changed lines)

@ -80,9 +80,14 @@ class AssemblerBase: public Malloced {
void set_enabled_cpu_features(uint64_t features) {
enabled_cpu_features_ = features;
}
// Features are usually enabled by CpuFeatureScope, which also asserts that
// the features are supported before they are enabled.
bool IsEnabled(CpuFeature f) {
return (enabled_cpu_features_ & (static_cast<uint64_t>(1) << f)) != 0;
}
void EnableCpuFeature(CpuFeature f) {
enabled_cpu_features_ |= (static_cast<uint64_t>(1) << f);
}
bool is_constant_pool_available() const {
if (FLAG_enable_embedded_constant_pool) {
@ -184,15 +189,22 @@ class PredictableCodeSizeScope {
// Enable a specified feature within a scope.
class CpuFeatureScope BASE_EMBEDDED {
public:
enum CheckPolicy {
kCheckSupported,
kDontCheckSupported,
};
#ifdef DEBUG
CpuFeatureScope(AssemblerBase* assembler, CpuFeature f);
CpuFeatureScope(AssemblerBase* assembler, CpuFeature f,
CheckPolicy check = kCheckSupported);
~CpuFeatureScope();
private:
AssemblerBase* assembler_;
uint64_t old_enabled_;
#else
CpuFeatureScope(AssemblerBase* assembler, CpuFeature f) {}
CpuFeatureScope(AssemblerBase* assembler, CpuFeature f,
CheckPolicy check = kCheckSupported) {}
#endif
};
@ -1035,9 +1047,6 @@ class ExternalReference BASE_EMBEDDED {
static ExternalReference invoke_function_callback(Isolate* isolate);
static ExternalReference invoke_accessor_getter_callback(Isolate* isolate);
static ExternalReference virtual_handler_register(Isolate* isolate);
static ExternalReference virtual_slot_register(Isolate* isolate);
static ExternalReference runtime_function_table_address(Isolate* isolate);
Address address() const { return reinterpret_cast<Address>(address_); }
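
The CheckPolicy parameter added to CpuFeatureScope above lets a scope enable a feature without first asserting that the host supports it. A generic RAII sketch of the same pattern, independent of V8's types; FeatureScope and the kSupported bit mask are invented for the example.

// Sketch: enable a feature bit for the lifetime of a scope; by default assert
// that the feature is supported, but allow callers to opt out of the check.
#include <cassert>
#include <cstdint>
#include <iostream>

constexpr uint64_t kSupported = 0b01;  // pretend only feature bit 0 exists

class FeatureScope {
 public:
  enum CheckPolicy { kCheckSupported, kDontCheckSupported };

  FeatureScope(uint64_t* enabled, int feature,
               CheckPolicy check = kCheckSupported)
      : enabled_(enabled), old_(*enabled) {
    uint64_t mask = uint64_t{1} << feature;
    if (check == kCheckSupported) assert(kSupported & mask);
    *enabled_ |= mask;
  }
  ~FeatureScope() { *enabled_ = old_; }  // restore the previous set on exit

 private:
  uint64_t* enabled_;
  uint64_t old_;
};

int main() {
  uint64_t enabled = 0;
  {
    FeatureScope scope(&enabled, 1, FeatureScope::kDontCheckSupported);
    std::cout << "inside:  " << enabled << "\n";  // bit 1 set -> 2
  }
  std::cout << "outside: " << enabled << "\n";    // restored -> 0
}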

deps/v8/src/assert-scope.h (vendored, 8 changed lines)

@ -7,6 +7,7 @@
#include <stdint.h>
#include "src/base/macros.h"
#include "src/globals.h"
namespace v8 {
namespace internal {
@ -33,14 +34,13 @@ enum PerIsolateAssertType {
COMPILATION_ASSERT
};
template <PerThreadAssertType kType, bool kAllow>
class PerThreadAssertScope {
public:
PerThreadAssertScope();
~PerThreadAssertScope();
V8_EXPORT_PRIVATE PerThreadAssertScope();
V8_EXPORT_PRIVATE ~PerThreadAssertScope();
static bool IsAllowed();
V8_EXPORT_PRIVATE static bool IsAllowed();
private:
PerThreadAssertData* data_;

deps/v8/src/ast/OWNERS (vendored, 1 changed line)

@ -3,6 +3,7 @@ set noparent
adamk@chromium.org
bmeurer@chromium.org
littledan@chromium.org
marja@chromium.org
mstarzinger@chromium.org
rossberg@chromium.org
verwaest@chromium.org

deps/v8/src/ast/ast-expression-rewriter.cc (vendored, 9 changed lines)

@ -201,11 +201,10 @@ void AstExpressionRewriter::VisitClassLiteral(ClassLiteral* node) {
AST_REWRITE_PROPERTY(FunctionLiteral, node, constructor);
ZoneList<typename ClassLiteral::Property*>* properties = node->properties();
for (int i = 0; i < properties->length(); i++) {
VisitObjectLiteralProperty(properties->at(i));
VisitLiteralProperty(properties->at(i));
}
}
void AstExpressionRewriter::VisitNativeFunctionLiteral(
NativeFunctionLiteral* node) {
REWRITE_THIS(node);
@ -243,13 +242,11 @@ void AstExpressionRewriter::VisitObjectLiteral(ObjectLiteral* node) {
REWRITE_THIS(node);
ZoneList<typename ObjectLiteral::Property*>* properties = node->properties();
for (int i = 0; i < properties->length(); i++) {
VisitObjectLiteralProperty(properties->at(i));
VisitLiteralProperty(properties->at(i));
}
}
void AstExpressionRewriter::VisitObjectLiteralProperty(
ObjectLiteralProperty* property) {
void AstExpressionRewriter::VisitLiteralProperty(LiteralProperty* property) {
if (property == nullptr) return;
AST_REWRITE_PROPERTY(Expression, property, key);
AST_REWRITE_PROPERTY(Expression, property, value);

deps/v8/src/ast/ast-expression-rewriter.h (vendored, 4 changed lines)

@ -9,7 +9,7 @@
#include "src/ast/ast.h"
#include "src/ast/scopes.h"
#include "src/type-info.h"
#include "src/zone.h"
#include "src/zone/zone.h"
namespace v8 {
namespace internal {
@ -33,7 +33,7 @@ class AstExpressionRewriter : public AstVisitor<AstExpressionRewriter> {
virtual void VisitStatements(ZoneList<Statement*>* statements);
virtual void VisitExpressions(ZoneList<Expression*>* expressions);
virtual void VisitObjectLiteralProperty(ObjectLiteralProperty* property);
virtual void VisitLiteralProperty(LiteralProperty* property);
protected:
virtual bool RewriteExpression(Expression* expr) = 0;

deps/v8/src/ast/ast-literal-reindexer.cc (vendored, 9 changed lines)

@ -249,21 +249,18 @@ void AstLiteralReindexer::VisitClassLiteral(ClassLiteral* node) {
VisitVariableProxy(node->class_variable_proxy());
}
for (int i = 0; i < node->properties()->length(); i++) {
VisitObjectLiteralProperty(node->properties()->at(i));
VisitLiteralProperty(node->properties()->at(i));
}
}
void AstLiteralReindexer::VisitObjectLiteral(ObjectLiteral* node) {
UpdateIndex(node);
for (int i = 0; i < node->properties()->length(); i++) {
VisitObjectLiteralProperty(node->properties()->at(i));
VisitLiteralProperty(node->properties()->at(i));
}
}
void AstLiteralReindexer::VisitObjectLiteralProperty(
ObjectLiteralProperty* node) {
void AstLiteralReindexer::VisitLiteralProperty(LiteralProperty* node) {
Visit(node->key());
Visit(node->value());
}
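
The rewriter, reindexer, numbering and traversal changes in this group of files all follow from one AST refactoring: object-literal and class-literal properties are now visited through a shared LiteralProperty type, so each visitor keeps a single VisitLiteralProperty hook. A compact sketch of that shape; the Visit function and the string payload are illustrative, only the three class names mirror the patch.

// Sketch: one visitor entry point for both kinds of literal property.
#include <iostream>
#include <memory>
#include <string>
#include <vector>

struct LiteralProperty {                 // shared key/value payload
  std::string key, value;
  virtual ~LiteralProperty() = default;
};
struct ObjectLiteralProperty : LiteralProperty {};
struct ClassLiteralProperty : LiteralProperty {};

void VisitLiteralProperty(const LiteralProperty& p) {
  std::cout << p.key << ": " << p.value << "\n";  // visit key, then value
}

int main() {
  std::vector<std::unique_ptr<LiteralProperty>> props;
  props.push_back(std::make_unique<ObjectLiteralProperty>());
  props.back()->key = "x";
  props.back()->value = "1";
  props.push_back(std::make_unique<ClassLiteralProperty>());
  props.back()->key = "m";
  props.back()->value = "function";
  for (const auto& p : props) VisitLiteralProperty(*p);
}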

deps/v8/src/ast/ast-literal-reindexer.h (vendored, 2 changed lines)

@ -26,7 +26,7 @@ class AstLiteralReindexer final : public AstVisitor<AstLiteralReindexer> {
void VisitStatements(ZoneList<Statement*>* statements);
void VisitDeclarations(ZoneList<Declaration*>* declarations);
void VisitArguments(ZoneList<Expression*>* arguments);
void VisitObjectLiteralProperty(ObjectLiteralProperty* property);
void VisitLiteralProperty(LiteralProperty* property);
void UpdateIndex(MaterializedLiteral* literal) {
literal->literal_index_ = next_index_++;

deps/v8/src/ast/ast-numbering.cc (vendored, 63 changed lines)

@ -39,7 +39,7 @@ class AstNumberingVisitor final : public AstVisitor<AstNumberingVisitor> {
void VisitStatements(ZoneList<Statement*>* statements);
void VisitDeclarations(ZoneList<Declaration*>* declarations);
void VisitArguments(ZoneList<Expression*>* arguments);
void VisitObjectLiteralProperty(ObjectLiteralProperty* property);
void VisitLiteralProperty(LiteralProperty* property);
int ReserveIdRange(int n) {
int tmp = next_id_;
@ -233,14 +233,6 @@ void AstNumberingVisitor::VisitCountOperation(CountOperation* node) {
void AstNumberingVisitor::VisitBlock(Block* node) {
IncrementNodeCount();
node->set_base_id(ReserveIdRange(Block::num_ids()));
if (FLAG_ignition && node->scope() != nullptr &&
node->scope()->NeedsContext()) {
// Create ScopeInfo while on the main thread to avoid allocation during
// potentially concurrent bytecode generation.
node->scope()->GetScopeInfo(isolate_);
}
if (node->scope() != NULL) VisitDeclarations(node->scope()->declarations());
VisitStatements(node->statements());
}
@ -257,6 +249,27 @@ void AstNumberingVisitor::VisitCallRuntime(CallRuntime* node) {
IncrementNodeCount();
node->set_base_id(ReserveIdRange(CallRuntime::num_ids()));
VisitArguments(node->arguments());
// To support catch prediction within async/await:
//
// The AstNumberingVisitor is when catch prediction currently occurs, and it
// is the only common point that has access to this information. The parser
// just doesn't know yet. Take the following two cases of catch prediction:
//
// try { await fn(); } catch (e) { }
// try { await fn(); } finally { }
//
// When parsing the await that we want to mark as caught or uncaught, it's
// not yet known whether it will be followed by a 'finally' or a 'catch'.
// The AstNumberingVisitor is what learns whether it is caught. To make
// the information available later to the runtime, the AstNumberingVisitor
// has to stash it somewhere. Changing the runtime function into another
// one in ast-numbering seemed like a simple and straightforward solution to
// that problem.
if (node->is_jsruntime() &&
node->context_index() == Context::ASYNC_FUNCTION_AWAIT_CAUGHT_INDEX &&
catch_prediction_ == HandlerTable::ASYNC_AWAIT) {
node->set_context_index(Context::ASYNC_FUNCTION_AWAIT_UNCAUGHT_INDEX);
}
}
@ -370,6 +383,7 @@ void AstNumberingVisitor::VisitCompareOperation(CompareOperation* node) {
node->set_base_id(ReserveIdRange(CompareOperation::num_ids()));
Visit(node->left());
Visit(node->right());
ReserveFeedbackSlots(node);
}
@ -444,6 +458,7 @@ void AstNumberingVisitor::VisitCaseClause(CaseClause* node) {
node->set_base_id(ReserveIdRange(CaseClause::num_ids()));
if (!node->is_default()) Visit(node->label());
VisitStatements(node->statements());
ReserveFeedbackSlots(node);
}
@ -470,7 +485,7 @@ void AstNumberingVisitor::VisitClassLiteral(ClassLiteral* node) {
VisitVariableProxy(node->class_variable_proxy());
}
for (int i = 0; i < node->properties()->length(); i++) {
VisitObjectLiteralProperty(node->properties()->at(i));
VisitLiteralProperty(node->properties()->at(i));
}
ReserveFeedbackSlots(node);
}
@ -480,7 +495,7 @@ void AstNumberingVisitor::VisitObjectLiteral(ObjectLiteral* node) {
IncrementNodeCount();
node->set_base_id(ReserveIdRange(node->num_ids()));
for (int i = 0; i < node->properties()->length(); i++) {
VisitObjectLiteralProperty(node->properties()->at(i));
VisitLiteralProperty(node->properties()->at(i));
}
node->BuildConstantProperties(isolate_);
// Mark all computed expressions that are bound to a key that
@ -490,15 +505,12 @@ void AstNumberingVisitor::VisitObjectLiteral(ObjectLiteral* node) {
ReserveFeedbackSlots(node);
}
void AstNumberingVisitor::VisitObjectLiteralProperty(
ObjectLiteralProperty* node) {
void AstNumberingVisitor::VisitLiteralProperty(LiteralProperty* node) {
if (node->is_computed_name()) DisableCrankshaft(kComputedPropertyName);
Visit(node->key());
Visit(node->value());
}
void AstNumberingVisitor::VisitArrayLiteral(ArrayLiteral* node) {
IncrementNodeCount();
node->set_base_id(ReserveIdRange(node->num_ids()));
@ -570,27 +582,22 @@ void AstNumberingVisitor::VisitRewritableExpression(
bool AstNumberingVisitor::Renumber(FunctionLiteral* node) {
DeclarationScope* scope = node->scope();
if (scope->new_target_var()) DisableCrankshaft(kSuperReference);
if (scope->calls_eval()) DisableOptimization(kFunctionCallsEval);
if (scope->calls_eval()) DisableCrankshaft(kFunctionCallsEval);
if (scope->arguments() != NULL && !scope->arguments()->IsStackAllocated()) {
DisableCrankshaft(kContextAllocatedArguments);
}
int rest_index;
if (scope->rest_parameter(&rest_index)) {
if (scope->rest_parameter() != nullptr) {
DisableCrankshaft(kRestParameter);
}
if (FLAG_ignition && scope->NeedsContext() && scope->is_script_scope()) {
// Create ScopeInfo while on the main thread to avoid allocation during
// potentially concurrent bytecode generation.
node->scope()->GetScopeInfo(isolate_);
}
if (IsGeneratorFunction(node->kind()) || IsAsyncFunction(node->kind())) {
// TODO(neis): We may want to allow Turbofan optimization here if
// --turbo-from-bytecode is set and we know that Ignition is used.
// Unfortunately we can't express that here.
DisableOptimization(kGenerator);
// Generators can be optimized if --turbo-from-bytecode is set.
if (FLAG_turbo_from_bytecode) {
DisableCrankshaft(kGenerator);
} else {
DisableOptimization(kGenerator);
}
}
VisitDeclarations(scope->declarations());

4
deps/v8/src/ast/ast-traversal-visitor.h vendored

@ -447,9 +447,9 @@ void AstTraversalVisitor<Subclass>::VisitClassLiteral(ClassLiteral* expr) {
RECURSE_EXPRESSION(Visit(expr->extends()));
}
RECURSE_EXPRESSION(Visit(expr->constructor()));
ZoneList<ObjectLiteralProperty*>* props = expr->properties();
ZoneList<ClassLiteralProperty*>* props = expr->properties();
for (int i = 0; i < props->length(); ++i) {
ObjectLiteralProperty* prop = props->at(i);
ClassLiteralProperty* prop = props->at(i);
if (!prop->key()->IsLiteral()) {
RECURSE_EXPRESSION(Visit(prop->key()));
}

14
deps/v8/src/ast/ast-type-bounds.h vendored

@ -7,8 +7,8 @@
#ifndef V8_AST_AST_TYPE_BOUNDS_H_
#define V8_AST_AST_TYPE_BOUNDS_H_
#include "src/types.h"
#include "src/zone-containers.h"
#include "src/ast/ast-types.h"
#include "src/zone/zone-containers.h"
namespace v8 {
namespace internal {
@ -20,18 +20,18 @@ class AstTypeBounds {
explicit AstTypeBounds(Zone* zone) : bounds_map_(zone) {}
~AstTypeBounds() {}
Bounds get(Expression* expression) const {
ZoneMap<Expression*, Bounds>::const_iterator i =
AstBounds get(Expression* expression) const {
ZoneMap<Expression*, AstBounds>::const_iterator i =
bounds_map_.find(expression);
return (i != bounds_map_.end()) ? i->second : Bounds::Unbounded();
return (i != bounds_map_.end()) ? i->second : AstBounds::Unbounded();
}
void set(Expression* expression, Bounds bounds) {
void set(Expression* expression, AstBounds bounds) {
bounds_map_[expression] = bounds;
}
private:
ZoneMap<Expression*, Bounds> bounds_map_;
ZoneMap<Expression*, AstBounds> bounds_map_;
};
} // namespace internal

Diff not shown because it is too large.

1024
deps/v8/src/ast/ast-types.h vendored Normal file

Diff not shown because it is too large.

68
deps/v8/src/ast/ast-value-factory.cc vendored

@ -237,28 +237,14 @@ AstRawString* AstValueFactory::GetTwoByteStringInternal(
const AstRawString* AstValueFactory::GetString(Handle<String> literal) {
// For the FlatContent to stay valid, we shouldn't do any heap
// allocation. Make sure we won't try to internalize the string in GetString.
AstRawString* result = NULL;
Isolate* saved_isolate = isolate_;
isolate_ = NULL;
{
DisallowHeapAllocation no_gc;
String::FlatContent content = literal->GetFlatContent();
if (content.IsOneByte()) {
result = GetOneByteStringInternal(content.ToOneByteVector());
} else {
DCHECK(content.IsTwoByte());
result = GetTwoByteStringInternal(content.ToUC16Vector());
}
}
isolate_ = saved_isolate;
if (strings_ != nullptr && isolate_) {
// Only the string we are creating is uninternalized at this point.
DCHECK_EQ(result, strings_);
DCHECK_NULL(strings_->next());
result->Internalize(isolate_);
ResetStrings();
DisallowHeapAllocation no_gc;
String::FlatContent content = literal->GetFlatContent();
if (content.IsOneByte()) {
result = GetOneByteStringInternal(content.ToOneByteVector());
} else {
DCHECK(content.IsTwoByte());
result = GetTwoByteStringInternal(content.ToUC16Vector());
}
return result;
}
@ -274,15 +260,40 @@ const AstConsString* AstValueFactory::NewConsString(
return new_string;
}
const AstRawString* AstValueFactory::ConcatStrings(const AstRawString* left,
const AstRawString* right) {
int left_length = left->length();
int right_length = right->length();
const unsigned char* left_data = left->raw_data();
const unsigned char* right_data = right->raw_data();
if (left->is_one_byte() && right->is_one_byte()) {
uint8_t* buffer = zone_->NewArray<uint8_t>(left_length + right_length);
memcpy(buffer, left_data, left_length);
memcpy(buffer + left_length, right_data, right_length);
Vector<const uint8_t> literal(buffer, left_length + right_length);
return GetOneByteStringInternal(literal);
} else {
uint16_t* buffer = zone_->NewArray<uint16_t>(left_length + right_length);
if (left->is_one_byte()) {
for (int i = 0; i < left_length; ++i) {
buffer[i] = left_data[i];
}
} else {
memcpy(buffer, left_data, 2 * left_length);
}
if (right->is_one_byte()) {
for (int i = 0; i < right_length; ++i) {
buffer[i + left_length] = right_data[i];
}
} else {
memcpy(buffer + left_length, right_data, 2 * right_length);
}
Vector<const uint16_t> literal(buffer, left_length + right_length);
return GetTwoByteStringInternal(literal);
}
}
void AstValueFactory::Internalize(Isolate* isolate) {
if (isolate_) {
DCHECK_NULL(strings_);
DCHECK_NULL(values_);
// Everything is already internalized.
return;
}
// Strings need to be internalized before values, because values refer to
// strings.
for (AstString* current = strings_; current != nullptr;) {
@ -295,7 +306,6 @@ void AstValueFactory::Internalize(Isolate* isolate) {
current->Internalize(isolate);
current = next;
}
isolate_ = isolate;
ResetStrings();
values_ = nullptr;
}

27
deps/v8/src/ast/ast-value-factory.h vendored

@ -283,8 +283,8 @@ class AstValue : public ZoneObject {
F(default, "default") \
F(done, "done") \
F(dot, ".") \
F(dot_class_field_init, ".class-field-init") \
F(dot_for, ".for") \
F(dot_generator, ".generator") \
F(dot_generator_object, ".generator_object") \
F(dot_iterator, ".iterator") \
F(dot_result, ".result") \
@ -326,7 +326,6 @@ class AstValueFactory {
values_(nullptr),
strings_end_(&strings_),
zone_(zone),
isolate_(NULL),
hash_seed_(hash_seed) {
ResetStrings();
#define F(name, str) name##_string_ = NULL;
@ -352,11 +351,10 @@ class AstValueFactory {
const AstRawString* GetString(Handle<String> literal);
const AstConsString* NewConsString(const AstString* left,
const AstString* right);
const AstRawString* ConcatStrings(const AstRawString* left,
const AstRawString* right);
void Internalize(Isolate* isolate);
bool IsInternalized() {
return isolate_ != NULL;
}
#define F(name, str) \
const AstRawString* name##_string() { \
@ -384,21 +382,13 @@ class AstValueFactory {
private:
AstValue* AddValue(AstValue* value) {
if (isolate_) {
value->Internalize(isolate_);
} else {
value->set_next(values_);
values_ = value;
}
value->set_next(values_);
values_ = value;
return value;
}
AstString* AddString(AstString* string) {
if (isolate_) {
string->Internalize(isolate_);
} else {
*strings_end_ = string;
strings_end_ = string->next_location();
}
*strings_end_ = string;
strings_end_ = string->next_location();
return string;
}
void ResetStrings() {
@ -413,7 +403,7 @@ class AstValueFactory {
static bool AstRawStringCompare(void* a, void* b);
// All strings are copied here, one after another (no NULLs in between).
base::HashMap string_table_;
base::CustomMatcherHashMap string_table_;
// For keeping track of all AstValues and AstRawStrings we've created (so that
// they can be internalized later).
AstValue* values_;
@ -422,7 +412,6 @@ class AstValueFactory {
AstString* strings_;
AstString** strings_end_;
Zone* zone_;
Isolate* isolate_;
uint32_t hash_seed_;

142
deps/v8/src/ast/ast.cc vendored

@ -6,6 +6,7 @@
#include <cmath> // For isfinite.
#include "src/ast/compile-time-value.h"
#include "src/ast/prettyprinter.h"
#include "src/ast/scopes.h"
#include "src/base/hashmap.h"
@ -13,7 +14,6 @@
#include "src/code-stubs.h"
#include "src/contexts.h"
#include "src/conversions.h"
#include "src/parsing/parser.h"
#include "src/property-details.h"
#include "src/property.h"
#include "src/string-stream.h"
@ -83,18 +83,14 @@ bool Expression::IsNullLiteral() const {
}
bool Expression::IsUndefinedLiteral() const {
if (IsLiteral()) {
if (AsLiteral()->raw_value()->IsUndefined()) {
return true;
}
}
if (IsLiteral() && AsLiteral()->raw_value()->IsUndefined()) return true;
const VariableProxy* var_proxy = AsVariableProxy();
if (var_proxy == NULL) return false;
if (var_proxy == nullptr) return false;
Variable* var = var_proxy->var();
// The global identifier "undefined" is immutable. Everything
// else could be reassigned.
return var != NULL && var->IsUnallocatedOrGlobalSlot() &&
return var != NULL && var->IsUnallocated() &&
var_proxy->raw_name()->IsOneByteEqualTo("undefined");
}
@ -166,36 +162,32 @@ bool Statement::IsJump() const {
VariableProxy::VariableProxy(Variable* var, int start_position,
int end_position)
: Expression(start_position, kVariableProxy),
bit_field_(IsThisField::encode(var->is_this()) |
IsAssignedField::encode(false) |
IsResolvedField::encode(false)),
end_position_(end_position),
raw_name_(var->raw_name()),
next_unresolved_(nullptr) {
bit_field_ |= IsThisField::encode(var->is_this()) |
IsAssignedField::encode(false) | IsResolvedField::encode(false);
BindTo(var);
}
VariableProxy::VariableProxy(const AstRawString* name,
Variable::Kind variable_kind, int start_position,
VariableKind variable_kind, int start_position,
int end_position)
: Expression(start_position, kVariableProxy),
bit_field_(IsThisField::encode(variable_kind == Variable::THIS) |
IsAssignedField::encode(false) |
IsResolvedField::encode(false)),
end_position_(end_position),
raw_name_(name),
next_unresolved_(nullptr) {}
next_unresolved_(nullptr) {
bit_field_ |= IsThisField::encode(variable_kind == THIS_VARIABLE) |
IsAssignedField::encode(false) | IsResolvedField::encode(false);
}
VariableProxy::VariableProxy(const VariableProxy* copy_from)
: Expression(copy_from->position(), kVariableProxy),
bit_field_(copy_from->bit_field_),
end_position_(copy_from->end_position_),
next_unresolved_(nullptr) {
if (copy_from->is_resolved()) {
var_ = copy_from->var_;
} else {
raw_name_ = copy_from->raw_name_;
}
bit_field_ = copy_from->bit_field_;
DCHECK(!copy_from->is_resolved());
raw_name_ = copy_from->raw_name_;
}
void VariableProxy::BindTo(Variable* var) {
@ -253,12 +245,13 @@ void ForInStatement::AssignFeedbackVectorSlots(Isolate* isolate,
Assignment::Assignment(Token::Value op, Expression* target, Expression* value,
int pos)
: Expression(pos, kAssignment),
bit_field_(
IsUninitializedField::encode(false) | KeyTypeField::encode(ELEMENT) |
StoreModeField::encode(STANDARD_STORE) | TokenField::encode(op)),
target_(target),
value_(value),
binary_operation_(NULL) {}
binary_operation_(NULL) {
bit_field_ |= IsUninitializedField::encode(false) |
KeyTypeField::encode(ELEMENT) |
StoreModeField::encode(STANDARD_STORE) | TokenField::encode(op);
}
void Assignment::AssignFeedbackVectorSlots(Isolate* isolate,
FeedbackVectorSpec* spec,
@ -273,7 +266,7 @@ void CountOperation::AssignFeedbackVectorSlots(Isolate* isolate,
AssignVectorSlots(expression(), spec, &slot_);
// Assign a slot to collect feedback about binary operations. Used only in
// ignition. Fullcodegen uses AstId to record type feedback.
binary_operation_slot_ = spec->AddGeneralSlot();
binary_operation_slot_ = spec->AddInterpreterBinaryOpICSlot();
}
@ -320,6 +313,7 @@ LanguageMode FunctionLiteral::language_mode() const {
return scope()->language_mode();
}
FunctionKind FunctionLiteral::kind() const { return scope()->function_kind(); }
bool FunctionLiteral::NeedsHomeObject(Expression* expr) {
if (expr == nullptr || !expr->IsFunctionLiteral()) return false;
@ -327,27 +321,16 @@ bool FunctionLiteral::NeedsHomeObject(Expression* expr) {
return expr->AsFunctionLiteral()->scope()->NeedsHomeObject();
}
ObjectLiteralProperty::ObjectLiteralProperty(Expression* key, Expression* value,
Kind kind, bool is_static,
bool is_computed_name)
: key_(key),
value_(value),
Kind kind, bool is_computed_name)
: LiteralProperty(key, value, is_computed_name),
kind_(kind),
emit_store_(true),
is_static_(is_static),
is_computed_name_(is_computed_name) {}
emit_store_(true) {}
ObjectLiteralProperty::ObjectLiteralProperty(AstValueFactory* ast_value_factory,
Expression* key, Expression* value,
bool is_static,
bool is_computed_name)
: key_(key),
value_(value),
emit_store_(true),
is_static_(is_static),
is_computed_name_(is_computed_name) {
: LiteralProperty(key, value, is_computed_name), emit_store_(true) {
if (!is_computed_name &&
key->AsLiteral()->raw_value()->EqualsString(
ast_value_factory->proto_string())) {
@ -361,13 +344,20 @@ ObjectLiteralProperty::ObjectLiteralProperty(AstValueFactory* ast_value_factory,
}
}
bool ObjectLiteralProperty::NeedsSetFunctionName() const {
bool LiteralProperty::NeedsSetFunctionName() const {
return is_computed_name_ &&
(value_->IsAnonymousFunctionDefinition() ||
(value_->IsFunctionLiteral() &&
IsConciseMethod(value_->AsFunctionLiteral()->kind())));
}
ClassLiteralProperty::ClassLiteralProperty(Expression* key, Expression* value,
Kind kind, bool is_static,
bool is_computed_name)
: LiteralProperty(key, value, is_computed_name),
kind_(kind),
is_static_(is_static) {}
void ClassLiteral::AssignFeedbackVectorSlots(Isolate* isolate,
FeedbackVectorSpec* spec,
FeedbackVectorSlotCache* cache) {
@ -379,7 +369,7 @@ void ClassLiteral::AssignFeedbackVectorSlots(Isolate* isolate,
}
for (int i = 0; i < properties()->length(); i++) {
ObjectLiteral::Property* property = properties()->at(i);
ClassLiteral::Property* property = properties()->at(i);
Expression* value = property->value();
if (FunctionLiteral::NeedsHomeObject(value)) {
property->SetSlot(spec->AddStoreICSlot());
@ -387,8 +377,7 @@ void ClassLiteral::AssignFeedbackVectorSlots(Isolate* isolate,
}
}
bool ObjectLiteral::Property::IsCompileTimeValue() {
bool ObjectLiteral::Property::IsCompileTimeValue() const {
return kind_ == CONSTANT ||
(kind_ == MATERIALIZED_LITERAL &&
CompileTimeValue::IsCompileTimeValue(value_));
@ -399,11 +388,7 @@ void ObjectLiteral::Property::set_emit_store(bool emit_store) {
emit_store_ = emit_store;
}
bool ObjectLiteral::Property::emit_store() {
return emit_store_;
}
bool ObjectLiteral::Property::emit_store() const { return emit_store_; }
void ObjectLiteral::AssignFeedbackVectorSlots(Isolate* isolate,
FeedbackVectorSpec* spec,
@ -473,8 +458,8 @@ void ObjectLiteral::CalculateEmitStore(Zone* zone) {
ZoneAllocationPolicy allocator(zone);
ZoneHashMap table(Literal::Match, ZoneHashMap::kDefaultHashMapCapacity,
allocator);
CustomMatcherZoneHashMap table(
Literal::Match, ZoneHashMap::kDefaultHashMapCapacity, allocator);
for (int i = properties()->length() - 1; i >= 0; i--) {
ObjectLiteral::Property* property = properties()->at(i);
if (property->is_computed_name()) continue;
@ -551,7 +536,7 @@ void ObjectLiteral::BuildConstantProperties(Isolate* isolate) {
// TODO(verwaest): Remove once we can store them inline.
if (FLAG_track_double_fields &&
(value->IsNumber() || value->IsUninitialized(isolate))) {
may_store_doubles_ = true;
bit_field_ = MayStoreDoublesField::update(bit_field_, true);
}
is_simple = is_simple && !value->IsUninitialized(isolate);
@ -578,9 +563,11 @@ void ObjectLiteral::BuildConstantProperties(Isolate* isolate) {
}
constant_properties_ = constant_properties;
fast_elements_ =
(max_element_index <= 32) || ((2 * elements) >= max_element_index);
has_elements_ = elements > 0;
bit_field_ = FastElementsField::update(
bit_field_,
(max_element_index <= 32) || ((2 * elements) >= max_element_index));
bit_field_ = HasElementsField::update(bit_field_, elements > 0);
set_is_simple(is_simple);
set_depth(depth_acc);
}
@ -662,8 +649,7 @@ void ArrayLiteral::AssignFeedbackVectorSlots(Isolate* isolate,
FeedbackVectorSlotCache* cache) {
// This logic that computes the number of slots needed for vector store
// ics must mirror FullCodeGenerator::VisitArrayLiteral.
int array_index = 0;
for (; array_index < values()->length(); array_index++) {
for (int array_index = 0; array_index < values()->length(); array_index++) {
Expression* subexpr = values()->at(array_index);
DCHECK(!subexpr->IsSpread());
if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
@ -731,7 +717,7 @@ void BinaryOperation::AssignFeedbackVectorSlots(
case Token::OR:
return;
default:
type_feedback_slot_ = spec->AddGeneralSlot();
type_feedback_slot_ = spec->AddInterpreterBinaryOpICSlot();
return;
}
}
@ -741,6 +727,20 @@ static bool IsTypeof(Expression* expr) {
return maybe_unary != NULL && maybe_unary->op() == Token::TYPEOF;
}
void CompareOperation::AssignFeedbackVectorSlots(
Isolate* isolate, FeedbackVectorSpec* spec,
FeedbackVectorSlotCache* cache_) {
// Feedback vector slot is only used by interpreter for binary operations.
// Full-codegen uses AstId to record type feedback.
switch (op()) {
// instanceof and in do not collect type feedback.
case Token::INSTANCEOF:
case Token::IN:
return;
default:
type_feedback_slot_ = spec->AddInterpreterCompareICSlot();
}
}
// Check for the pattern: typeof <expression> equals <string literal>.
static bool MatchLiteralCompareTypeof(Expression* left,
@ -759,8 +759,8 @@ static bool MatchLiteralCompareTypeof(Expression* left,
bool CompareOperation::IsLiteralCompareTypeof(Expression** expr,
Handle<String>* check) {
return MatchLiteralCompareTypeof(left_, op_, right_, expr, check) ||
MatchLiteralCompareTypeof(right_, op_, left_, expr, check);
return MatchLiteralCompareTypeof(left_, op(), right_, expr, check) ||
MatchLiteralCompareTypeof(right_, op(), left_, expr, check);
}
@ -790,8 +790,8 @@ static bool MatchLiteralCompareUndefined(Expression* left,
}
bool CompareOperation::IsLiteralCompareUndefined(Expression** expr) {
return MatchLiteralCompareUndefined(left_, op_, right_, expr) ||
MatchLiteralCompareUndefined(right_, op_, left_, expr);
return MatchLiteralCompareUndefined(left_, op(), right_, expr) ||
MatchLiteralCompareUndefined(right_, op(), left_, expr);
}
@ -809,8 +809,8 @@ static bool MatchLiteralCompareNull(Expression* left,
bool CompareOperation::IsLiteralCompareNull(Expression** expr) {
return MatchLiteralCompareNull(left_, op_, right_, expr) ||
MatchLiteralCompareNull(right_, op_, left_, expr);
return MatchLiteralCompareNull(left_, op(), right_, expr) ||
MatchLiteralCompareNull(right_, op(), left_, expr);
}
@ -913,7 +913,7 @@ Call::CallType Call::GetCallType() const {
if (proxy != NULL) {
if (is_possibly_eval()) {
return POSSIBLY_EVAL_CALL;
} else if (proxy->var()->IsUnallocatedOrGlobalSlot()) {
} else if (proxy->var()->IsUnallocated()) {
return GLOBAL_CALL;
} else if (proxy->var()->IsLookupSlot()) {
return LOOKUP_SLOT_CALL;
@ -940,7 +940,13 @@ CaseClause::CaseClause(Expression* label, ZoneList<Statement*>* statements,
: Expression(pos, kCaseClause),
label_(label),
statements_(statements),
compare_type_(Type::None()) {}
compare_type_(AstType::None()) {}
void CaseClause::AssignFeedbackVectorSlots(Isolate* isolate,
FeedbackVectorSpec* spec,
FeedbackVectorSlotCache* cache) {
type_feedback_slot_ = spec->AddInterpreterCompareICSlot();
}
uint32_t Literal::Hash() {
return raw_value()->IsString()

649
deps/v8/src/ast/ast.h vendored

Diff not shown because it is too large.

56
deps/v8/src/ast/compile-time-value.cc vendored Normal file

@ -0,0 +1,56 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/ast/compile-time-value.h"
#include "src/ast/ast.h"
#include "src/factory.h"
#include "src/handles-inl.h"
#include "src/isolate.h"
#include "src/objects-inl.h"
namespace v8 {
namespace internal {
bool CompileTimeValue::IsCompileTimeValue(Expression* expression) {
if (expression->IsLiteral()) return true;
MaterializedLiteral* lit = expression->AsMaterializedLiteral();
return lit != NULL && lit->is_simple();
}
Handle<FixedArray> CompileTimeValue::GetValue(Isolate* isolate,
Expression* expression) {
Factory* factory = isolate->factory();
DCHECK(IsCompileTimeValue(expression));
Handle<FixedArray> result = factory->NewFixedArray(2, TENURED);
ObjectLiteral* object_literal = expression->AsObjectLiteral();
if (object_literal != NULL) {
DCHECK(object_literal->is_simple());
if (object_literal->fast_elements()) {
result->set(kLiteralTypeSlot, Smi::FromInt(OBJECT_LITERAL_FAST_ELEMENTS));
} else {
result->set(kLiteralTypeSlot, Smi::FromInt(OBJECT_LITERAL_SLOW_ELEMENTS));
}
result->set(kElementsSlot, *object_literal->constant_properties());
} else {
ArrayLiteral* array_literal = expression->AsArrayLiteral();
DCHECK(array_literal != NULL && array_literal->is_simple());
result->set(kLiteralTypeSlot, Smi::FromInt(ARRAY_LITERAL));
result->set(kElementsSlot, *array_literal->constant_elements());
}
return result;
}
CompileTimeValue::LiteralType CompileTimeValue::GetLiteralType(
Handle<FixedArray> value) {
Smi* literal_type = Smi::cast(value->get(kLiteralTypeSlot));
return static_cast<LiteralType>(literal_type->value());
}
Handle<FixedArray> CompileTimeValue::GetElements(Handle<FixedArray> value) {
return Handle<FixedArray>(FixedArray::cast(value->get(kElementsSlot)));
}
} // namespace internal
} // namespace v8

45
deps/v8/src/ast/compile-time-value.h vendored Normal file

@ -0,0 +1,45 @@
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_AST_COMPILE_TIME_VALUE
#define V8_AST_COMPILE_TIME_VALUE
#include "src/allocation.h"
#include "src/globals.h"
namespace v8 {
namespace internal {
class Expression;
// Support for handling complex values (array and object literals) that
// can be fully handled at compile time.
class CompileTimeValue : public AllStatic {
public:
enum LiteralType {
OBJECT_LITERAL_FAST_ELEMENTS,
OBJECT_LITERAL_SLOW_ELEMENTS,
ARRAY_LITERAL
};
static bool IsCompileTimeValue(Expression* expression);
// Get the value as a compile time value.
static Handle<FixedArray> GetValue(Isolate* isolate, Expression* expression);
// Get the type of a compile time value returned by GetValue().
static LiteralType GetLiteralType(Handle<FixedArray> value);
// Get the elements array of a compile time value returned by GetValue().
static Handle<FixedArray> GetElements(Handle<FixedArray> value);
private:
static const int kLiteralTypeSlot = 0;
static const int kElementsSlot = 1;
};
} // namespace internal
} // namespace v8
#endif // V8_AST_COMPILE_TIME_VALUE

7
deps/v8/src/ast/context-slot-cache.cc vendored

@ -8,6 +8,13 @@
#include "src/ast/scopes.h"
#include "src/bootstrapper.h"
// FIXME(mstarzinger, marja): This is weird, but required because of the missing
// (disallowed) include: src/factory.h -> src/objects-inl.h
#include "src/objects-inl.h"
// FIXME(mstarzinger, marja): This is weird, but required because of the missing
// (disallowed) include: src/type-feedback-vector.h ->
// src/type-feedback-vector-inl.h
#include "src/type-feedback-vector-inl.h"
namespace v8 {
namespace internal {

1
deps/v8/src/ast/context-slot-cache.h vendored

@ -7,7 +7,6 @@
#include "src/allocation.h"
#include "src/ast/modules.h"
#include "src/ast/variables.h"
namespace v8 {
namespace internal {

252
deps/v8/src/ast/modules.cc vendored

@ -12,49 +12,35 @@ namespace internal {
void ModuleDescriptor::AddImport(
const AstRawString* import_name, const AstRawString* local_name,
const AstRawString* module_request, Scanner::Location loc, Zone* zone) {
DCHECK_NOT_NULL(import_name);
DCHECK_NOT_NULL(local_name);
DCHECK_NOT_NULL(module_request);
ModuleEntry* entry = new (zone) ModuleEntry(loc);
Entry* entry = new (zone) Entry(loc);
entry->local_name = local_name;
entry->import_name = import_name;
entry->module_request = module_request;
regular_imports_.insert(std::make_pair(entry->local_name, entry));
// We don't care if there's already an entry for this local name, as in that
// case we will report an error when declaring the variable.
entry->module_request = AddModuleRequest(module_request);
AddRegularImport(entry);
}
void ModuleDescriptor::AddStarImport(
const AstRawString* local_name, const AstRawString* module_request,
Scanner::Location loc, Zone* zone) {
DCHECK_NOT_NULL(local_name);
DCHECK_NOT_NULL(module_request);
ModuleEntry* entry = new (zone) ModuleEntry(loc);
Entry* entry = new (zone) Entry(loc);
entry->local_name = local_name;
entry->module_request = module_request;
special_imports_.Add(entry, zone);
entry->module_request = AddModuleRequest(module_request);
AddNamespaceImport(entry, zone);
}
void ModuleDescriptor::AddEmptyImport(
const AstRawString* module_request, Scanner::Location loc, Zone* zone) {
DCHECK_NOT_NULL(module_request);
ModuleEntry* entry = new (zone) ModuleEntry(loc);
entry->module_request = module_request;
special_imports_.Add(entry, zone);
void ModuleDescriptor::AddEmptyImport(const AstRawString* module_request) {
AddModuleRequest(module_request);
}
void ModuleDescriptor::AddExport(
const AstRawString* local_name, const AstRawString* export_name,
Scanner::Location loc, Zone* zone) {
DCHECK_NOT_NULL(local_name);
DCHECK_NOT_NULL(export_name);
ModuleEntry* entry = new (zone) ModuleEntry(loc);
Entry* entry = new (zone) Entry(loc);
entry->export_name = export_name;
entry->local_name = local_name;
exports_.Add(entry, zone);
AddRegularExport(entry);
}
@ -63,40 +49,186 @@ void ModuleDescriptor::AddExport(
const AstRawString* module_request, Scanner::Location loc, Zone* zone) {
DCHECK_NOT_NULL(import_name);
DCHECK_NOT_NULL(export_name);
DCHECK_NOT_NULL(module_request);
ModuleEntry* entry = new (zone) ModuleEntry(loc);
Entry* entry = new (zone) Entry(loc);
entry->export_name = export_name;
entry->import_name = import_name;
entry->module_request = module_request;
exports_.Add(entry, zone);
entry->module_request = AddModuleRequest(module_request);
AddSpecialExport(entry, zone);
}
void ModuleDescriptor::AddStarExport(
const AstRawString* module_request, Scanner::Location loc, Zone* zone) {
DCHECK_NOT_NULL(module_request);
ModuleEntry* entry = new (zone) ModuleEntry(loc);
entry->module_request = module_request;
exports_.Add(entry, zone);
Entry* entry = new (zone) Entry(loc);
entry->module_request = AddModuleRequest(module_request);
AddSpecialExport(entry, zone);
}
void ModuleDescriptor::MakeIndirectExportsExplicit() {
for (auto entry : exports_) {
if (entry->export_name == nullptr) continue;
if (entry->import_name != nullptr) continue;
DCHECK_NOT_NULL(entry->local_name);
auto it = regular_imports_.find(entry->local_name);
if (it != regular_imports_.end()) {
// Found an indirect export.
DCHECK_NOT_NULL(it->second->module_request);
DCHECK_NOT_NULL(it->second->import_name);
entry->import_name = it->second->import_name;
entry->module_request = it->second->module_request;
entry->local_name = nullptr;
namespace {
Handle<Object> ToStringOrUndefined(Isolate* isolate, const AstRawString* s) {
return (s == nullptr)
? Handle<Object>::cast(isolate->factory()->undefined_value())
: Handle<Object>::cast(s->string());
}
const AstRawString* FromStringOrUndefined(Isolate* isolate,
AstValueFactory* avfactory,
Handle<Object> object) {
if (object->IsUndefined(isolate)) return nullptr;
return avfactory->GetString(Handle<String>::cast(object));
}
} // namespace
Handle<ModuleInfoEntry> ModuleDescriptor::Entry::Serialize(
Isolate* isolate) const {
CHECK(Smi::IsValid(module_request)); // TODO(neis): Check earlier?
return ModuleInfoEntry::New(
isolate, ToStringOrUndefined(isolate, export_name),
ToStringOrUndefined(isolate, local_name),
ToStringOrUndefined(isolate, import_name),
Handle<Object>(Smi::FromInt(module_request), isolate));
}
ModuleDescriptor::Entry* ModuleDescriptor::Entry::Deserialize(
Isolate* isolate, AstValueFactory* avfactory,
Handle<ModuleInfoEntry> entry) {
Entry* result = new (avfactory->zone()) Entry(Scanner::Location::invalid());
result->export_name = FromStringOrUndefined(
isolate, avfactory, handle(entry->export_name(), isolate));
result->local_name = FromStringOrUndefined(
isolate, avfactory, handle(entry->local_name(), isolate));
result->import_name = FromStringOrUndefined(
isolate, avfactory, handle(entry->import_name(), isolate));
result->module_request = Smi::cast(entry->module_request())->value();
return result;
}
Handle<FixedArray> ModuleDescriptor::SerializeRegularExports(Isolate* isolate,
Zone* zone) const {
// We serialize regular exports in a way that lets us later iterate over their
// local names and for each local name immediately access all its export
// names. (Regular exports have neither import name nor module request.)
ZoneVector<Handle<Object>> data(zone);
data.reserve(2 * regular_exports_.size());
for (auto it = regular_exports_.begin(); it != regular_exports_.end();) {
// Find out how many export names this local name has.
auto next = it;
int size = 0;
do {
++next;
++size;
} while (next != regular_exports_.end() && next->first == it->first);
Handle<FixedArray> export_names = isolate->factory()->NewFixedArray(size);
data.push_back(it->second->local_name->string());
data.push_back(export_names);
// Collect the export names.
int i = 0;
for (; it != next; ++it) {
export_names->set(i++, *it->second->export_name->string());
}
DCHECK_EQ(i, size);
// Continue with the next distinct key.
DCHECK(it == next);
}
// We cannot create the FixedArray earlier because we only now know the
// precise size (the number of unique keys in regular_exports).
int size = static_cast<int>(data.size());
Handle<FixedArray> result = isolate->factory()->NewFixedArray(size);
for (int i = 0; i < size; ++i) {
result->set(i, *data[i]);
}
return result;
}
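The serialized layout built above pairs each local name with an array of all export names bound to it. A self-contained sketch of that grouping, with std::multimap standing in for the ZoneMultimap and plain strings for heap strings (names and types here are illustrative only):

#include <iostream>
#include <map>
#include <string>
#include <utility>
#include <vector>

// One serialized record: a local name plus every export name bound to it.
using Record = std::pair<std::string, std::vector<std::string>>;

std::vector<Record> SerializeRegularExports(
    const std::multimap<std::string, std::string>& regular_exports) {
  std::vector<Record> data;
  for (auto it = regular_exports.begin(); it != regular_exports.end();) {
    // Find out how many export names this local name has.
    auto next = it;
    while (next != regular_exports.end() && next->first == it->first) ++next;
    Record record{it->first, {}};
    for (; it != next; ++it) record.second.push_back(it->second);
    data.push_back(std::move(record));
  }
  return data;
}

int main() {
  // export {x, x as y}; export {z};
  std::multimap<std::string, std::string> exports{
      {"x", "x"}, {"x", "y"}, {"z", "z"}};
  for (const auto& record : SerializeRegularExports(exports)) {
    std::cout << record.first << " ->";
    for (const auto& name : record.second) std::cout << " " << name;
    std::cout << "\n";  // prints "x -> x y" and "z -> z"
  }
  return 0;
}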
void ModuleDescriptor::DeserializeRegularExports(Isolate* isolate,
AstValueFactory* avfactory,
Handle<FixedArray> data) {
for (int i = 0, length_i = data->length(); i < length_i;) {
Handle<String> local_name(String::cast(data->get(i++)), isolate);
Handle<FixedArray> export_names(FixedArray::cast(data->get(i++)), isolate);
for (int j = 0, length_j = export_names->length(); j < length_j; ++j) {
Handle<String> export_name(String::cast(export_names->get(j)), isolate);
Entry* entry =
new (avfactory->zone()) Entry(Scanner::Location::invalid());
entry->local_name = avfactory->GetString(local_name);
entry->export_name = avfactory->GetString(export_name);
AddRegularExport(entry);
}
}
}
void ModuleDescriptor::MakeIndirectExportsExplicit(Zone* zone) {
for (auto it = regular_exports_.begin(); it != regular_exports_.end();) {
Entry* entry = it->second;
DCHECK_NOT_NULL(entry->local_name);
auto import = regular_imports_.find(entry->local_name);
if (import != regular_imports_.end()) {
// Found an indirect export. Patch export entry and move it from regular
// to special.
DCHECK_NULL(entry->import_name);
DCHECK_LT(entry->module_request, 0);
DCHECK_NOT_NULL(import->second->import_name);
DCHECK_LE(0, import->second->module_request);
DCHECK_LT(import->second->module_request,
static_cast<int>(module_requests_.size()));
entry->import_name = import->second->import_name;
entry->module_request = import->second->module_request;
entry->local_name = nullptr;
AddSpecialExport(entry, zone);
it = regular_exports_.erase(it);
} else {
it++;
}
}
}
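The canonicalization above can also be pictured with standard containers: an export whose local name matches a regular import is rewritten to point straight at the imported module and moved from the regular to the special list. Everything below is an illustrative sketch, not V8's types:

#include <iostream>
#include <map>
#include <string>
#include <vector>

struct Entry {
  std::string export_name;
  std::string local_name;
  std::string import_name;
  int module_request = -1;  // -1 means "no module request"
};

// Moves indirect exports (exports of an imported binding) from regular_exports
// to special_exports, pointing them straight at the imported module.
void MakeIndirectExportsExplicit(
    std::multimap<std::string, Entry>& regular_exports,
    const std::map<std::string, Entry>& regular_imports,
    std::vector<Entry>& special_exports) {
  for (auto it = regular_exports.begin(); it != regular_exports.end();) {
    Entry& entry = it->second;
    auto import = regular_imports.find(entry.local_name);
    if (import != regular_imports.end()) {
      entry.import_name = import->second.import_name;
      entry.module_request = import->second.module_request;
      entry.local_name.clear();
      special_exports.push_back(entry);
      it = regular_exports.erase(it);
    } else {
      ++it;
    }
  }
}

int main() {
  // import {a as b} from "X"; export {b as c};
  std::map<std::string, Entry> imports{{"b", {"", "b", "a", 0}}};
  std::multimap<std::string, Entry> exports{{"b", {"c", "b", "", -1}}};
  std::vector<Entry> special;
  MakeIndirectExportsExplicit(exports, imports, special);
  std::cout << special[0].export_name << " <- " << special[0].import_name
            << " from module request " << special[0].module_request << "\n";
  return 0;
}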
namespace {
const ModuleDescriptor::Entry* BetterDuplicate(
const ModuleDescriptor::Entry* candidate,
ZoneMap<const AstRawString*, const ModuleDescriptor::Entry*>& export_names,
const ModuleDescriptor::Entry* current_duplicate) {
DCHECK_NOT_NULL(candidate->export_name);
DCHECK(candidate->location.IsValid());
auto insert_result =
export_names.insert(std::make_pair(candidate->export_name, candidate));
if (insert_result.second) return current_duplicate;
if (current_duplicate == nullptr) {
current_duplicate = insert_result.first->second;
}
return (candidate->location.beg_pos > current_duplicate->location.beg_pos)
? candidate
: current_duplicate;
}
} // namespace
const ModuleDescriptor::Entry* ModuleDescriptor::FindDuplicateExport(
Zone* zone) const {
const ModuleDescriptor::Entry* duplicate = nullptr;
ZoneMap<const AstRawString*, const ModuleDescriptor::Entry*> export_names(
zone);
for (const auto& elem : regular_exports_) {
duplicate = BetterDuplicate(elem.second, export_names, duplicate);
}
for (auto entry : special_exports_) {
if (entry->export_name == nullptr) continue; // Star export.
duplicate = BetterDuplicate(entry, export_names, duplicate);
}
return duplicate;
}
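FindDuplicateExport keeps only the last duplicate in source order. A minimal standalone sketch of that selection rule (std::map instead of ZoneMap, with a made-up Export struct):

#include <cassert>
#include <map>
#include <string>
#include <vector>

struct Export {
  std::string name;
  int position;  // source position; later duplicates have larger positions
};

// Returns the last (largest-position) export whose name was already exported,
// or nullptr if every export name is unique.
const Export* FindDuplicateExport(const std::vector<Export>& exports) {
  std::map<std::string, const Export*> seen;
  const Export* duplicate = nullptr;
  for (const Export& candidate : exports) {
    auto result = seen.insert({candidate.name, &candidate});
    if (result.second) continue;  // first occurrence of this export name
    if (duplicate == nullptr) duplicate = result.first->second;
    if (candidate.position > duplicate->position) duplicate = &candidate;
  }
  return duplicate;
}

int main() {
  // export {a}; export {b}; export {x as a};  -- "a" is exported twice.
  std::vector<Export> exports = {{"a", 0}, {"b", 10}, {"a", 20}};
  const Export* duplicate = FindDuplicateExport(exports);
  assert(duplicate != nullptr && duplicate->position == 20);
  return 0;
}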
bool ModuleDescriptor::Validate(ModuleScope* module_scope,
PendingCompilationErrorHandler* error_handler,
Zone* zone) {
@ -105,29 +237,19 @@ bool ModuleDescriptor::Validate(ModuleScope* module_scope,
// Report error iff there are duplicate exports.
{
ZoneAllocationPolicy allocator(zone);
ZoneHashMap* export_names = new (zone->New(sizeof(ZoneHashMap)))
ZoneHashMap(ZoneHashMap::PointersMatch,
ZoneHashMap::kDefaultHashMapCapacity, allocator);
for (auto entry : exports_) {
if (entry->export_name == nullptr) continue;
AstRawString* key = const_cast<AstRawString*>(entry->export_name);
ZoneHashMap::Entry* p =
export_names->LookupOrInsert(key, key->hash(), allocator);
DCHECK_NOT_NULL(p);
if (p->value != nullptr) {
error_handler->ReportMessageAt(
entry->location.beg_pos, entry->location.end_pos,
MessageTemplate::kDuplicateExport, entry->export_name);
return false;
}
p->value = key; // Anything but nullptr.
const Entry* entry = FindDuplicateExport(zone);
if (entry != nullptr) {
error_handler->ReportMessageAt(
entry->location.beg_pos, entry->location.end_pos,
MessageTemplate::kDuplicateExport, entry->export_name);
return false;
}
}
// Report error iff there are exports of non-existent local names.
for (auto entry : exports_) {
if (entry->local_name == nullptr) continue;
for (const auto& elem : regular_exports_) {
const Entry* entry = elem.second;
DCHECK_NOT_NULL(entry->local_name);
if (module_scope->LookupLocal(entry->local_name) == nullptr) {
error_handler->ReportMessageAt(
entry->location.beg_pos, entry->location.end_pos,
@ -136,7 +258,7 @@ bool ModuleDescriptor::Validate(ModuleScope* module_scope,
}
}
MakeIndirectExportsExplicit();
MakeIndirectExportsExplicit(zone);
return true;
}

122
deps/v8/src/ast/modules.h vendored

@ -7,19 +7,26 @@
#include "src/parsing/scanner.h" // Only for Scanner::Location.
#include "src/pending-compilation-error-handler.h"
#include "src/zone-containers.h"
#include "src/zone/zone-containers.h"
namespace v8 {
namespace internal {
class AstRawString;
class ModuleInfoEntry;
class ModuleDescriptor : public ZoneObject {
public:
explicit ModuleDescriptor(Zone* zone)
: exports_(1, zone), special_imports_(1, zone), regular_imports_(zone) {}
: module_requests_(zone),
special_exports_(1, zone),
namespace_imports_(1, zone),
regular_exports_(zone),
regular_imports_(zone) {}
// The following Add* methods are high-level convenience functions for use by
// the parser.
// import x from "foo.js";
// import {x} from "foo.js";
@ -37,9 +44,7 @@ class ModuleDescriptor : public ZoneObject {
// import "foo.js";
// import {} from "foo.js";
// export {} from "foo.js"; (sic!)
void AddEmptyImport(
const AstRawString* module_request, const Scanner::Location loc,
Zone* zone);
void AddEmptyImport(const AstRawString* module_request);
// export {x};
// export {x as y};
@ -67,38 +72,107 @@ class ModuleDescriptor : public ZoneObject {
bool Validate(ModuleScope* module_scope,
PendingCompilationErrorHandler* error_handler, Zone* zone);
struct ModuleEntry : public ZoneObject {
struct Entry : public ZoneObject {
const Scanner::Location location;
const AstRawString* export_name;
const AstRawString* local_name;
const AstRawString* import_name;
const AstRawString* module_request;
// The module_request value records the order in which modules are
// requested. It also functions as an index into the ModuleInfo's array of
// module specifiers and into the Module's array of requested modules. A
// negative value means no module request.
int module_request;
explicit ModuleEntry(Scanner::Location loc)
// TODO(neis): Remove local_name component?
explicit Entry(Scanner::Location loc)
: location(loc),
export_name(nullptr),
local_name(nullptr),
import_name(nullptr),
module_request(nullptr) {}
module_request(-1) {}
// (De-)serialization support.
// Note that the location value is not preserved as it's only needed by the
// parser. (A Deserialize'd entry has an invalid location.)
Handle<ModuleInfoEntry> Serialize(Isolate* isolate) const;
static Entry* Deserialize(Isolate* isolate, AstValueFactory* avfactory,
Handle<ModuleInfoEntry> entry);
};
const ZoneList<ModuleEntry*>& exports() const { return exports_; }
// Module requests.
const ZoneMap<const AstRawString*, int>& module_requests() const {
return module_requests_;
}
// Empty imports and namespace imports.
const ZoneList<const ModuleEntry*>& special_imports() const {
return special_imports_;
// Namespace imports.
const ZoneList<const Entry*>& namespace_imports() const {
return namespace_imports_;
}
// All the remaining imports, indexed by local name.
const ZoneMap<const AstRawString*, const ModuleEntry*>& regular_imports()
const {
const ZoneMap<const AstRawString*, const Entry*>& regular_imports() const {
return regular_imports_;
}
// Star exports and explicitly indirect exports.
const ZoneList<const Entry*>& special_exports() const {
return special_exports_;
}
// All the remaining exports, indexed by local name.
// After canonicalization (see Validate), these are exactly the local exports.
const ZoneMultimap<const AstRawString*, Entry*>& regular_exports() const {
return regular_exports_;
}
void AddRegularExport(Entry* entry) {
DCHECK_NOT_NULL(entry->export_name);
DCHECK_NOT_NULL(entry->local_name);
DCHECK_NULL(entry->import_name);
DCHECK_LT(entry->module_request, 0);
regular_exports_.insert(std::make_pair(entry->local_name, entry));
}
void AddSpecialExport(const Entry* entry, Zone* zone) {
DCHECK_NULL(entry->local_name);
DCHECK_LE(0, entry->module_request);
special_exports_.Add(entry, zone);
}
void AddRegularImport(const Entry* entry) {
DCHECK_NOT_NULL(entry->import_name);
DCHECK_NOT_NULL(entry->local_name);
DCHECK_NULL(entry->export_name);
DCHECK_LE(0, entry->module_request);
regular_imports_.insert(std::make_pair(entry->local_name, entry));
// We don't care if there's already an entry for this local name, as in that
// case we will report an error when declaring the variable.
}
void AddNamespaceImport(const Entry* entry, Zone* zone) {
DCHECK_NULL(entry->import_name);
DCHECK_NULL(entry->export_name);
DCHECK_NOT_NULL(entry->local_name);
DCHECK_LE(0, entry->module_request);
namespace_imports_.Add(entry, zone);
}
Handle<FixedArray> SerializeRegularExports(Isolate* isolate,
Zone* zone) const;
void DeserializeRegularExports(Isolate* isolate, AstValueFactory* avfactory,
Handle<FixedArray> data);
private:
ZoneList<ModuleEntry*> exports_;
ZoneList<const ModuleEntry*> special_imports_;
ZoneMap<const AstRawString*, const ModuleEntry*> regular_imports_;
// TODO(neis): Use STL datastructure instead of ZoneList?
ZoneMap<const AstRawString*, int> module_requests_;
ZoneList<const Entry*> special_exports_;
ZoneList<const Entry*> namespace_imports_;
ZoneMultimap<const AstRawString*, Entry*> regular_exports_;
ZoneMap<const AstRawString*, const Entry*> regular_imports_;
// If there are multiple export entries with the same export name, return the
// last of them (in source order). Otherwise return nullptr.
const Entry* FindDuplicateExport(Zone* zone) const;
// Find any implicitly indirect exports and make them explicit.
//
@ -116,7 +190,15 @@ class ModuleDescriptor : public ZoneObject {
// into:
// import {a as b} from "X"; export {a as c} from "X";
// (The import entry is never deleted.)
void MakeIndirectExportsExplicit();
void MakeIndirectExportsExplicit(Zone* zone);
int AddModuleRequest(const AstRawString* specifier) {
DCHECK_NOT_NULL(specifier);
auto it = module_requests_
.insert(std::make_pair(specifier, module_requests_.size()))
.first;
return it->second;
}
};
} // namespace internal
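AddModuleRequest hands out one index per distinct specifier, in first-seen order, by inserting with the map's current size as the tentative value; duplicates keep their original index. A self-contained sketch of that pattern (std::map standing in for ZoneMap):

#include <cassert>
#include <map>
#include <string>

// Returns a stable index per specifier, assigned in first-seen order.
int AddModuleRequest(std::map<std::string, int>& module_requests,
                     const std::string& specifier) {
  auto it = module_requests
                .insert({specifier, static_cast<int>(module_requests.size())})
                .first;
  return it->second;
}

int main() {
  std::map<std::string, int> requests;
  assert(AddModuleRequest(requests, "foo.js") == 0);
  assert(AddModuleRequest(requests, "bar.js") == 1);
  assert(AddModuleRequest(requests, "foo.js") == 0);  // deduplicated
  return 0;
}

If the specifier is already present, insert() leaves the existing value in place, so the index recorded at first sight is what later imports and exports of the same module share.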

75
deps/v8/src/ast/prettyprinter.cc vendored

@ -9,6 +9,7 @@
#include "src/ast/ast-value-factory.h"
#include "src/ast/scopes.h"
#include "src/base/platform/platform.h"
#include "src/globals.h"
namespace v8 {
namespace internal {
@ -603,8 +604,8 @@ void AstPrinter::PrintLiteralWithModeIndented(const char* info,
PrintLiteralIndented(info, value, true);
} else {
EmbeddedVector<char, 256> buf;
int pos = SNPrintF(buf, "%s (mode = %s", info,
Variable::Mode2String(var->mode()));
int pos =
SNPrintF(buf, "%s (mode = %s", info, VariableMode2String(var->mode()));
SNPrintF(buf + pos, ")");
PrintLiteralIndented(buf.start(), value, true);
}
@ -870,6 +871,9 @@ void AstPrinter::PrintTryStatement(TryStatement* node) {
case HandlerTable::DESUGARING:
prediction = "DESUGARING";
break;
case HandlerTable::ASYNC_AWAIT:
prediction = "ASYNC_AWAIT";
break;
}
Print(" %s\n", prediction);
}
@ -897,34 +901,27 @@ void AstPrinter::VisitClassLiteral(ClassLiteral* node) {
if (node->extends() != nullptr) {
PrintIndentedVisit("EXTENDS", node->extends());
}
PrintProperties(node->properties());
PrintClassProperties(node->properties());
}
void AstPrinter::PrintProperties(
ZoneList<ObjectLiteral::Property*>* properties) {
void AstPrinter::PrintClassProperties(
ZoneList<ClassLiteral::Property*>* properties) {
for (int i = 0; i < properties->length(); i++) {
ObjectLiteral::Property* property = properties->at(i);
ClassLiteral::Property* property = properties->at(i);
const char* prop_kind = nullptr;
switch (property->kind()) {
case ObjectLiteral::Property::CONSTANT:
prop_kind = "CONSTANT";
case ClassLiteral::Property::METHOD:
prop_kind = "METHOD";
break;
case ObjectLiteral::Property::COMPUTED:
prop_kind = "COMPUTED";
break;
case ObjectLiteral::Property::MATERIALIZED_LITERAL:
prop_kind = "MATERIALIZED_LITERAL";
break;
case ObjectLiteral::Property::PROTOTYPE:
prop_kind = "PROTOTYPE";
break;
case ObjectLiteral::Property::GETTER:
case ClassLiteral::Property::GETTER:
prop_kind = "GETTER";
break;
case ObjectLiteral::Property::SETTER:
case ClassLiteral::Property::SETTER:
prop_kind = "SETTER";
break;
case ClassLiteral::Property::FIELD:
prop_kind = "FIELD";
break;
}
EmbeddedVector<char, 128> buf;
SNPrintF(buf, "PROPERTY%s - %s", property->is_static() ? " - STATIC" : "",
@ -986,7 +983,40 @@ void AstPrinter::VisitObjectLiteral(ObjectLiteral* node) {
EmbeddedVector<char, 128> buf;
SNPrintF(buf, "literal_index = %d\n", node->literal_index());
PrintIndented(buf.start());
PrintProperties(node->properties());
PrintObjectProperties(node->properties());
}
void AstPrinter::PrintObjectProperties(
ZoneList<ObjectLiteral::Property*>* properties) {
for (int i = 0; i < properties->length(); i++) {
ObjectLiteral::Property* property = properties->at(i);
const char* prop_kind = nullptr;
switch (property->kind()) {
case ObjectLiteral::Property::CONSTANT:
prop_kind = "CONSTANT";
break;
case ObjectLiteral::Property::COMPUTED:
prop_kind = "COMPUTED";
break;
case ObjectLiteral::Property::MATERIALIZED_LITERAL:
prop_kind = "MATERIALIZED_LITERAL";
break;
case ObjectLiteral::Property::PROTOTYPE:
prop_kind = "PROTOTYPE";
break;
case ObjectLiteral::Property::GETTER:
prop_kind = "GETTER";
break;
case ObjectLiteral::Property::SETTER:
prop_kind = "SETTER";
break;
}
EmbeddedVector<char, 128> buf;
SNPrintF(buf, "PROPERTY - %s", prop_kind);
IndentedScope prop(this, buf.start());
PrintIndentedVisit("KEY", properties->at(i)->key());
PrintIndentedVisit("VALUE", properties->at(i)->value());
}
}
@ -1028,9 +1058,6 @@ void AstPrinter::VisitVariableProxy(VariableProxy* node) {
case VariableLocation::CONTEXT:
SNPrintF(buf + pos, " context[%d]", var->index());
break;
case VariableLocation::GLOBAL:
SNPrintF(buf + pos, " global[%d]", var->index());
break;
case VariableLocation::LOOKUP:
SNPrintF(buf + pos, " lookup");
break;

3
deps/v8/src/ast/prettyprinter.h vendored

@ -93,7 +93,8 @@ class AstPrinter final : public AstVisitor<AstPrinter> {
Variable* var,
Handle<Object> value);
void PrintLabelsIndented(ZoneList<const AstRawString*>* labels);
void PrintProperties(ZoneList<ObjectLiteral::Property*>* properties);
void PrintObjectProperties(ZoneList<ObjectLiteral::Property*>* properties);
void PrintClassProperties(ZoneList<ClassLiteral::Property*>* properties);
void PrintTryStatement(TryStatement* try_statement);
void inc_indent() { indent_++; }

666
deps/v8/src/ast/scopeinfo.cc vendored

@ -2,33 +2,92 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/ast/scopeinfo.h"
#include <stdlib.h>
#include "src/ast/context-slot-cache.h"
#include "src/ast/scopes.h"
#include "src/ast/variables.h"
#include "src/bootstrapper.h"
namespace v8 {
namespace internal {
// An entry in ModuleVariableEntries consists of several slots:
enum ModuleVariableEntryOffset {
kModuleVariableNameOffset,
kModuleVariableIndexOffset,
kModuleVariablePropertiesOffset,
kModuleVariableEntryLength // Sentinel value.
};
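Each module variable therefore occupies a fixed-length run of slots (name, index, properties) in the flat scope-info array. The sketch below reads such a layout with standard types; std::variant stands in for tagged heap slots and all names are illustrative, not V8's:

#include <cassert>
#include <string>
#include <utility>
#include <variant>
#include <vector>

// Offsets within one module-variable entry, mirroring the enum above.
enum { kNameOffset = 0, kIndexOffset = 1, kPropertiesOffset = 2, kEntryLength = 3 };

using Slot = std::variant<std::string, int>;

// Reads the name and index of the i-th module variable from a flat slot array.
std::pair<std::string, int> ModuleVariable(const std::vector<Slot>& slots,
                                           int entries_start, int i) {
  int base = entries_start + i * kEntryLength;
  return {std::get<std::string>(slots[base + kNameOffset]),
          std::get<int>(slots[base + kIndexOffset])};
}

int main() {
  // Two entries: ("x", index 0, properties 7) and ("y", index 1, properties 7).
  std::vector<Slot> slots = {std::string("x"), 0, 7, std::string("y"), 1, 7};
  std::pair<std::string, int> entry =
      ModuleVariable(slots, /*entries_start=*/0, /*i=*/1);
  assert(entry.first == "y" && entry.second == 1);
  return 0;
}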
Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone,
Scope* scope) {
// Collect stack and context locals.
ZoneList<Variable*> stack_locals(scope->StackLocalCount(), zone);
ZoneList<Variable*> context_locals(scope->ContextLocalCount(), zone);
ZoneList<Variable*> context_globals(scope->ContextGlobalCount(), zone);
#ifdef DEBUG
bool ScopeInfo::Equals(ScopeInfo* other) const {
if (length() != other->length()) return false;
for (int index = 0; index < length(); ++index) {
Object* entry = get(index);
Object* other_entry = other->get(index);
if (entry->IsSmi()) {
if (entry != other_entry) return false;
} else {
if (HeapObject::cast(entry)->map()->instance_type() !=
HeapObject::cast(other_entry)->map()->instance_type()) {
return false;
}
if (entry->IsString()) {
if (!String::cast(entry)->Equals(String::cast(other_entry))) {
return false;
}
} else if (entry->IsScopeInfo()) {
if (!ScopeInfo::cast(entry)->Equals(ScopeInfo::cast(other_entry))) {
return false;
}
} else if (entry->IsModuleInfo()) {
if (!ModuleInfo::cast(entry)->Equals(ModuleInfo::cast(other_entry))) {
return false;
}
} else {
UNREACHABLE();
return false;
}
}
}
return true;
}
#endif
Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone, Scope* scope,
MaybeHandle<ScopeInfo> outer_scope) {
// Collect variables.
ZoneList<Variable*>* locals = scope->locals();
int stack_local_count = 0;
int context_local_count = 0;
int module_vars_count = 0;
// Stack allocated block scope variables are allocated in the parent
// declaration scope, but are recorded in the block scope's scope info. First
// slot index indicates at which offset a particular scope starts in the
// parent declaration scope.
int first_slot_index = 0;
for (int i = 0; i < locals->length(); i++) {
Variable* var = locals->at(i);
switch (var->location()) {
case VariableLocation::LOCAL:
if (stack_local_count == 0) first_slot_index = var->index();
stack_local_count++;
break;
case VariableLocation::CONTEXT:
context_local_count++;
break;
case VariableLocation::MODULE:
module_vars_count++;
break;
default:
break;
}
}
DCHECK(module_vars_count == 0 || scope->is_module_scope());
scope->CollectStackAndContextLocals(&stack_locals, &context_locals,
&context_globals);
const int stack_local_count = stack_locals.length();
const int context_local_count = context_locals.length();
const int context_global_count = context_globals.length();
// Make sure we allocate the correct amount.
DCHECK_EQ(scope->ContextLocalCount(), context_local_count);
DCHECK_EQ(scope->ContextGlobalCount(), context_global_count);
// Determine use and location of the "this" binding if it is present.
VariableAllocationInfo receiver_info;
@ -53,7 +112,6 @@ Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone,
// Determine use and location of the function variable if it is present.
VariableAllocationInfo function_name_info;
VariableMode function_variable_mode;
if (scope->is_function_scope() &&
scope->AsDeclarationScope()->function_var() != nullptr) {
Variable* var = scope->AsDeclarationScope()->function_var();
@ -65,20 +123,21 @@ Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone,
DCHECK(var->IsStackLocal());
function_name_info = STACK;
}
function_variable_mode = var->mode();
} else {
function_name_info = NONE;
function_variable_mode = VAR;
}
DCHECK(context_global_count == 0 || scope->scope_type() == SCRIPT_SCOPE);
const bool has_function_name = function_name_info != NONE;
const bool has_receiver = receiver_info == STACK || receiver_info == CONTEXT;
const int parameter_count = scope->num_parameters();
const bool has_outer_scope_info = !outer_scope.is_null();
const int length = kVariablePartIndex + parameter_count +
(1 + stack_local_count) + 2 * context_local_count +
2 * context_global_count +
(has_receiver ? 1 : 0) + (has_function_name ? 2 : 0);
(has_receiver ? 1 : 0) + (has_function_name ? 2 : 0) +
(has_outer_scope_info ? 1 : 0) +
(scope->is_module_scope()
? 2 + kModuleVariableEntryLength * module_vars_count
: 0);
Factory* factory = isolate->factory();
Handle<ScopeInfo> scope_info = factory->NewScopeInfo(length);
@ -96,27 +155,29 @@ Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone,
}
// Encode the flags.
int flags = ScopeTypeField::encode(scope->scope_type()) |
CallsEvalField::encode(scope->calls_eval()) |
LanguageModeField::encode(scope->language_mode()) |
DeclarationScopeField::encode(scope->is_declaration_scope()) |
ReceiverVariableField::encode(receiver_info) |
HasNewTargetField::encode(has_new_target) |
FunctionVariableField::encode(function_name_info) |
FunctionVariableMode::encode(function_variable_mode) |
AsmModuleField::encode(asm_module) |
AsmFunctionField::encode(asm_function) |
HasSimpleParametersField::encode(has_simple_parameters) |
FunctionKindField::encode(function_kind);
int flags =
ScopeTypeField::encode(scope->scope_type()) |
CallsEvalField::encode(scope->calls_eval()) |
LanguageModeField::encode(scope->language_mode()) |
DeclarationScopeField::encode(scope->is_declaration_scope()) |
ReceiverVariableField::encode(receiver_info) |
HasNewTargetField::encode(has_new_target) |
FunctionVariableField::encode(function_name_info) |
AsmModuleField::encode(asm_module) |
AsmFunctionField::encode(asm_function) |
HasSimpleParametersField::encode(has_simple_parameters) |
FunctionKindField::encode(function_kind) |
HasOuterScopeInfoField::encode(has_outer_scope_info) |
IsDebugEvaluateScopeField::encode(scope->is_debug_evaluate_scope());
scope_info->SetFlags(flags);
scope_info->SetParameterCount(parameter_count);
scope_info->SetStackLocalCount(stack_local_count);
scope_info->SetContextLocalCount(context_local_count);
scope_info->SetContextGlobalCount(context_global_count);
int index = kVariablePartIndex;
// Add parameters.
DCHECK(index == scope_info->ParameterEntriesIndex());
DCHECK_EQ(index, scope_info->ParameterNamesIndex());
if (scope->is_declaration_scope()) {
for (int i = 0; i < parameter_count; ++i) {
scope_info->set(index++,
@ -124,68 +185,66 @@ Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone,
}
}
// Add stack locals' names. We are assuming that the stack locals'
// slots are allocated in increasing order, so we can simply add
// them to the ScopeInfo object.
int first_slot_index;
if (stack_local_count > 0) {
first_slot_index = stack_locals[0]->index();
} else {
first_slot_index = 0;
}
DCHECK(index == scope_info->StackLocalFirstSlotIndex());
// Add stack locals' names, context locals' names and info, module variables'
// names and info. We are assuming that the stack locals' slots are allocated
// in increasing order, so we can simply add them to the ScopeInfo object.
// Context locals are added using their index.
DCHECK_EQ(index, scope_info->StackLocalFirstSlotIndex());
scope_info->set(index++, Smi::FromInt(first_slot_index));
DCHECK(index == scope_info->StackLocalEntriesIndex());
for (int i = 0; i < stack_local_count; ++i) {
DCHECK(stack_locals[i]->index() == first_slot_index + i);
scope_info->set(index++, *stack_locals[i]->name());
DCHECK_EQ(index, scope_info->StackLocalNamesIndex());
int stack_local_base = index;
int context_local_base = stack_local_base + stack_local_count;
int context_local_info_base = context_local_base + context_local_count;
int module_var_entry = scope_info->ModuleVariablesIndex();
for (int i = 0; i < locals->length(); ++i) {
Variable* var = locals->at(i);
switch (var->location()) {
case VariableLocation::LOCAL: {
int local_index = var->index() - first_slot_index;
DCHECK_LE(0, local_index);
DCHECK_LT(local_index, stack_local_count);
scope_info->set(stack_local_base + local_index, *var->name());
break;
}
case VariableLocation::CONTEXT: {
// Due to duplicate parameters, context locals aren't guaranteed to come
// in order.
int local_index = var->index() - Context::MIN_CONTEXT_SLOTS;
DCHECK_LE(0, local_index);
DCHECK_LT(local_index, context_local_count);
uint32_t info = VariableModeField::encode(var->mode()) |
InitFlagField::encode(var->initialization_flag()) |
MaybeAssignedFlagField::encode(var->maybe_assigned());
scope_info->set(context_local_base + local_index, *var->name());
scope_info->set(context_local_info_base + local_index,
Smi::FromInt(info));
break;
}
case VariableLocation::MODULE: {
scope_info->set(module_var_entry + kModuleVariableNameOffset,
*var->name());
scope_info->set(module_var_entry + kModuleVariableIndexOffset,
Smi::FromInt(var->index()));
uint32_t properties =
VariableModeField::encode(var->mode()) |
InitFlagField::encode(var->initialization_flag()) |
MaybeAssignedFlagField::encode(var->maybe_assigned());
scope_info->set(module_var_entry + kModuleVariablePropertiesOffset,
Smi::FromInt(properties));
module_var_entry += kModuleVariableEntryLength;
break;
}
default:
break;
}
}
// Due to usage analysis, context-allocated locals are not necessarily in
// increasing order: Some of them may be parameters which are allocated before
// the non-parameter locals. When the non-parameter locals are sorted
// according to usage, the allocated slot indices may not be in increasing
// order with the variable list anymore. Thus, we first need to sort them by
// context slot index before adding them to the ScopeInfo object.
context_locals.Sort(&Variable::CompareIndex);
// Add context locals' names.
DCHECK(index == scope_info->ContextLocalNameEntriesIndex());
for (int i = 0; i < context_local_count; ++i) {
scope_info->set(index++, *context_locals[i]->name());
}
// Add context globals' names.
DCHECK(index == scope_info->ContextGlobalNameEntriesIndex());
for (int i = 0; i < context_global_count; ++i) {
scope_info->set(index++, *context_globals[i]->name());
}
// Add context locals' info.
DCHECK(index == scope_info->ContextLocalInfoEntriesIndex());
for (int i = 0; i < context_local_count; ++i) {
Variable* var = context_locals[i];
uint32_t value =
ContextLocalMode::encode(var->mode()) |
ContextLocalInitFlag::encode(var->initialization_flag()) |
ContextLocalMaybeAssignedFlag::encode(var->maybe_assigned());
scope_info->set(index++, Smi::FromInt(value));
}
// Add context globals' info.
DCHECK(index == scope_info->ContextGlobalInfoEntriesIndex());
for (int i = 0; i < context_global_count; ++i) {
Variable* var = context_globals[i];
// TODO(ishell): do we need this kind of info for globals here?
uint32_t value =
ContextLocalMode::encode(var->mode()) |
ContextLocalInitFlag::encode(var->initialization_flag()) |
ContextLocalMaybeAssignedFlag::encode(var->maybe_assigned());
scope_info->set(index++, Smi::FromInt(value));
}
index += stack_local_count + 2 * context_local_count;
// If the receiver is allocated, add its index.
DCHECK(index == scope_info->ReceiverEntryIndex());
DCHECK_EQ(index, scope_info->ReceiverInfoIndex());
if (has_receiver) {
int var_index = scope->AsDeclarationScope()->receiver()->index();
scope_info->set(index++, Smi::FromInt(var_index));
@ -194,7 +253,7 @@ Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone,
}
// If present, add the function variable name and its index.
DCHECK(index == scope_info->FunctionNameEntryIndex());
DCHECK_EQ(index, scope_info->FunctionNameInfoIndex());
if (has_function_name) {
int var_index = scope->AsDeclarationScope()->function_var()->index();
scope_info->set(index++,
@ -204,75 +263,130 @@ Handle<ScopeInfo> ScopeInfo::Create(Isolate* isolate, Zone* zone,
var_index == scope_info->ContextLength() - 1);
}
DCHECK(index == scope_info->length());
DCHECK(scope->num_parameters() == scope_info->ParameterCount());
DCHECK(scope->num_heap_slots() == scope_info->ContextLength() ||
(scope->num_heap_slots() == kVariablePartIndex &&
scope_info->ContextLength() == 0));
// If present, add the outer scope info.
DCHECK(index == scope_info->OuterScopeInfoIndex());
if (has_outer_scope_info) {
scope_info->set(index++, *outer_scope.ToHandleChecked());
}
// Module-specific information (only for module scopes).
if (scope->is_module_scope()) {
Handle<ModuleInfo> module_info =
ModuleInfo::New(isolate, zone, scope->AsModuleScope()->module());
DCHECK_EQ(index, scope_info->ModuleInfoIndex());
scope_info->set(index++, *module_info);
DCHECK_EQ(index, scope_info->ModuleVariableCountIndex());
scope_info->set(index++, Smi::FromInt(module_vars_count));
DCHECK_EQ(index, scope_info->ModuleVariablesIndex());
// The variable entries themselves have already been written above.
index += kModuleVariableEntryLength * module_vars_count;
}
DCHECK_EQ(index, scope_info->length());
DCHECK_EQ(scope->num_parameters(), scope_info->ParameterCount());
DCHECK_EQ(scope->num_heap_slots(), scope_info->ContextLength());
return scope_info;
}
Handle<ScopeInfo> ScopeInfo::CreateForWithScope(
Isolate* isolate, MaybeHandle<ScopeInfo> outer_scope) {
const bool has_outer_scope_info = !outer_scope.is_null();
const int length = kVariablePartIndex + 1 + (has_outer_scope_info ? 1 : 0);
Factory* factory = isolate->factory();
Handle<ScopeInfo> scope_info = factory->NewScopeInfo(length);
// Encode the flags.
int flags =
ScopeTypeField::encode(WITH_SCOPE) | CallsEvalField::encode(false) |
LanguageModeField::encode(SLOPPY) | DeclarationScopeField::encode(false) |
ReceiverVariableField::encode(NONE) | HasNewTargetField::encode(false) |
FunctionVariableField::encode(NONE) | AsmModuleField::encode(false) |
AsmFunctionField::encode(false) | HasSimpleParametersField::encode(true) |
FunctionKindField::encode(kNormalFunction) |
HasOuterScopeInfoField::encode(has_outer_scope_info) |
IsDebugEvaluateScopeField::encode(false);
scope_info->SetFlags(flags);
scope_info->SetParameterCount(0);
scope_info->SetStackLocalCount(0);
scope_info->SetContextLocalCount(0);
int index = kVariablePartIndex;
DCHECK_EQ(index, scope_info->ParameterNamesIndex());
DCHECK_EQ(index, scope_info->StackLocalFirstSlotIndex());
scope_info->set(index++, Smi::FromInt(0));
DCHECK_EQ(index, scope_info->StackLocalNamesIndex());
DCHECK_EQ(index, scope_info->ReceiverInfoIndex());
DCHECK_EQ(index, scope_info->FunctionNameInfoIndex());
DCHECK(index == scope_info->OuterScopeInfoIndex());
if (has_outer_scope_info) {
scope_info->set(index++, *outer_scope.ToHandleChecked());
}
DCHECK_EQ(index, scope_info->length());
DCHECK_EQ(0, scope_info->ParameterCount());
DCHECK_EQ(Context::MIN_CONTEXT_SLOTS, scope_info->ContextLength());
return scope_info;
}
Handle<ScopeInfo> ScopeInfo::CreateGlobalThisBinding(Isolate* isolate) {
DCHECK(isolate->bootstrapper()->IsActive());
const int stack_local_count = 0;
const int context_local_count = 1;
const int context_global_count = 0;
const bool has_simple_parameters = true;
const VariableAllocationInfo receiver_info = CONTEXT;
const VariableAllocationInfo function_name_info = NONE;
const VariableMode function_variable_mode = VAR;
const bool has_function_name = false;
const bool has_receiver = true;
const bool has_outer_scope_info = false;
const int parameter_count = 0;
const int length = kVariablePartIndex + parameter_count +
(1 + stack_local_count) + 2 * context_local_count +
2 * context_global_count +
(has_receiver ? 1 : 0) + (has_function_name ? 2 : 0);
(has_receiver ? 1 : 0) + (has_function_name ? 2 : 0) +
(has_outer_scope_info ? 1 : 0);
Factory* factory = isolate->factory();
Handle<ScopeInfo> scope_info = factory->NewScopeInfo(length);
// Encode the flags.
int flags = ScopeTypeField::encode(SCRIPT_SCOPE) |
CallsEvalField::encode(false) |
LanguageModeField::encode(SLOPPY) |
DeclarationScopeField::encode(true) |
ReceiverVariableField::encode(receiver_info) |
FunctionVariableField::encode(function_name_info) |
FunctionVariableMode::encode(function_variable_mode) |
AsmModuleField::encode(false) | AsmFunctionField::encode(false) |
HasSimpleParametersField::encode(has_simple_parameters) |
FunctionKindField::encode(FunctionKind::kNormalFunction);
int flags =
ScopeTypeField::encode(SCRIPT_SCOPE) | CallsEvalField::encode(false) |
LanguageModeField::encode(SLOPPY) | DeclarationScopeField::encode(true) |
ReceiverVariableField::encode(receiver_info) |
FunctionVariableField::encode(function_name_info) |
AsmModuleField::encode(false) | AsmFunctionField::encode(false) |
HasSimpleParametersField::encode(has_simple_parameters) |
FunctionKindField::encode(FunctionKind::kNormalFunction) |
HasOuterScopeInfoField::encode(has_outer_scope_info) |
IsDebugEvaluateScopeField::encode(false);
scope_info->SetFlags(flags);
scope_info->SetParameterCount(parameter_count);
scope_info->SetStackLocalCount(stack_local_count);
scope_info->SetContextLocalCount(context_local_count);
scope_info->SetContextGlobalCount(context_global_count);
int index = kVariablePartIndex;
const int first_slot_index = 0;
DCHECK(index == scope_info->StackLocalFirstSlotIndex());
DCHECK_EQ(index, scope_info->StackLocalFirstSlotIndex());
scope_info->set(index++, Smi::FromInt(first_slot_index));
DCHECK(index == scope_info->StackLocalEntriesIndex());
DCHECK_EQ(index, scope_info->StackLocalNamesIndex());
// Here we add info for context-allocated "this".
DCHECK(index == scope_info->ContextLocalNameEntriesIndex());
DCHECK_EQ(index, scope_info->ContextLocalNamesIndex());
scope_info->set(index++, *isolate->factory()->this_string());
DCHECK(index == scope_info->ContextLocalInfoEntriesIndex());
const uint32_t value = ContextLocalMode::encode(CONST) |
ContextLocalInitFlag::encode(kCreatedInitialized) |
ContextLocalMaybeAssignedFlag::encode(kNotAssigned);
DCHECK_EQ(index, scope_info->ContextLocalInfosIndex());
const uint32_t value = VariableModeField::encode(CONST) |
InitFlagField::encode(kCreatedInitialized) |
MaybeAssignedFlagField::encode(kNotAssigned);
scope_info->set(index++, Smi::FromInt(value));
// And here we record that this scopeinfo binds a receiver.
DCHECK(index == scope_info->ReceiverEntryIndex());
DCHECK_EQ(index, scope_info->ReceiverInfoIndex());
const int receiver_index = Context::MIN_CONTEXT_SLOTS + 0;
scope_info->set(index++, Smi::FromInt(receiver_index));
DCHECK(index == scope_info->FunctionNameEntryIndex());
DCHECK_EQ(index, scope_info->FunctionNameInfoIndex());
DCHECK_EQ(index, scope_info->OuterScopeInfoIndex());
DCHECK_EQ(index, scope_info->length());
DCHECK_EQ(scope_info->ParameterCount(), 0);
DCHECK_EQ(scope_info->ContextLength(), Context::MIN_CONTEXT_SLOTS + 1);
@ -282,12 +396,12 @@ Handle<ScopeInfo> ScopeInfo::CreateGlobalThisBinding(Isolate* isolate) {
ScopeInfo* ScopeInfo::Empty(Isolate* isolate) {
return reinterpret_cast<ScopeInfo*>(isolate->heap()->empty_fixed_array());
return isolate->heap()->empty_scope_info();
}
ScopeType ScopeInfo::scope_type() {
DCHECK(length() > 0);
DCHECK_LT(0, length());
return ScopeTypeField::decode(Flags());
}
@ -325,19 +439,17 @@ int ScopeInfo::StackSlotCount() {
int ScopeInfo::ContextLength() {
if (length() > 0) {
int context_locals = ContextLocalCount();
int context_globals = ContextGlobalCount();
bool function_name_context_slot =
FunctionVariableField::decode(Flags()) == CONTEXT;
bool has_context = context_locals > 0 || context_globals > 0 ||
function_name_context_slot ||
bool has_context = context_locals > 0 || function_name_context_slot ||
scope_type() == WITH_SCOPE ||
(scope_type() == BLOCK_SCOPE && CallsSloppyEval() &&
is_declaration_scope()) ||
is_declaration_scope()) ||
(scope_type() == FUNCTION_SCOPE && CallsSloppyEval()) ||
scope_type() == MODULE_SCOPE;
if (has_context) {
return Context::MIN_CONTEXT_SLOTS + context_locals + context_globals +
return Context::MIN_CONTEXT_SLOTS + context_locals +
(function_name_context_slot ? 1 : 0);
}
}
@ -375,6 +487,30 @@ bool ScopeInfo::HasFunctionName() {
}
}
bool ScopeInfo::HasOuterScopeInfo() {
if (length() > 0) {
return HasOuterScopeInfoField::decode(Flags());
} else {
return false;
}
}
bool ScopeInfo::IsDebugEvaluateScope() {
if (length() > 0) {
return IsDebugEvaluateScopeField::decode(Flags());
} else {
return false;
}
}
void ScopeInfo::SetIsDebugEvaluateScope() {
if (length() > 0) {
DCHECK_EQ(scope_type(), WITH_SCOPE);
SetFlags(Flags() | IsDebugEvaluateScopeField::encode(true));
} else {
UNREACHABLE();
}
}
bool ScopeInfo::HasHeapAllocatedLocals() {
if (length() > 0) {
@ -392,68 +528,85 @@ bool ScopeInfo::HasContext() {
String* ScopeInfo::FunctionName() {
DCHECK(HasFunctionName());
return String::cast(get(FunctionNameEntryIndex()));
return String::cast(get(FunctionNameInfoIndex()));
}
ScopeInfo* ScopeInfo::OuterScopeInfo() {
DCHECK(HasOuterScopeInfo());
return ScopeInfo::cast(get(OuterScopeInfoIndex()));
}
ModuleInfo* ScopeInfo::ModuleDescriptorInfo() {
DCHECK(scope_type() == MODULE_SCOPE);
return ModuleInfo::cast(get(ModuleInfoIndex()));
}
String* ScopeInfo::ParameterName(int var) {
DCHECK(0 <= var && var < ParameterCount());
int info_index = ParameterEntriesIndex() + var;
DCHECK_LE(0, var);
DCHECK_LT(var, ParameterCount());
int info_index = ParameterNamesIndex() + var;
return String::cast(get(info_index));
}
String* ScopeInfo::LocalName(int var) {
DCHECK(0 <= var && var < LocalCount());
DCHECK(StackLocalEntriesIndex() + StackLocalCount() ==
ContextLocalNameEntriesIndex());
int info_index = StackLocalEntriesIndex() + var;
DCHECK_LE(0, var);
DCHECK_LT(var, LocalCount());
DCHECK(StackLocalNamesIndex() + StackLocalCount() ==
ContextLocalNamesIndex());
int info_index = StackLocalNamesIndex() + var;
return String::cast(get(info_index));
}
String* ScopeInfo::StackLocalName(int var) {
DCHECK(0 <= var && var < StackLocalCount());
int info_index = StackLocalEntriesIndex() + var;
DCHECK_LE(0, var);
DCHECK_LT(var, StackLocalCount());
int info_index = StackLocalNamesIndex() + var;
return String::cast(get(info_index));
}
int ScopeInfo::StackLocalIndex(int var) {
DCHECK(0 <= var && var < StackLocalCount());
DCHECK_LE(0, var);
DCHECK_LT(var, StackLocalCount());
int first_slot_index = Smi::cast(get(StackLocalFirstSlotIndex()))->value();
return first_slot_index + var;
}
String* ScopeInfo::ContextLocalName(int var) {
DCHECK(0 <= var && var < ContextLocalCount() + ContextGlobalCount());
int info_index = ContextLocalNameEntriesIndex() + var;
DCHECK_LE(0, var);
DCHECK_LT(var, ContextLocalCount());
int info_index = ContextLocalNamesIndex() + var;
return String::cast(get(info_index));
}
VariableMode ScopeInfo::ContextLocalMode(int var) {
DCHECK(0 <= var && var < ContextLocalCount() + ContextGlobalCount());
int info_index = ContextLocalInfoEntriesIndex() + var;
DCHECK_LE(0, var);
DCHECK_LT(var, ContextLocalCount());
int info_index = ContextLocalInfosIndex() + var;
int value = Smi::cast(get(info_index))->value();
return ContextLocalMode::decode(value);
return VariableModeField::decode(value);
}
InitializationFlag ScopeInfo::ContextLocalInitFlag(int var) {
DCHECK(0 <= var && var < ContextLocalCount() + ContextGlobalCount());
int info_index = ContextLocalInfoEntriesIndex() + var;
DCHECK_LE(0, var);
DCHECK_LT(var, ContextLocalCount());
int info_index = ContextLocalInfosIndex() + var;
int value = Smi::cast(get(info_index))->value();
return ContextLocalInitFlag::decode(value);
return InitFlagField::decode(value);
}
MaybeAssignedFlag ScopeInfo::ContextLocalMaybeAssignedFlag(int var) {
DCHECK(0 <= var && var < ContextLocalCount() + ContextGlobalCount());
int info_index = ContextLocalInfoEntriesIndex() + var;
DCHECK_LE(0, var);
DCHECK_LT(var, ContextLocalCount());
int info_index = ContextLocalInfosIndex() + var;
int value = Smi::cast(get(info_index))->value();
return ContextLocalMaybeAssignedFlag::decode(value);
return MaybeAssignedFlagField::decode(value);
}
bool ScopeInfo::VariableIsSynthetic(String* name) {
@ -470,8 +623,8 @@ int ScopeInfo::StackSlotIndex(String* name) {
DCHECK(name->IsInternalizedString());
if (length() > 0) {
int first_slot_index = Smi::cast(get(StackLocalFirstSlotIndex()))->value();
int start = StackLocalEntriesIndex();
int end = StackLocalEntriesIndex() + StackLocalCount();
int start = StackLocalNamesIndex();
int end = start + StackLocalCount();
for (int i = start; i < end; ++i) {
if (name == get(i)) {
return i - start + first_slot_index;
@ -481,27 +634,54 @@ int ScopeInfo::StackSlotIndex(String* name) {
return -1;
}
int ScopeInfo::ModuleIndex(Handle<String> name, VariableMode* mode,
InitializationFlag* init_flag,
MaybeAssignedFlag* maybe_assigned_flag) {
DCHECK_EQ(scope_type(), MODULE_SCOPE);
DCHECK(name->IsInternalizedString());
DCHECK_NOT_NULL(mode);
DCHECK_NOT_NULL(init_flag);
DCHECK_NOT_NULL(maybe_assigned_flag);
int module_vars_count = Smi::cast(get(ModuleVariableCountIndex()))->value();
int entry = ModuleVariablesIndex();
for (int i = 0; i < module_vars_count; ++i) {
if (*name == get(entry + kModuleVariableNameOffset)) {
int index = Smi::cast(get(entry + kModuleVariableIndexOffset))->value();
int properties =
Smi::cast(get(entry + kModuleVariablePropertiesOffset))->value();
*mode = VariableModeField::decode(properties);
*init_flag = InitFlagField::decode(properties);
*maybe_assigned_flag = MaybeAssignedFlagField::decode(properties);
return index;
}
entry += kModuleVariableEntryLength;
}
return -1;
}
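
ModuleIndex() above scans the module-variable region in fixed strides of kModuleVariableEntryLength, comparing the name slot of each entry and decoding the packed properties on a hit. Below is a minimal standalone sketch of that access pattern, using a std::vector of plain slots instead of V8's FixedArray/Smi types; all names in it are illustrative, not V8 API.

#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

// Each module variable occupies a fixed number of consecutive slots:
// [name, index, packed properties], mirroring the kModuleVariable*Offset idea.
constexpr int kEntryLength = 3;
constexpr int kNameOffset = 0;
constexpr int kIndexOffset = 1;
constexpr int kPropertiesOffset = 2;

// A slot is either a string or an integer in the real FixedArray; here we
// simply keep two parallel fields.
struct Slot {
  std::string str;
  int num = 0;
};

// Linear scan in strides of kEntryLength, like ModuleIndex().
int LookupModuleVariable(const std::vector<Slot>& slots,
                         const std::string& name, int* properties) {
  for (size_t entry = 0; entry + kEntryLength <= slots.size();
       entry += kEntryLength) {
    if (slots[entry + kNameOffset].str == name) {
      *properties = slots[entry + kPropertiesOffset].num;
      return slots[entry + kIndexOffset].num;
    }
  }
  return -1;  // Not found, matching ScopeInfo's convention.
}

int main() {
  std::vector<Slot> slots = {
      {"foo", 0}, {"", 1}, {"", /*properties=*/5},
      {"bar", 0}, {"", 2}, {"", /*properties=*/7},
  };
  int props = 0;
  std::cout << LookupModuleVariable(slots, "bar", &props)  // prints 2
            << " props=" << props << "\n";                 // props=7
}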
int ScopeInfo::ContextSlotIndex(Handle<ScopeInfo> scope_info,
Handle<String> name, VariableMode* mode,
InitializationFlag* init_flag,
MaybeAssignedFlag* maybe_assigned_flag) {
DCHECK(name->IsInternalizedString());
DCHECK(mode != NULL);
DCHECK(init_flag != NULL);
DCHECK_NOT_NULL(mode);
DCHECK_NOT_NULL(init_flag);
DCHECK_NOT_NULL(maybe_assigned_flag);
if (scope_info->length() > 0) {
ContextSlotCache* context_slot_cache =
scope_info->GetIsolate()->context_slot_cache();
int result = context_slot_cache->Lookup(*scope_info, *name, mode, init_flag,
maybe_assigned_flag);
if (result != ContextSlotCache::kNotFound) {
DCHECK(result < scope_info->ContextLength());
DCHECK_LT(result, scope_info->ContextLength());
return result;
}
int start = scope_info->ContextLocalNameEntriesIndex();
int end = scope_info->ContextLocalNameEntriesIndex() +
scope_info->ContextLocalCount();
int start = scope_info->ContextLocalNamesIndex();
int end = start + scope_info->ContextLocalCount();
for (int i = start; i < end; ++i) {
if (*name == scope_info->get(i)) {
int var = i - start;
@ -512,7 +692,7 @@ int ScopeInfo::ContextSlotIndex(Handle<ScopeInfo> scope_info,
context_slot_cache->Update(scope_info, name, *mode, *init_flag,
*maybe_assigned_flag, result);
DCHECK(result < scope_info->ContextLength());
DCHECK_LT(result, scope_info->ContextLength());
return result;
}
}
@ -520,46 +700,14 @@ int ScopeInfo::ContextSlotIndex(Handle<ScopeInfo> scope_info,
context_slot_cache->Update(scope_info, name, TEMPORARY,
kNeedsInitialization, kNotAssigned, -1);
}
return -1;
}
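
ContextSlotIndex() pairs a linear name scan with a per-isolate ContextSlotCache that also remembers misses. A rough sketch of the same idea, with a std::unordered_map standing in for the cache (simplified: no modes or flags, and the Context::MIN_CONTEXT_SLOTS value below is illustrative):

#include <iostream>
#include <string>
#include <unordered_map>
#include <vector>

// Context slots start after a fixed header, like Context::MIN_CONTEXT_SLOTS.
constexpr int kMinContextSlots = 4;  // illustrative value

struct ScopeNames {
  std::vector<std::string> context_local_names;
  // Cache of previous lookups, including misses (-1), mirroring the way
  // ContextSlotIndex() also records negative results in ContextSlotCache.
  mutable std::unordered_map<std::string, int> cache;

  int ContextSlotIndex(const std::string& name) const {
    auto it = cache.find(name);
    if (it != cache.end()) return it->second;
    int result = -1;
    for (size_t i = 0; i < context_local_names.size(); ++i) {
      if (context_local_names[i] == name) {
        result = kMinContextSlots + static_cast<int>(i);
        break;
      }
    }
    cache.emplace(name, result);
    return result;
  }
};

int main() {
  ScopeNames scope{{"x", "y"}, {}};
  std::cout << scope.ContextSlotIndex("y") << "\n";  // 5 (kMinContextSlots + 1)
  std::cout << scope.ContextSlotIndex("z") << "\n";  // -1, and now cached
}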
int ScopeInfo::ContextGlobalSlotIndex(Handle<ScopeInfo> scope_info,
Handle<String> name, VariableMode* mode,
InitializationFlag* init_flag,
MaybeAssignedFlag* maybe_assigned_flag) {
DCHECK(name->IsInternalizedString());
DCHECK(mode != NULL);
DCHECK(init_flag != NULL);
if (scope_info->length() > 0) {
// This is to ensure that ContextLocalMode() and co. queries would work.
DCHECK_EQ(scope_info->ContextGlobalNameEntriesIndex(),
scope_info->ContextLocalNameEntriesIndex() +
scope_info->ContextLocalCount());
int base = scope_info->ContextLocalNameEntriesIndex();
int start = scope_info->ContextGlobalNameEntriesIndex();
int end = scope_info->ContextGlobalNameEntriesIndex() +
scope_info->ContextGlobalCount();
for (int i = start; i < end; ++i) {
if (*name == scope_info->get(i)) {
int var = i - base;
*mode = scope_info->ContextLocalMode(var);
*init_flag = scope_info->ContextLocalInitFlag(var);
*maybe_assigned_flag = scope_info->ContextLocalMaybeAssignedFlag(var);
int result = Context::MIN_CONTEXT_SLOTS + var;
DCHECK(result < scope_info->ContextLength());
return result;
}
}
}
return -1;
}
String* ScopeInfo::ContextSlotName(int slot_index) {
int const var = slot_index - Context::MIN_CONTEXT_SLOTS;
DCHECK_LE(0, var);
DCHECK_LT(var, ContextLocalCount() + ContextGlobalCount());
DCHECK_LT(var, ContextLocalCount());
return ContextLocalName(var);
}
@ -572,8 +720,8 @@ int ScopeInfo::ParameterIndex(String* name) {
// last declaration of that parameter is used
// inside a function (and thus we need to look
// at the last index). Was bug# 1110337.
int start = ParameterEntriesIndex();
int end = ParameterEntriesIndex() + ParameterCount();
int start = ParameterNamesIndex();
int end = start + ParameterCount();
for (int i = end - 1; i >= start; --i) {
if (name == get(i)) {
return i - start;
@ -586,19 +734,16 @@ int ScopeInfo::ParameterIndex(String* name) {
int ScopeInfo::ReceiverContextSlotIndex() {
if (length() > 0 && ReceiverVariableField::decode(Flags()) == CONTEXT)
return Smi::cast(get(ReceiverEntryIndex()))->value();
return Smi::cast(get(ReceiverInfoIndex()))->value();
return -1;
}
int ScopeInfo::FunctionContextSlotIndex(String* name, VariableMode* mode) {
int ScopeInfo::FunctionContextSlotIndex(String* name) {
DCHECK(name->IsInternalizedString());
DCHECK(mode != NULL);
if (length() > 0) {
if (FunctionVariableField::decode(Flags()) == CONTEXT &&
FunctionName() == name) {
*mode = FunctionVariableMode::decode(Flags());
return Smi::cast(get(FunctionNameEntryIndex() + 1))->value();
return Smi::cast(get(FunctionNameInfoIndex() + 1))->value();
}
}
return -1;
@ -609,51 +754,45 @@ FunctionKind ScopeInfo::function_kind() {
return FunctionKindField::decode(Flags());
}
int ScopeInfo::ParameterEntriesIndex() {
DCHECK(length() > 0);
int ScopeInfo::ParameterNamesIndex() {
DCHECK_LT(0, length());
return kVariablePartIndex;
}
int ScopeInfo::StackLocalFirstSlotIndex() {
return ParameterEntriesIndex() + ParameterCount();
return ParameterNamesIndex() + ParameterCount();
}
int ScopeInfo::StackLocalNamesIndex() { return StackLocalFirstSlotIndex() + 1; }
int ScopeInfo::StackLocalEntriesIndex() {
return StackLocalFirstSlotIndex() + 1;
int ScopeInfo::ContextLocalNamesIndex() {
return StackLocalNamesIndex() + StackLocalCount();
}
int ScopeInfo::ContextLocalNameEntriesIndex() {
return StackLocalEntriesIndex() + StackLocalCount();
int ScopeInfo::ContextLocalInfosIndex() {
return ContextLocalNamesIndex() + ContextLocalCount();
}
int ScopeInfo::ContextGlobalNameEntriesIndex() {
return ContextLocalNameEntriesIndex() + ContextLocalCount();
int ScopeInfo::ReceiverInfoIndex() {
return ContextLocalInfosIndex() + ContextLocalCount();
}
int ScopeInfo::ContextLocalInfoEntriesIndex() {
return ContextGlobalNameEntriesIndex() + ContextGlobalCount();
int ScopeInfo::FunctionNameInfoIndex() {
return ReceiverInfoIndex() + (HasAllocatedReceiver() ? 1 : 0);
}
int ScopeInfo::ContextGlobalInfoEntriesIndex() {
return ContextLocalInfoEntriesIndex() + ContextLocalCount();
int ScopeInfo::OuterScopeInfoIndex() {
return FunctionNameInfoIndex() + (HasFunctionName() ? 2 : 0);
}
int ScopeInfo::ReceiverEntryIndex() {
return ContextGlobalInfoEntriesIndex() + ContextGlobalCount();
int ScopeInfo::ModuleInfoIndex() {
return OuterScopeInfoIndex() + (HasOuterScopeInfo() ? 1 : 0);
}
int ScopeInfo::ModuleVariableCountIndex() { return ModuleInfoIndex() + 1; }
int ScopeInfo::FunctionNameEntryIndex() {
return ReceiverEntryIndex() + (HasAllocatedReceiver() ? 1 : 0);
}
int ScopeInfo::ModuleVariablesIndex() { return ModuleVariableCountIndex() + 1; }
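
The chain of *Index() helpers above derives each region's start offset from the previous one, so the flat ScopeInfo layout never stores explicit offsets. A simplified sketch of that arithmetic (the field set and header size are illustrative, and the module region is omitted):

#include <iostream>

// Every region's start is the previous region's start plus the previous
// region's length; Length() is the index one past the last region.
struct Layout {
  int header_slots;  // like kVariablePartIndex
  int parameter_count;
  int stack_local_count;
  int context_local_count;
  bool has_receiver;
  bool has_function_name;
  bool has_outer_scope_info;

  int ParameterNamesIndex() const { return header_slots; }
  int StackLocalFirstSlotIndex() const {
    return ParameterNamesIndex() + parameter_count;
  }
  int StackLocalNamesIndex() const { return StackLocalFirstSlotIndex() + 1; }
  int ContextLocalNamesIndex() const {
    return StackLocalNamesIndex() + stack_local_count;
  }
  int ContextLocalInfosIndex() const {
    return ContextLocalNamesIndex() + context_local_count;
  }
  int ReceiverInfoIndex() const {
    return ContextLocalInfosIndex() + context_local_count;
  }
  int FunctionNameInfoIndex() const {
    return ReceiverInfoIndex() + (has_receiver ? 1 : 0);
  }
  int OuterScopeInfoIndex() const {
    // A function name takes two slots: the name and its context index.
    return FunctionNameInfoIndex() + (has_function_name ? 2 : 0);
  }
  int Length() const {
    return OuterScopeInfoIndex() + (has_outer_scope_info ? 1 : 0);
  }
};

int main() {
  Layout layout{4, 2, 1, 3, true, false, true};
  std::cout << layout.ContextLocalNamesIndex() << " "  // 4 + 2 + 1 + 1 = 8
            << layout.Length() << "\n";                // 8 + 3 + 3 + 1 + 1 = 16
}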
#ifdef DEBUG
@ -686,19 +825,84 @@ void ScopeInfo::Print() {
PrintF("{");
if (length() > 0) {
PrintList("parameters", 0, ParameterEntriesIndex(),
ParameterEntriesIndex() + ParameterCount(), this);
PrintList("stack slots", 0, StackLocalEntriesIndex(),
StackLocalEntriesIndex() + StackLocalCount(), this);
PrintList("parameters", 0, ParameterNamesIndex(),
ParameterNamesIndex() + ParameterCount(), this);
PrintList("stack slots", 0, StackLocalNamesIndex(),
StackLocalNamesIndex() + StackLocalCount(), this);
PrintList("context slots", Context::MIN_CONTEXT_SLOTS,
ContextLocalNameEntriesIndex(),
ContextLocalNameEntriesIndex() + ContextLocalCount(), this);
ContextLocalNamesIndex(),
ContextLocalNamesIndex() + ContextLocalCount(), this);
// TODO(neis): Print module stuff if present.
}
PrintF("}\n");
}
#endif // DEBUG
Handle<ModuleInfoEntry> ModuleInfoEntry::New(Isolate* isolate,
Handle<Object> export_name,
Handle<Object> local_name,
Handle<Object> import_name,
Handle<Object> module_request) {
Handle<ModuleInfoEntry> result = isolate->factory()->NewModuleInfoEntry();
result->set(kExportNameIndex, *export_name);
result->set(kLocalNameIndex, *local_name);
result->set(kImportNameIndex, *import_name);
result->set(kModuleRequestIndex, *module_request);
return result;
}
Handle<ModuleInfo> ModuleInfo::New(Isolate* isolate, Zone* zone,
ModuleDescriptor* descr) {
// Serialize module requests.
Handle<FixedArray> module_requests = isolate->factory()->NewFixedArray(
static_cast<int>(descr->module_requests().size()));
for (const auto& elem : descr->module_requests()) {
module_requests->set(elem.second, *elem.first->string());
}
// Serialize special exports.
Handle<FixedArray> special_exports =
isolate->factory()->NewFixedArray(descr->special_exports().length());
{
int i = 0;
for (auto entry : descr->special_exports()) {
special_exports->set(i++, *entry->Serialize(isolate));
}
}
// Serialize namespace imports.
Handle<FixedArray> namespace_imports =
isolate->factory()->NewFixedArray(descr->namespace_imports().length());
{
int i = 0;
for (auto entry : descr->namespace_imports()) {
namespace_imports->set(i++, *entry->Serialize(isolate));
}
}
// Serialize regular exports.
Handle<FixedArray> regular_exports =
descr->SerializeRegularExports(isolate, zone);
// Serialize regular imports.
Handle<FixedArray> regular_imports = isolate->factory()->NewFixedArray(
static_cast<int>(descr->regular_imports().size()));
{
int i = 0;
for (const auto& elem : descr->regular_imports()) {
regular_imports->set(i++, *elem.second->Serialize(isolate));
}
}
Handle<ModuleInfo> result = isolate->factory()->NewModuleInfo();
result->set(kModuleRequestsIndex, *module_requests);
result->set(kSpecialExportsIndex, *special_exports);
result->set(kRegularExportsIndex, *regular_exports);
result->set(kNamespaceImportsIndex, *namespace_imports);
result->set(kRegularImportsIndex, *regular_imports);
return result;
}
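
Serializing module_requests above inverts a specifier-to-index map into a dense index-to-specifier array. A small sketch of that inversion with standard containers (names are illustrative, not V8 API):

#include <cassert>
#include <iostream>
#include <map>
#include <string>
#include <vector>

// Given specifier -> request index, produce a dense array indexed by the
// request index, as ModuleInfo::New does for descr->module_requests().
std::vector<std::string> SerializeModuleRequests(
    const std::map<std::string, int>& requests) {
  std::vector<std::string> result(requests.size());
  for (const auto& elem : requests) {
    assert(elem.second >= 0 &&
           static_cast<size_t>(elem.second) < result.size());
    result[elem.second] = elem.first;
  }
  return result;
}

int main() {
  std::map<std::string, int> requests = {{"./a.js", 1}, {"./b.js", 0}};
  for (const std::string& specifier : SerializeModuleRequests(requests))
    std::cout << specifier << "\n";  // ./b.js, then ./a.js
}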
} // namespace internal
} // namespace v8

18
deps/v8/src/ast/scopeinfo.h (vendored)

@ -1,18 +0,0 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_AST_SCOPEINFO_H_
#define V8_AST_SCOPEINFO_H_
#include "src/allocation.h"
#include "src/ast/modules.h"
#include "src/ast/variables.h"
namespace v8 {
namespace internal {
} // namespace internal
} // namespace v8
#endif // V8_AST_SCOPEINFO_H_

1349
deps/v8/src/ast/scopes.cc (vendored)

Diff not shown because of its large size.

370
deps/v8/src/ast/scopes.h (vendored)

@ -5,15 +5,22 @@
#ifndef V8_AST_SCOPES_H_
#define V8_AST_SCOPES_H_
#include "src/ast/ast.h"
#include "src/base/hashmap.h"
#include "src/globals.h"
#include "src/zone.h"
#include "src/objects.h"
#include "src/zone/zone.h"
namespace v8 {
namespace internal {
class AstNodeFactory;
class AstValueFactory;
class AstRawString;
class Declaration;
class ParseInfo;
class SloppyBlockFunctionStatement;
class StringSet;
class VariableProxy;
// A hash map to support fast variable declaration and lookup.
class VariableMap: public ZoneHashMap {
@ -21,34 +28,14 @@ class VariableMap: public ZoneHashMap {
explicit VariableMap(Zone* zone);
Variable* Declare(Zone* zone, Scope* scope, const AstRawString* name,
VariableMode mode, Variable::Kind kind,
VariableMode mode, VariableKind kind,
InitializationFlag initialization_flag,
MaybeAssignedFlag maybe_assigned_flag = kNotAssigned,
bool* added = nullptr);
Variable* Lookup(const AstRawString* name);
};
// The dynamic scope part holds hash maps for the variables that will
// be looked up dynamically from within eval and with scopes. The objects
// are allocated on-demand from Scope::NonLocal to avoid wasting memory
// and setup time for scopes that don't need them.
class DynamicScopePart : public ZoneObject {
public:
explicit DynamicScopePart(Zone* zone) {
for (int i = 0; i < 3; i++)
maps_[i] = new(zone->New(sizeof(VariableMap))) VariableMap(zone);
}
VariableMap* GetMap(VariableMode mode) {
int index = mode - DYNAMIC;
DCHECK(index >= 0 && index < 3);
return maps_[index];
}
private:
VariableMap *maps_[3];
void Remove(Variable* var);
void Add(Zone* zone, Variable* var);
};
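
VariableMap::Declare() either returns the existing Variable for a name or creates a new one, reporting through the added flag whether a new entry was made; Scope::Declare uses that flag to decide when to append to locals_. A hedged sketch of the same contract with std::unordered_map and owning pointers in place of the ZoneHashMap (toy types below, not V8's):

#include <iostream>
#include <memory>
#include <string>
#include <unordered_map>

enum class VariableMode { kVar, kLet, kConst };

struct Variable {
  std::string name;
  VariableMode mode;
};

// Declare() creates on first use and returns the existing entry afterwards.
class VariableMap {
 public:
  Variable* Declare(const std::string& name, VariableMode mode, bool* added) {
    auto it = map_.find(name);
    if (it != map_.end()) {
      if (added != nullptr) *added = false;
      return it->second.get();
    }
    auto var = std::make_unique<Variable>(Variable{name, mode});
    Variable* raw = var.get();
    map_.emplace(name, std::move(var));
    if (added != nullptr) *added = true;
    return raw;
  }

  Variable* Lookup(const std::string& name) const {
    auto it = map_.find(name);
    return it == map_.end() ? nullptr : it->second.get();
  }

 private:
  std::unordered_map<std::string, std::unique_ptr<Variable>> map_;
};

int main() {
  VariableMap variables;
  bool added = false;
  variables.Declare("x", VariableMode::kLet, &added);
  std::cout << added << "\n";  // 1: newly declared
  variables.Declare("x", VariableMode::kLet, &added);
  std::cout << added << "\n";  // 0: already present
  std::cout << (variables.Lookup("y") == nullptr) << "\n";  // 1
}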

@ -60,6 +47,7 @@ class SloppyBlockFunctionMap : public ZoneHashMap {
SloppyBlockFunctionStatement* statement);
};
enum class AnalyzeMode { kRegular, kDebugger };
// Global invariants after AST construction: Each reference (i.e. identifier)
// to a JavaScript variable (including global properties) is represented by a
@ -86,6 +74,7 @@ class Scope: public ZoneObject {
void SetScopeName(const AstRawString* scope_name) {
scope_name_ = scope_name;
}
void set_needs_migration() { needs_migration_ = true; }
#endif
// TODO(verwaest): Is this needed on Scope?
@ -106,18 +95,14 @@ class Scope: public ZoneObject {
Scope* outer_scope_;
Scope* top_inner_scope_;
VariableProxy* top_unresolved_;
int top_temp_;
int top_local_;
int top_decl_;
};
// Compute top scope and allocate variables. For lazy compilation the top
// scope only contains the single lazily compiled function, so this
// doesn't re-allocate variables repeatedly.
static void Analyze(ParseInfo* info);
enum class DeserializationMode { kDeserializeOffHeap, kKeepScopeInfo };
enum class DeserializationMode { kIncludingVariables, kScopesOnly };
static Scope* DeserializeScopeChain(Isolate* isolate, Zone* zone,
Context* context,
ScopeInfo* scope_info,
DeclarationScope* script_scope,
AstValueFactory* ast_value_factory,
DeserializationMode deserialization_mode);
@ -127,6 +112,11 @@ class Scope: public ZoneObject {
// tree and its children are reparented.
Scope* FinalizeBlockScope();
bool HasBeenRemoved() const;
// Find the first scope that hasn't been removed.
Scope* GetUnremovedScope();
// Inserts outer_scope into this scope's scope chain (and removes this
// from the current outer_scope_'s inner scope list).
// Assumes outer_scope_ is non-null.
@ -142,7 +132,13 @@ class Scope: public ZoneObject {
// Declarations
// Lookup a variable in this scope. Returns the variable or NULL if not found.
Variable* LookupLocal(const AstRawString* name);
Variable* LookupLocal(const AstRawString* name) {
Variable* result = variables_.Lookup(name);
if (result != nullptr || scope_info_.is_null()) return result;
return LookupInScopeInfo(name);
}
Variable* LookupInScopeInfo(const AstRawString* name);
// Lookup a variable in this scope or outer scopes.
// Returns the variable or NULL if not found.
@ -151,36 +147,28 @@ class Scope: public ZoneObject {
// Declare a local variable in this scope. If the variable has been
// declared before, the previously declared variable is returned.
Variable* DeclareLocal(const AstRawString* name, VariableMode mode,
InitializationFlag init_flag, Variable::Kind kind,
InitializationFlag init_flag, VariableKind kind,
MaybeAssignedFlag maybe_assigned_flag = kNotAssigned);
Variable* DeclareVariable(Declaration* declaration, VariableMode mode,
InitializationFlag init,
bool allow_harmony_restrictive_generators,
bool* sloppy_mode_block_scope_function_redefinition,
bool* ok);
// Declarations list.
ZoneList<Declaration*>* declarations() { return &decls_; }
ZoneList<Variable*>* locals() { return &locals_; }
// Create a new unresolved variable.
VariableProxy* NewUnresolved(AstNodeFactory* factory,
const AstRawString* name,
int start_position = kNoSourcePosition,
int end_position = kNoSourcePosition,
Variable::Kind kind = Variable::NORMAL) {
// Note that we must not share the unresolved variables with
// the same name because they may be removed selectively via
// RemoveUnresolved().
DCHECK(!already_resolved_);
DCHECK_EQ(factory->zone(), zone());
VariableProxy* proxy =
factory->NewVariableProxy(name, kind, start_position, end_position);
proxy->set_next_unresolved(unresolved_);
unresolved_ = proxy;
return proxy;
}
VariableKind kind = NORMAL_VARIABLE);
void AddUnresolved(VariableProxy* proxy) {
DCHECK(!already_resolved_);
DCHECK(!proxy->is_resolved());
proxy->set_next_unresolved(unresolved_);
unresolved_ = proxy;
}
void AddUnresolved(VariableProxy* proxy);
// Remove an unresolved variable. During parsing, an unresolved variable
// may have been added optimistically, but then only the variable name
@ -189,6 +177,7 @@ class Scope: public ZoneObject {
// allocated globally as a "ghost" variable. RemoveUnresolved removes
// such a variable again if it was added; otherwise this is a no-op.
bool RemoveUnresolved(VariableProxy* var);
bool RemoveUnresolved(const AstRawString* name);
// Creates a new temporary variable in this scope's TemporaryScope. The
// name is only used for printing and cannot be used to find the variable.
@ -198,11 +187,6 @@ class Scope: public ZoneObject {
// TODO(verwaest): Move to DeclarationScope?
Variable* NewTemporary(const AstRawString* name);
// Adds the specific declaration node to the list of declarations in
// this scope. The declarations are processed as part of entering
// the scope; see codegen.cc:ProcessDeclarations.
void AddDeclaration(Declaration* declaration);
// ---------------------------------------------------------------------------
// Illegal redeclaration support.
@ -223,10 +207,15 @@ class Scope: public ZoneObject {
// Scope-specific info.
// Inform the scope and outer scopes that the corresponding code contains an
// eval call.
// eval call. We don't record eval calls from inner scopes in the outermost
// script scope, as we only see those when parsing eagerly. If we recorded the
// calls then, the outermost script scope would look different depending on
// whether we parsed eagerly or not, which is undesirable.
void RecordEvalCall() {
scope_calls_eval_ = true;
for (Scope* scope = this; scope != nullptr; scope = scope->outer_scope()) {
inner_scope_calls_eval_ = true;
for (Scope* scope = outer_scope(); scope && !scope->is_script_scope();
scope = scope->outer_scope()) {
scope->inner_scope_calls_eval_ = true;
}
}
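
As the comment above explains, the reworked RecordEvalCall() marks the current scope, then walks only the outer scopes and stops before the script scope, so the script scope's flags no longer depend on whether inner functions were parsed eagerly. A toy sketch of that propagation (standalone structs, not V8's classes):

#include <iostream>

// Mark the current scope, then propagate "an inner scope calls eval" up the
// chain, skipping the script scope at the top.
struct Scope {
  Scope* outer = nullptr;
  bool is_script_scope = false;
  bool scope_calls_eval = false;
  bool inner_scope_calls_eval = false;

  void RecordEvalCall() {
    scope_calls_eval = true;
    for (Scope* scope = outer; scope != nullptr && !scope->is_script_scope;
         scope = scope->outer) {
      scope->inner_scope_calls_eval = true;
    }
  }
};

int main() {
  Scope script;
  script.is_script_scope = true;
  Scope function;
  function.outer = &script;
  Scope block;
  block.outer = &function;

  block.RecordEvalCall();
  std::cout << block.scope_calls_eval << " "           // 1
            << function.inner_scope_calls_eval << " "  // 1
            << script.inner_scope_calls_eval << "\n";  // 0: script scope skipped
}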
@ -353,24 +342,16 @@ class Scope: public ZoneObject {
// ---------------------------------------------------------------------------
// Variable allocation.
// Collect stack and context allocated local variables in this scope. Note
// that the function variable - if present - is not collected and should be
// handled separately.
void CollectStackAndContextLocals(ZoneList<Variable*>* stack_locals,
ZoneList<Variable*>* context_locals,
ZoneList<Variable*>* context_globals);
// Result of variable allocation.
int num_stack_slots() const { return num_stack_slots_; }
int num_heap_slots() const { return num_heap_slots_; }
int num_global_slots() const { return num_global_slots_; }
int StackLocalCount() const;
int ContextLocalCount() const;
int ContextGlobalCount() const;
// Determine if we can parse a function literal in this scope lazily.
bool AllowsLazyParsing() const;
// Determine if we can parse a function literal in this scope lazily without
// caring about the unresolved variables within.
bool AllowsLazyParsingWithoutUnresolvedVariables() const;
// The number of contexts between this and scope; zero if this == scope.
int ContextChainLength(Scope* scope) const;
@ -398,10 +379,13 @@ class Scope: public ZoneObject {
// 'this' is bound, and what determines the function kind.
DeclarationScope* GetReceiverScope();
// Creates a scope info if it doesn't already exist.
Handle<ScopeInfo> GetScopeInfo(Isolate* isolate);
// Find the module scope, assuming there is one.
ModuleScope* GetModuleScope();
// GetScopeInfo() must have been called once to create the ScopeInfo.
// Find the innermost outer scope that needs a context.
Scope* GetOuterScopeWithContext();
// Analyze() must have been called once to create the ScopeInfo.
Handle<ScopeInfo> scope_info() {
DCHECK(!scope_info_.is_null());
return scope_info_;
@ -436,9 +420,11 @@ class Scope: public ZoneObject {
// Retrieve `IsSimpleParameterList` of current or outer function.
bool HasSimpleParameters();
void set_is_debug_evaluate_scope() { is_debug_evaluate_scope_ = true; }
bool is_debug_evaluate_scope() const { return is_debug_evaluate_scope_; }
bool is_lazily_parsed() const { return is_lazily_parsed_; }
protected:
// Creates a script scope.
explicit Scope(Zone* zone);
void set_language_mode(LanguageMode language_mode) {
@ -447,16 +433,32 @@ class Scope: public ZoneObject {
private:
Variable* Declare(Zone* zone, Scope* scope, const AstRawString* name,
VariableMode mode, Variable::Kind kind,
VariableMode mode, VariableKind kind,
InitializationFlag initialization_flag,
MaybeAssignedFlag maybe_assigned_flag = kNotAssigned) {
bool added;
Variable* var =
variables_.Declare(zone, scope, name, mode, kind, initialization_flag,
maybe_assigned_flag, &added);
if (added) ordered_variables_.Add(var, zone);
if (added) locals_.Add(var, zone);
return var;
}
// This method should only be invoked on scopes created during parsing (i.e.,
// not deserialized from a context). Also, since NeedsContext() is only
// returning a valid result after variables are resolved, NeedsScopeInfo()
// should also be invoked after resolution.
bool NeedsScopeInfo() const {
DCHECK(!already_resolved_);
// A lazily parsed scope doesn't contain enough information to create a
// ScopeInfo from it.
if (is_lazily_parsed_) return false;
// The debugger expects all functions to have scope infos.
// TODO(jochen|yangguo): Remove this requirement.
if (is_function_scope()) return true;
return NeedsContext();
}
Zone* zone_;
// Scope tree.
@ -473,9 +475,7 @@ class Scope: public ZoneObject {
// In case of non-scopeinfo-backed scopes, this contains the variables of the
// map above in order of addition.
// TODO(verwaest): Thread through Variable.
ZoneList<Variable*> ordered_variables_;
// Variables that must be looked up dynamically.
DynamicScopePart* dynamics_;
ZoneList<Variable*> locals_;
// Unresolved variables referred to from this scope. The proxies themselves
// form a linked list of all unresolved proxies.
VariableProxy* unresolved_;
@ -490,7 +490,10 @@ class Scope: public ZoneObject {
// True if it doesn't need scope resolution (e.g., if the scope was
// constructed based on a serialized scope info or a catch context).
bool already_resolved_ : 1;
bool already_resolved_;
// True if this scope may contain objects from a temp zone that needs to be
// fixed up.
bool needs_migration_;
#endif
// Source positions.
@ -500,7 +503,6 @@ class Scope: public ZoneObject {
// Computed via AllocateVariables.
int num_stack_slots_;
int num_heap_slots_;
int num_global_slots_;
// The scope type.
const ScopeType scope_type_;
@ -525,79 +527,30 @@ class Scope: public ZoneObject {
// True if it holds 'var' declarations.
bool is_declaration_scope_ : 1;
bool is_lazily_parsed_ : 1;
// Create a non-local variable with a given name.
// These variables are looked up dynamically at runtime.
Variable* NonLocal(const AstRawString* name, VariableMode mode);
// Variable resolution.
// Possible results of a recursive variable lookup telling if and how a
// variable is bound. These are returned in the output parameter *binding_kind
// of the LookupRecursive function.
enum BindingKind {
// The variable reference could be statically resolved to a variable binding
// which is returned. There is no 'with' statement between the reference and
// the binding and no scope between the reference scope (inclusive) and
// binding scope (exclusive) makes a sloppy 'eval' call.
BOUND,
// The variable reference could be statically resolved to a variable binding
// which is returned. There is no 'with' statement between the reference and
// the binding, but some scope between the reference scope (inclusive) and
// binding scope (exclusive) makes a sloppy 'eval' call, that might
// possibly introduce variable bindings shadowing the found one. Thus the
// found variable binding is just a guess.
BOUND_EVAL_SHADOWED,
// The variable reference could not be statically resolved to any binding
// and thus should be considered referencing a global variable. NULL is
// returned. The variable reference is not inside any 'with' statement and
// no scope between the reference scope (inclusive) and script scope
// (exclusive) makes a sloppy 'eval' call.
UNBOUND,
// The variable reference could not be statically resolved to any binding
// NULL is returned. The variable reference is not inside any 'with'
// statement, but some scope between the reference scope (inclusive) and
// script scope (exclusive) makes a sloppy 'eval' call, that might
// possibly introduce a variable binding. Thus the reference should be
// considered referencing a global variable unless it is shadowed by an
// 'eval' introduced binding.
UNBOUND_EVAL_SHADOWED,
// The variable could not be statically resolved and needs to be looked up
// dynamically. NULL is returned. There are two possible reasons:
// * A 'with' statement has been encountered and there is no variable
// binding for the name between the variable reference and the 'with'.
// The variable potentially references a property of the 'with' object.
// * The code is being executed as part of a call to 'eval' and the calling
// context chain contains either a variable binding for the name or it
// contains a 'with' context.
DYNAMIC_LOOKUP
};
// Lookup a variable reference given by name recursively starting with this
// scope, and stopping when reaching the outer_scope_end scope. If the code is
// executed because of a call to 'eval', the context parameter should be set
// to the calling context of 'eval'.
Variable* LookupRecursive(VariableProxy* proxy, BindingKind* binding_kind,
AstNodeFactory* factory,
Scope* outer_scope_end = nullptr);
void ResolveTo(ParseInfo* info, BindingKind binding_kind,
VariableProxy* proxy, Variable* var);
void ResolveVariable(ParseInfo* info, VariableProxy* proxy,
AstNodeFactory* factory);
void ResolveVariablesRecursively(ParseInfo* info, AstNodeFactory* factory);
Variable* LookupRecursive(VariableProxy* proxy, Scope* outer_scope_end);
void ResolveTo(ParseInfo* info, VariableProxy* proxy, Variable* var);
void ResolveVariable(ParseInfo* info, VariableProxy* proxy);
void ResolveVariablesRecursively(ParseInfo* info);
// Finds free variables of this scope. This mutates the unresolved variables
// list along the way, so full resolution cannot be done afterwards.
// If a ParseInfo* is passed, non-free variables will be resolved.
VariableProxy* FetchFreeVariables(DeclarationScope* max_outer_scope,
bool try_to_resolve = true,
ParseInfo* info = nullptr,
VariableProxy* stack = nullptr);
// Scope analysis.
void PropagateScopeInfo();
// Predicates.
bool MustAllocate(Variable* var);
bool MustAllocateInContext(Variable* var);
@ -610,15 +563,18 @@ class Scope: public ZoneObject {
void AllocateNonParameterLocalsAndDeclaredGlobals();
void AllocateVariablesRecursively();
void AllocateScopeInfosRecursively(Isolate* isolate, AnalyzeMode mode,
MaybeHandle<ScopeInfo> outer_scope);
// Construct a scope based on the scope info.
Scope(Zone* zone, Scope* inner_scope, ScopeType type,
Handle<ScopeInfo> scope_info);
Scope(Zone* zone, ScopeType type, Handle<ScopeInfo> scope_info);
// Construct a catch scope with a binding for the name.
Scope(Zone* zone, Scope* inner_scope,
const AstRawString* catch_variable_name);
Scope(Zone* zone, const AstRawString* catch_variable_name,
Handle<ScopeInfo> scope_info);
void AddInnerScope(Scope* inner_scope) {
DCHECK_EQ(!needs_migration_, inner_scope->zone() == zone());
inner_scope->sibling_ = inner_scope_;
inner_scope_ = inner_scope;
inner_scope->outer_scope_ = this;
@ -641,9 +597,6 @@ class Scope: public ZoneObject {
void SetDefaults();
void DeserializeScopeInfo(Isolate* isolate,
AstValueFactory* ast_value_factory);
friend class DeclarationScope;
};
@ -651,10 +604,10 @@ class DeclarationScope : public Scope {
public:
DeclarationScope(Zone* zone, Scope* outer_scope, ScopeType scope_type,
FunctionKind function_kind = kNormalFunction);
DeclarationScope(Zone* zone, Scope* inner_scope, ScopeType scope_type,
DeclarationScope(Zone* zone, ScopeType scope_type,
Handle<ScopeInfo> scope_info);
// Creates a script scope.
explicit DeclarationScope(Zone* zone);
DeclarationScope(Zone* zone, AstValueFactory* ast_value_factory);
bool IsDeclaredParameter(const AstRawString* name) {
// If IsSimpleParameterList is false, duplicate parameters are not allowed,
@ -681,23 +634,29 @@ class DeclarationScope : public Scope {
IsClassConstructor(function_kind())));
}
void SetScriptScopeInfo(Handle<ScopeInfo> scope_info) {
DCHECK(is_script_scope());
DCHECK(scope_info_.is_null());
scope_info_ = scope_info;
}
bool asm_module() const { return asm_module_; }
void set_asm_module() { asm_module_ = true; }
void set_asm_module();
bool asm_function() const { return asm_function_; }
void set_asm_function() { asm_module_ = true; }
void DeclareThis(AstValueFactory* ast_value_factory);
void DeclareArguments(AstValueFactory* ast_value_factory);
void DeclareDefaultFunctionVariables(AstValueFactory* ast_value_factory);
// This lookup corresponds to a lookup in the "intermediate" scope sitting
// between this scope and the outer scope. (ECMA-262, 3rd., requires that
// the name of named function literal is kept in an intermediate scope
// in between this scope and the next outer scope.)
Variable* LookupFunctionVar(const AstRawString* name);
// Declare the function variable for a function literal. This variable
// is in an intermediate scope between this function scope and the
// outer scope. Only possible for function scopes; at most one variable.
//
// This function needs to be called after all other variables have been
// declared in the scope. It will add a variable for {name} to {variables_};
// either the function variable itself, or a non-local in case the function
// calls sloppy eval.
Variable* DeclareFunctionVar(const AstRawString* name);
// Declare a parameter in this scope. When there are duplicated
@ -712,7 +671,7 @@ class DeclarationScope : public Scope {
// scope) by a reference to an unresolved variable with no intervening
// with statements or eval calls.
Variable* DeclareDynamicGlobal(const AstRawString* name,
Variable::Kind variable_kind);
VariableKind variable_kind);
// The variable corresponding to the 'this' value.
Variable* receiver() {
@ -739,43 +698,36 @@ class DeclarationScope : public Scope {
}
// Parameters. The left-most parameter has index 0.
// Only valid for function scopes.
// Only valid for function and module scopes.
Variable* parameter(int index) const {
DCHECK(is_function_scope());
DCHECK(is_function_scope() || is_module_scope());
return params_[index];
}
// Returns the default function arity excluding default or rest parameters.
int default_function_length() const { return arity_; }
// This will be used to set the length of the function, by default.
// Class field initializers use this property to indicate the number of
// fields being initialized.
int arity() const { return arity_; }
// Returns the number of formal parameters, up to but not including the
// rest parameter index (if the function has rest parameters), i.e. it
// says 2 for
//
// function foo(a, b) { ... }
//
// and
//
// function foo(a, b, ...c) { ... }
//
// but for
//
// function foo(a, b, c = 1) { ... }
//
// we return 3 here.
// Normal code should not need to call this. Class field initializers use this
// property to indicate the number of fields being initialized.
void set_arity(int arity) { arity_ = arity; }
// Returns the number of formal parameters, excluding a possible rest
// parameter. Examples:
// function foo(a, b) {} ==> 2
// function foo(a, b, ...c) {} ==> 2
// function foo(a, b, c = 1) {} ==> 3
int num_parameters() const {
return has_rest_parameter() ? params_.length() - 1 : params_.length();
return has_rest_ ? params_.length() - 1 : params_.length();
}
// A function can have at most one rest parameter. Returns Variable* or NULL.
Variable* rest_parameter(int* index) const {
*index = rest_index_;
if (rest_index_ < 0) return nullptr;
return params_[rest_index_];
// The function's rest parameter (nullptr if there is none).
Variable* rest_parameter() const {
return has_rest_ ? params_[params_.length() - 1] : nullptr;
}
bool has_rest_parameter() const { return rest_index_ >= 0; }
bool has_simple_parameters() const { return has_simple_parameters_; }
// TODO(caitp): manage this state in a better way. PreParser must be able to
@ -803,44 +755,40 @@ class DeclarationScope : public Scope {
return this_function_;
}
// Adds a temporary variable in this scope's TemporaryScope. This is for
// adjusting the scope of temporaries used when desugaring parameter
// Adds a local variable in this scope's locals list. This is for adjusting
// the scope of temporaries and do-expression vars when desugaring parameter
// initializers.
void AddTemporary(Variable* var) {
void AddLocal(Variable* var) {
DCHECK(!already_resolved_);
// Temporaries are only placed in ClosureScopes.
DCHECK_EQ(GetClosureScope(), this);
temps_.Add(var, zone());
locals_.Add(var, zone());
}
ZoneList<Variable*>* temps() { return &temps_; }
void DeclareSloppyBlockFunction(const AstRawString* name,
SloppyBlockFunctionStatement* statement) {
sloppy_block_function_map_.Declare(zone(), name, statement);
}
// Go through sloppy_block_function_map_ and hoist those (into this scope)
// which should be hoisted.
void HoistSloppyBlockFunctions(AstNodeFactory* factory);
SloppyBlockFunctionMap* sloppy_block_function_map() {
return &sloppy_block_function_map_;
}
// Resolve and fill in the allocation information for all variables
// in this scopes. Must be called *after* all scopes have been
// processed (parsed) to ensure that unresolved variables can be
// resolved properly.
//
// In the case of code compiled and run using 'eval', the context
// parameter is the context in which eval was called. In all other
// cases the context parameter is an empty handle.
void AllocateVariables(ParseInfo* info, AstNodeFactory* factory);
// Compute top scope and allocate variables. For lazy compilation the top
// scope only contains the single lazily compiled function, so this
// doesn't re-allocate variables repeatedly.
static void Analyze(ParseInfo* info, AnalyzeMode mode);
// To be called during parsing. Do just enough scope analysis that we can
// discard the Scope for lazily compiled functions. In particular, this
// records variables which cannot be resolved inside the Scope (we don't yet
// know what they will resolve to since the outer Scopes are incomplete) and
// migrates them into migrate_to.
void AnalyzePartially(DeclarationScope* migrate_to,
AstNodeFactory* ast_node_factory);
void AnalyzePartially(AstNodeFactory* ast_node_factory);
Handle<StringSet> CollectNonLocals(ParseInfo* info,
Handle<StringSet> non_locals);
@ -868,9 +816,21 @@ class DeclarationScope : public Scope {
void AllocateParameterLocals();
void AllocateReceiver();
void ResetAfterPreparsing(AstValueFactory* ast_value_factory, bool aborted);
private:
void AllocateParameter(Variable* var, int index);
// Resolve and fill in the allocation information for all variables
// in this scopes. Must be called *after* all scopes have been
// processed (parsed) to ensure that unresolved variables can be
// resolved properly.
//
// In the case of code compiled and run using 'eval', the context
// parameter is the context in which eval was called. In all other
// cases the context parameter is an empty handle.
void AllocateVariables(ParseInfo* info, AnalyzeMode mode);
void SetDefaults();
// If the scope is a function scope, this is the function kind.
@ -882,6 +842,8 @@ class DeclarationScope : public Scope {
// This scope's outer context is an asm module.
bool asm_function_ : 1;
bool force_eager_compilation_ : 1;
// This function scope has a rest parameter.
bool has_rest_ : 1;
// This scope has a parameter called "arguments".
bool has_arguments_parameter_ : 1;
// This scope uses "super" property ('super.foo').
@ -889,9 +851,6 @@ class DeclarationScope : public Scope {
// Info about the parameter list of a function.
int arity_;
int rest_index_;
// Compiler-allocated (user-invisible) temporaries.
ZoneList<Variable*> temps_;
// Parameter list in source order.
ZoneList<Variable*> params_;
// Map of function names to lists of functions defined in sloppy blocks
@ -910,7 +869,14 @@ class DeclarationScope : public Scope {
class ModuleScope final : public DeclarationScope {
public:
ModuleScope(Zone* zone, DeclarationScope* script_scope,
ModuleScope(DeclarationScope* script_scope,
AstValueFactory* ast_value_factory);
// Deserialization.
// The generated ModuleDescriptor does not preserve all information. In
// particular, its module_requests map will be empty because we no longer need
// the map after parsing.
ModuleScope(Isolate* isolate, Handle<ScopeInfo> scope_info,
AstValueFactory* ast_value_factory);
ModuleDescriptor* module() const {

45
deps/v8/src/ast/variables.cc (vendored)

@ -13,36 +13,20 @@ namespace internal {
// ----------------------------------------------------------------------------
// Implementation Variable.
const char* Variable::Mode2String(VariableMode mode) {
switch (mode) {
case VAR: return "VAR";
case CONST_LEGACY: return "CONST_LEGACY";
case LET: return "LET";
case CONST: return "CONST";
case DYNAMIC: return "DYNAMIC";
case DYNAMIC_GLOBAL: return "DYNAMIC_GLOBAL";
case DYNAMIC_LOCAL: return "DYNAMIC_LOCAL";
case TEMPORARY: return "TEMPORARY";
}
UNREACHABLE();
return NULL;
}
Variable::Variable(Scope* scope, const AstRawString* name, VariableMode mode,
Kind kind, InitializationFlag initialization_flag,
VariableKind kind, InitializationFlag initialization_flag,
MaybeAssignedFlag maybe_assigned_flag)
: scope_(scope),
name_(name),
mode_(mode),
kind_(kind),
location_(VariableLocation::UNALLOCATED),
local_if_not_shadowed_(nullptr),
index_(-1),
initializer_position_(kNoSourcePosition),
local_if_not_shadowed_(NULL),
force_context_allocation_(false),
is_used_(false),
initialization_flag_(initialization_flag),
maybe_assigned_(maybe_assigned_flag) {
bit_field_(MaybeAssignedFlagField::encode(maybe_assigned_flag) |
InitializationFlagField::encode(initialization_flag) |
VariableModeField::encode(mode) | IsUsedField::encode(false) |
ForceContextAllocationField::encode(false) |
LocationField::encode(VariableLocation::UNALLOCATED) |
VariableKindField::encode(kind)) {
// Var declared variables never need initialization.
DCHECK(!(mode == VAR && initialization_flag == kNeedsInitialization));
}
@ -51,8 +35,8 @@ Variable::Variable(Scope* scope, const AstRawString* name, VariableMode mode,
bool Variable::IsGlobalObjectProperty() const {
// Temporaries are never global, they must always be allocated in the
// activation frame.
return (IsDynamicVariableMode(mode_) ||
(IsDeclaredVariableMode(mode_) && !IsLexicalVariableMode(mode_))) &&
return (IsDynamicVariableMode(mode()) ||
(IsDeclaredVariableMode(mode()) && !IsLexicalVariableMode(mode()))) &&
scope_ != NULL && scope_->is_script_scope();
}
@ -60,17 +44,10 @@ bool Variable::IsGlobalObjectProperty() const {
bool Variable::IsStaticGlobalObjectProperty() const {
// Temporaries are never global, they must always be allocated in the
// activation frame.
return (IsDeclaredVariableMode(mode_) && !IsLexicalVariableMode(mode_)) &&
return (IsDeclaredVariableMode(mode()) && !IsLexicalVariableMode(mode())) &&
scope_ != NULL && scope_->is_script_scope();
}
int Variable::CompareIndex(Variable* const* v, Variable* const* w) {
int x = (*v)->index();
int y = (*w)->index();
// Consider sorting them according to type as well?
return x - y;
}
} // namespace internal
} // namespace v8

122
deps/v8/src/ast/variables.h (vendored)

@ -6,7 +6,8 @@
#define V8_AST_VARIABLES_H_
#include "src/ast/ast-value-factory.h"
#include "src/zone.h"
#include "src/globals.h"
#include "src/zone/zone.h"
namespace v8 {
namespace internal {
@ -17,15 +18,10 @@ namespace internal {
// after binding and variable allocation.
class Variable final : public ZoneObject {
public:
enum Kind { NORMAL, FUNCTION, THIS, ARGUMENTS };
Variable(Scope* scope, const AstRawString* name, VariableMode mode, Kind kind,
InitializationFlag initialization_flag,
Variable(Scope* scope, const AstRawString* name, VariableMode mode,
VariableKind kind, InitializationFlag initialization_flag,
MaybeAssignedFlag maybe_assigned_flag = kNotAssigned);
// Printing support
static const char* Mode2String(VariableMode mode);
// The source code for an eval() call may refer to a variable that is
// in an outer scope about which we don't know anything (it may not
// be the script scope). scope() is NULL in that case. Currently the
@ -38,51 +34,56 @@ class Variable final : public ZoneObject {
Handle<String> name() const { return name_->string(); }
const AstRawString* raw_name() const { return name_; }
VariableMode mode() const { return mode_; }
VariableMode mode() const { return VariableModeField::decode(bit_field_); }
bool has_forced_context_allocation() const {
return force_context_allocation_;
return ForceContextAllocationField::decode(bit_field_);
}
void ForceContextAllocation() {
DCHECK(IsUnallocated() || IsContextSlot());
force_context_allocation_ = true;
DCHECK(IsUnallocated() || IsContextSlot() ||
location() == VariableLocation::MODULE);
bit_field_ = ForceContextAllocationField::update(bit_field_, true);
}
bool is_used() { return IsUsedField::decode(bit_field_); }
void set_is_used() { bit_field_ = IsUsedField::update(bit_field_, true); }
MaybeAssignedFlag maybe_assigned() const {
return MaybeAssignedFlagField::decode(bit_field_);
}
void set_maybe_assigned() {
bit_field_ = MaybeAssignedFlagField::update(bit_field_, kMaybeAssigned);
}
bool is_used() { return is_used_; }
void set_is_used() { is_used_ = true; }
MaybeAssignedFlag maybe_assigned() const { return maybe_assigned_; }
void set_maybe_assigned() { maybe_assigned_ = kMaybeAssigned; }
int initializer_position() { return initializer_position_; }
void set_initializer_position(int pos) { initializer_position_ = pos; }
bool IsUnallocated() const {
return location_ == VariableLocation::UNALLOCATED;
return location() == VariableLocation::UNALLOCATED;
}
bool IsParameter() const { return location_ == VariableLocation::PARAMETER; }
bool IsStackLocal() const { return location_ == VariableLocation::LOCAL; }
bool IsParameter() const { return location() == VariableLocation::PARAMETER; }
bool IsStackLocal() const { return location() == VariableLocation::LOCAL; }
bool IsStackAllocated() const { return IsParameter() || IsStackLocal(); }
bool IsContextSlot() const { return location_ == VariableLocation::CONTEXT; }
bool IsGlobalSlot() const { return location_ == VariableLocation::GLOBAL; }
bool IsUnallocatedOrGlobalSlot() const {
return IsUnallocated() || IsGlobalSlot();
}
bool IsLookupSlot() const { return location_ == VariableLocation::LOOKUP; }
bool IsContextSlot() const { return location() == VariableLocation::CONTEXT; }
bool IsLookupSlot() const { return location() == VariableLocation::LOOKUP; }
bool IsGlobalObjectProperty() const;
bool IsStaticGlobalObjectProperty() const;
bool is_dynamic() const { return IsDynamicVariableMode(mode_); }
bool is_const_mode() const { return IsImmutableVariableMode(mode_); }
bool is_dynamic() const { return IsDynamicVariableMode(mode()); }
bool binding_needs_init() const {
DCHECK(initialization_flag_ != kNeedsInitialization ||
IsLexicalVariableMode(mode_));
return initialization_flag_ == kNeedsInitialization;
DCHECK(initialization_flag() != kNeedsInitialization ||
IsLexicalVariableMode(mode()));
return initialization_flag() == kNeedsInitialization;
}
bool throw_on_const_assignment(LanguageMode language_mode) const {
return kind() != SLOPPY_FUNCTION_NAME_VARIABLE || is_strict(language_mode);
}
bool is_function() const { return kind_ == FUNCTION; }
bool is_this() const { return kind_ == THIS; }
bool is_arguments() const { return kind_ == ARGUMENTS; }
bool is_function() const { return kind() == FUNCTION_VARIABLE; }
bool is_this() const { return kind() == THIS_VARIABLE; }
bool is_sloppy_function_name() const {
return kind() == SLOPPY_FUNCTION_NAME_VARIABLE;
}
Variable* local_if_not_shadowed() const {
DCHECK(mode_ == DYNAMIC_LOCAL && local_if_not_shadowed_ != NULL);
DCHECK(mode() == DYNAMIC_LOCAL && local_if_not_shadowed_ != NULL);
return local_if_not_shadowed_;
}
@ -90,40 +91,61 @@ class Variable final : public ZoneObject {
local_if_not_shadowed_ = local;
}
VariableLocation location() const { return location_; }
int index() const { return index_; }
VariableLocation location() const {
return LocationField::decode(bit_field_);
}
VariableKind kind() const { return VariableKindField::decode(bit_field_); }
InitializationFlag initialization_flag() const {
return initialization_flag_;
return InitializationFlagField::decode(bit_field_);
}
int index() const { return index_; }
bool IsExport() const {
DCHECK(location() == VariableLocation::MODULE);
return index() == 0;
}
void AllocateTo(VariableLocation location, int index) {
DCHECK(IsUnallocated() || (location_ == location && index_ == index));
location_ = location;
DCHECK(IsUnallocated() ||
(this->location() == location && this->index() == index));
bit_field_ = LocationField::update(bit_field_, location);
DCHECK_EQ(location, this->location());
index_ = index;
}
static int CompareIndex(Variable* const* v, Variable* const* w);
static InitializationFlag DefaultInitializationFlag(VariableMode mode) {
DCHECK(IsDeclaredVariableMode(mode));
return mode == VAR ? kCreatedInitialized : kNeedsInitialization;
}
private:
Scope* scope_;
const AstRawString* name_;
VariableMode mode_;
Kind kind_;
VariableLocation location_;
int index_;
int initializer_position_;
// If this field is set, this variable references the stored locally bound
// variable, but it might be shadowed by variable bindings introduced by
// sloppy 'eval' calls between the reference scope (inclusive) and the
// binding scope (exclusive).
Variable* local_if_not_shadowed_;
int index_;
int initializer_position_;
uint16_t bit_field_;
// Usage info.
bool force_context_allocation_; // set by variable resolver
bool is_used_;
InitializationFlag initialization_flag_;
MaybeAssignedFlag maybe_assigned_;
class VariableModeField : public BitField16<VariableMode, 0, 3> {};
class VariableKindField
: public BitField16<VariableKind, VariableModeField::kNext, 3> {};
class LocationField
: public BitField16<VariableLocation, VariableKindField::kNext, 3> {};
class ForceContextAllocationField
: public BitField16<bool, LocationField::kNext, 1> {};
class IsUsedField
: public BitField16<bool, ForceContextAllocationField::kNext, 1> {};
class InitializationFlagField
: public BitField16<InitializationFlag, IsUsedField::kNext, 2> {};
class MaybeAssignedFlagField
: public BitField16<MaybeAssignedFlag, InitializationFlagField::kNext,
2> {};
};
} // namespace internal
} // namespace v8
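
A minimal, self-contained sketch of the BitField16 packing idea used in the new Variable layout above: several small enums and flags share a single uint16_t, each field knowing its own shift and width. The BitField16 template, the Mode and Location enums, and the field widths below are illustrative stand-ins, not V8's actual definitions.

#include <cassert>
#include <cstdint>

// Stand-in for V8's BitField16: T stored in bits [kShift, kShift + kSize).
template <class T, int kShift, int kSize>
struct BitField16 {
  static constexpr int kNext = kShift + kSize;
  static constexpr uint16_t kMask =
      static_cast<uint16_t>(((1u << kSize) - 1) << kShift);
  static uint16_t encode(T value) {
    return static_cast<uint16_t>(static_cast<unsigned>(value) << kShift);
  }
  static T decode(uint16_t field) {
    return static_cast<T>((field & kMask) >> kShift);
  }
  static uint16_t update(uint16_t field, T value) {
    return static_cast<uint16_t>((field & ~kMask) | encode(value));
  }
};

enum class Mode { kVar, kLet, kConst };          // stand-in for VariableMode
enum class Location { kUnallocated, kContext };  // stand-in for VariableLocation

using ModeField = BitField16<Mode, 0, 3>;
using LocationField = BitField16<Location, ModeField::kNext, 3>;

int main() {
  // Pack two fields into one 16-bit word, then rewrite just one of them.
  uint16_t bits = static_cast<uint16_t>(
      ModeField::encode(Mode::kLet) |
      LocationField::encode(Location::kUnallocated));
  bits = LocationField::update(bits, Location::kContext);
  assert(ModeField::decode(bits) == Mode::kLet);  // untouched field survives
  assert(LocationField::decode(bits) == Location::kContext);
  return 0;
}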

16
deps/v8/src/background-parsing-task.cc vendored

@ -3,11 +3,19 @@
// found in the LICENSE file.
#include "src/background-parsing-task.h"
#include "src/debug/debug.h"
#include "src/parsing/parser.h"
namespace v8 {
namespace internal {
void StreamedSource::Release() {
parser.reset();
info.reset();
zone.reset();
}
BackgroundParsingTask::BackgroundParsingTask(
StreamedSource* source, ScriptCompiler::CompileOptions options,
int stack_size, Isolate* isolate)
@ -42,9 +50,8 @@ BackgroundParsingTask::BackgroundParsingTask(
// Parser needs to stay alive for finalizing the parsing on the main
// thread.
source_->parser.reset(new Parser(source_->info.get()));
source_->parser->DeserializeScopeChain(
source_->info.get(), Handle<Context>::null(),
Scope::DeserializationMode::kDeserializeOffHeap);
source_->parser->DeserializeScopeChain(source_->info.get(),
MaybeHandle<ScopeInfo>());
}
@ -55,8 +62,7 @@ void BackgroundParsingTask::Run() {
// Reset the stack limit of the parser to reflect correctly that we're on a
// background thread.
uintptr_t stack_limit =
reinterpret_cast<uintptr_t>(&stack_limit) - stack_size_ * KB;
uintptr_t stack_limit = GetCurrentStackPosition() - stack_size_ * KB;
source_->parser->set_stack_limit(stack_limit);
// Nullify the Isolate temporarily so that the background parser doesn't
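
The replacement above swaps the old address-of-a-local trick for GetCurrentStackPosition(); both compute a limit a fixed number of kilobytes below the current stack top, assuming the stack grows downward. A rough, self-contained sketch of that calculation follows; the helper and the 512 KB budget are illustrative stand-ins, not V8's definitions.

#include <cstdint>
#include <cstdio>

static const int KB = 1024;

// Approximate the current stack top with the address of a local variable,
// similar in spirit to what the old inline expression did.
static uintptr_t GetCurrentStackPosition() {
  char marker = 0;
  return reinterpret_cast<uintptr_t>(&marker);
}

int main() {
  const int stack_size = 512;  // budget in KB, as the task would receive it
  // On a downward-growing stack, anything below this address is off limits.
  uintptr_t stack_limit = GetCurrentStackPosition() - stack_size * KB;
  std::printf("stack limit: %#zx\n", static_cast<size_t>(stack_limit));
  return 0;
}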

14
deps/v8/src/background-parsing-task.h vendored

@ -7,15 +7,16 @@
#include <memory>
#include "include/v8.h"
#include "src/base/platform/platform.h"
#include "src/base/platform/semaphore.h"
#include "src/compiler.h"
#include "src/parsing/parse-info.h"
#include "src/parsing/parser.h"
#include "src/unicode-cache.h"
namespace v8 {
namespace internal {
class Parser;
class ScriptData;
// Internal representation of v8::ScriptCompiler::StreamedSource. Contains all
@ -26,6 +27,8 @@ struct StreamedSource {
ScriptCompiler::StreamedSource::Encoding encoding)
: source_stream(source_stream), encoding(encoding) {}
void Release();
// Internal implementation of v8::ScriptCompiler::StreamedSource.
std::unique_ptr<ScriptCompiler::ExternalSourceStream> source_stream;
ScriptCompiler::StreamedSource::Encoding encoding;
@ -39,10 +42,9 @@ struct StreamedSource {
std::unique_ptr<ParseInfo> info;
std::unique_ptr<Parser> parser;
private:
// Prevent copying. Not implemented.
StreamedSource(const StreamedSource&);
StreamedSource& operator=(const StreamedSource&);
// Prevent copying.
StreamedSource(const StreamedSource&) = delete;
StreamedSource& operator=(const StreamedSource&) = delete;
};
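
The hunk above replaces the old "declared private, never implemented" copy prevention with explicitly deleted copy operations, turning accidental copies into clear compile-time errors. A tiny sketch of the idiom, using a hypothetical StreamedSourceLike type rather than the real StreamedSource:

// Non-copyable type: the copy constructor and copy assignment are deleted.
struct StreamedSourceLike {
  StreamedSourceLike() = default;

  StreamedSourceLike(const StreamedSourceLike&) = delete;
  StreamedSourceLike& operator=(const StreamedSourceLike&) = delete;
};

int main() {
  StreamedSourceLike source;
  // StreamedSourceLike copy = source;  // would not compile: deleted copy ctor
  (void)source;
  return 0;
}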

11
deps/v8/src/bailout-reason.h vendored

@ -20,7 +20,6 @@ namespace internal {
V(kArgumentsObjectValueInATestContext, \
"Arguments object value in a test context") \
V(kArrayIndexConstantValueTooBig, "Array index constant value too big") \
V(kAssignmentToArguments, "Assignment to arguments") \
V(kAssignmentToLetVariableBeforeInitialization, \
"Assignment to let variable before initialization") \
V(kAssignmentToLOOKUPVariable, "Assignment to LOOKUP variable") \
@ -64,6 +63,8 @@ namespace internal {
V(kEval, "eval") \
V(kExpectedAllocationSite, "Expected allocation site") \
V(kExpectedBooleanValue, "Expected boolean value") \
V(kExpectedFixedDoubleArrayMap, \
"Expected a fixed double array map in fast shallow clone array literal") \
V(kExpectedFunctionObject, "Expected function object in register") \
V(kExpectedHeapNumber, "Expected HeapNumber") \
V(kExpectedJSReceiver, "Expected object to have receiver type") \
@ -242,10 +243,6 @@ namespace internal {
V(kUnexpectedTypeForRegExpDataFixedArrayExpected, \
"Unexpected type for RegExp data, FixedArray expected") \
V(kUnexpectedValue, "Unexpected value") \
V(kUnsupportedConstCompoundAssignment, \
"Unsupported const compound assignment") \
V(kUnsupportedCountOperationWithConst, \
"Unsupported count operation with const") \
V(kUnsupportedDoubleImmediate, "Unsupported double immediate") \
V(kUnsupportedLetCompoundAssignment, "Unsupported let compound assignment") \
V(kUnsupportedLookupSlotInDeclaration, \
@ -268,9 +265,7 @@ namespace internal {
V(kWrongArgumentCountForInvokeIntrinsic, \
"Wrong number of arguments for intrinsic") \
V(kShouldNotDirectlyEnterOsrFunction, \
"Should not directly enter OSR-compiled function") \
V(kConversionFromImpossibleValue, \
"Reached conversion from value with empty type (i.e., impossible type)")
"Should not directly enter OSR-compiled function")
#define ERROR_MESSAGES_CONSTANTS(C, T) C,
enum BailoutReason {
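
The ERROR_MESSAGES_CONSTANTS macro at the end of this hunk is one half of a list-macro (X-macro) pattern: a single list is expanded once to produce the BailoutReason enum and again to produce the matching message strings. A reduced sketch with a two-entry stand-in list, not V8's real set of reasons:

#include <cstdio>

// Stand-in list with the same shape as the real one, which is far longer.
#define ERROR_MESSAGES_LIST(V)                        \
  V(kNoReason, "no reason")                           \
  V(kExpectedBooleanValue, "Expected boolean value")

// First expansion: enum constants.
#define ERROR_MESSAGES_CONSTANTS(C, T) C,
enum BailoutReason {
  ERROR_MESSAGES_LIST(ERROR_MESSAGES_CONSTANTS) kLastErrorMessage
};
#undef ERROR_MESSAGES_CONSTANTS

// Second expansion: message table indexed by the enum.
#define ERROR_MESSAGES_TEXTS(C, T) T,
static const char* const kMessages[] = {
    ERROR_MESSAGES_LIST(ERROR_MESSAGES_TEXTS)};
#undef ERROR_MESSAGES_TEXTS

int main() {
  std::printf("%s\n", kMessages[kExpectedBooleanValue]);  // "Expected boolean value"
  return 0;
}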

8
deps/v8/src/base.isolate vendored

@ -4,7 +4,6 @@
{
'includes': [
'../third_party/icu/icu.isolate',
'../gypfiles/config/win/msvs_dependencies.isolate',
],
'conditions': [
['v8_use_snapshot=="true" and v8_use_external_startup_data==1', {
@ -15,13 +14,6 @@
],
},
}],
['OS=="mac" and asan==1', {
'variables': {
'files': [
'<(PRODUCT_DIR)/libclang_rt.asan_osx_dynamic.dylib',
],
},
}],
['tsan==1', {
'variables': {
'files': [

44
deps/v8/src/base/accounting-allocator.cc vendored

@ -1,44 +0,0 @@
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/base/accounting-allocator.h"
#include <cstdlib>
#if V8_LIBC_BIONIC
#include <malloc.h> // NOLINT
#endif
namespace v8 {
namespace base {
void* AccountingAllocator::Allocate(size_t bytes) {
void* memory = malloc(bytes);
if (memory) {
AtomicWord current =
NoBarrier_AtomicIncrement(&current_memory_usage_, bytes);
AtomicWord max = NoBarrier_Load(&max_memory_usage_);
while (current > max) {
max = NoBarrier_CompareAndSwap(&max_memory_usage_, max, current);
}
}
return memory;
}
void AccountingAllocator::Free(void* memory, size_t bytes) {
free(memory);
NoBarrier_AtomicIncrement(&current_memory_usage_,
-static_cast<AtomicWord>(bytes));
}
size_t AccountingAllocator::GetCurrentMemoryUsage() const {
return NoBarrier_Load(&current_memory_usage_);
}
size_t AccountingAllocator::GetMaxMemoryUsage() const {
return NoBarrier_Load(&max_memory_usage_);
}
} // namespace base
} // namespace v8
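
The deleted AccountingAllocator above tracked current and peak memory usage with V8's NoBarrier atomics, raising the recorded maximum in a compare-and-swap loop. Roughly the same pattern, rewritten here with std::atomic so it compiles on its own; the CountingAllocator name is made up for this sketch:

#include <atomic>
#include <cstddef>
#include <cstdio>
#include <cstdlib>

class CountingAllocator {
 public:
  void* Allocate(size_t bytes) {
    void* memory = std::malloc(bytes);
    if (memory) {
      // fetch_add returns the old value, so add bytes to get the new total.
      size_t current = current_.fetch_add(bytes) + bytes;
      size_t max = max_.load();
      // Raise the recorded maximum until no other thread has beaten us to it.
      while (current > max && !max_.compare_exchange_weak(max, current)) {
      }
    }
    return memory;
  }

  void Free(void* memory, size_t bytes) {
    std::free(memory);
    current_.fetch_sub(bytes);
  }

  size_t current_usage() const { return current_.load(); }
  size_t max_usage() const { return max_.load(); }

 private:
  std::atomic<size_t> current_{0};
  std::atomic<size_t> max_{0};
};

int main() {
  CountingAllocator allocator;
  void* block = allocator.Allocate(1024);
  std::printf("current=%zu max=%zu\n", allocator.current_usage(),
              allocator.max_usage());
  allocator.Free(block, 1024);
  return 0;
}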

Some files were not shown because too many files have changed in this diff.