diff --git a/deps/v8/.clang-format b/deps/v8/.clang-format
index 96a5eb602702d2..066c8d27c657cf 100644
--- a/deps/v8/.clang-format
+++ b/deps/v8/.clang-format
@@ -3,7 +3,13 @@ BasedOnStyle: Google
 DerivePointerAlignment: false
 MaxEmptyLinesToKeep: 1
-IfMacros: ['IF', 'IF_NOT', 'ELSE', 'ELSE_IF']
+IfMacros:
+  - IF
+  - IF_NOT
+Macros:
+  # Make clang-format think TurboShaft `ELSE` expands to just `else`, so that
+  # it formats well alongside `if`
+  - ELSE=else
 StatementMacros:
   - DECL_CAST
   - DECL_VERIFIER
diff --git a/deps/v8/.gitignore b/deps/v8/.gitignore
index 31d395a0fef5a4..d5c64a8e83c0e6 100644
--- a/deps/v8/.gitignore
+++ b/deps/v8/.gitignore
@@ -134,3 +134,6 @@ bazel-v8
 !/third_party/abseil-cpp
 /third_party/abseil-cpp/.github
 /third_party/abseil-cpp/ci
+!/third_party/fp16
+/third_party/fp16/src/*
+!/third_party/fp16/src/include
diff --git a/deps/v8/AUTHORS b/deps/v8/AUTHORS
index 9bd9ff447e5d03..6de30c78a85421 100644
--- a/deps/v8/AUTHORS
+++ b/deps/v8/AUTHORS
@@ -81,6 +81,7 @@ Benjamin Tan
 Bert Belder
 Brendon Tiszka
 Brice Dobry
+Bruno Pitrus
 Burcu Dogan
 Caitlin Potter
 Chao Wang
@@ -109,6 +110,7 @@ Deepak Mohan
 Deon Dior
 Derek Tu
 Divy Srivastava
+Dmitry Bezhetskov
 Dominic Chen
 Dominic Farolini
 Douglas Crosher
@@ -232,6 +234,7 @@ Peter Varga
 Peter Wong
 PhistucK
 Pierrick Bouvier
+Punith B Nayak
 Rafal Krypa
 Raul Tambre
 Ray Glover
diff --git a/deps/v8/BUILD.bazel b/deps/v8/BUILD.bazel
index 2351c1e3300e0b..05b7472165ae85 100644
--- a/deps/v8/BUILD.bazel
+++ b/deps/v8/BUILD.bazel
@@ -674,6 +674,7 @@ filegroup(
    name = "v8_flags",
    srcs = [
        "src/flags/flag-definitions.h",
+       "src/flags/flags-impl.h",
        "src/flags/flags.h",
    ] + select({
        "is_v8_enable_webassembly": ["src/wasm/wasm-feature-flags.h"],
@@ -802,7 +803,6 @@ filegroup(
        "src/base/timezone-cache.h",
        "src/base/utils/random-number-generator.cc",
        "src/base/utils/random-number-generator.h",
-       "src/base/v8-fallthrough.h",
        "src/base/vector.h",
        "src/base/virtual-address-space.cc",
        "src/base/virtual-address-space.h",
@@ -1540,6 +1540,7 @@ filegroup(
        "src/extensions/trigger-failure-extension.cc",
        "src/extensions/trigger-failure-extension.h",
        "src/flags/flag-definitions.h",
+       "src/flags/flags-impl.h",
        "src/flags/flags.cc",
        "src/flags/flags.h",
        "src/handles/global-handles.cc",
@@ -1569,8 +1570,8 @@ filegroup(
        "src/heap/base-space.h",
        "src/heap/base/active-system-pages.cc",
        "src/heap/base/active-system-pages.h",
-       "src/heap/basic-memory-chunk.cc",
-       "src/heap/basic-memory-chunk.h",
+       "src/heap/memory-chunk-metadata.cc",
+       "src/heap/memory-chunk-metadata.h",
        "src/heap/code-range.cc",
        "src/heap/code-range.h",
        "src/heap/trusted-range.cc",
@@ -1687,11 +1688,11 @@ filegroup(
        "src/heap/memory-allocator.h",
        "src/heap/memory-balancer.cc",
        "src/heap/memory-balancer.h",
+       "src/heap/mutable-page.cc",
+       "src/heap/mutable-page.h",
        "src/heap/memory-chunk.cc",
        "src/heap/memory-chunk.h",
-       "src/heap/memory-chunk-header.cc",
-       "src/heap/memory-chunk-header.h",
-       "src/heap/memory-chunk-inl.h",
+       "src/heap/mutable-page-inl.h",
        "src/heap/memory-chunk-layout.cc",
        "src/heap/memory-chunk-layout.h",
        "src/heap/memory-measurement.cc",
@@ -2805,6 +2806,8 @@ filegroup(
        "src/wasm/function-body-decoder-impl.h",
        "src/wasm/function-compiler.cc",
        "src/wasm/function-compiler.h",
+       "src/wasm/fuzzing/random-module-generation.cc",
+       "src/wasm/fuzzing/random-module-generation.h",
        "src/wasm/graph-builder-interface.cc",
        "src/wasm/graph-builder-interface.h",
        "src/wasm/inlining-tree.h",
@@ -3144,6 +3147,7 @@ filegroup(
        "src/compiler/phase.h",
        "src/compiler/pipeline.cc",
        "src/compiler/pipeline.h",
+       "src/compiler/pipeline-data-inl.h",
        "src/compiler/pipeline-statistics.cc",
        "src/compiler/pipeline-statistics.h",
        "src/compiler/processed-feedback.h",
@@ -3213,6 +3217,7 @@ filegroup(
        "src/compiler/turboshaft/graph-builder.h",
        "src/compiler/turboshaft/graph-visualizer.cc",
        "src/compiler/turboshaft/graph-visualizer.h",
+       "src/compiler/turboshaft/js-generic-lowering-reducer.h",
        "src/compiler/turboshaft/index.h",
        "src/compiler/turboshaft/instruction-selection-phase.cc",
        "src/compiler/turboshaft/instruction-selection-phase.h",
@@ -3234,6 +3239,7 @@ filegroup(
        "src/compiler/turboshaft/machine-lowering-phase.cc",
        "src/compiler/turboshaft/machine-lowering-phase.h",
        "src/compiler/turboshaft/machine-lowering-reducer-inl.h",
+       "src/compiler/turboshaft/maglev-early-lowering-reducer-inl.h",
        "src/compiler/turboshaft/maglev-graph-building-phase.cc",
        "src/compiler/turboshaft/maglev-graph-building-phase.h",
        "src/compiler/turboshaft/machine-optimization-reducer.h",
@@ -3479,6 +3485,7 @@ filegroup(
        "src/builtins/setup-builtins-internal.cc",
        "src/builtins/torque-csa-header-includes.h",
        "src/codegen/code-stub-assembler.cc",
+       "third_party/v8/codegen/fp16-inl.h",
        "src/codegen/code-stub-assembler-inl.h",
        "src/codegen/code-stub-assembler.h",
        "src/heap/setup-heap-internal.cc",
@@ -3678,6 +3685,18 @@ filegroup(
    }),
 )
 
+v8_library(
+    name = "lib_fp16",
+    srcs = ["third_party/fp16/src/include/fp16.h"],
+    hdrs = [
+        "third_party/fp16/src/include/fp16/fp16.h",
+        "third_party/fp16/src/include/fp16/bitcasts.h",
+    ],
+    includes = [
+        "third_party/fp16/src/include",
+    ],
+)
+
 filegroup(
    name = "v8_bigint",
    srcs = [
@@ -4167,6 +4186,7 @@ v8_library(
        ":noicu/generated_torque_definitions",
    ],
    deps = [
+       ":lib_fp16",
        ":v8_libbase",
        "//external:base_trace_event_common",
        "//external:absl_btree",
@@ -4223,11 +4243,11 @@ alias(
 
 v8_library(
    name = "v8_vtune",
-    srcs = glob([
+    srcs = [
        "src/third_party/vtune/v8-vtune.h",
        "src/third_party/vtune/vtune-jit.cc",
        "src/third_party/vtune/vtune-jit.h",
-    ]),
+    ],
    copts = ["-I"],
    deps = [
        ":core_lib_noicu",
diff --git a/deps/v8/BUILD.gn b/deps/v8/BUILD.gn
index 0e10f0e71b5142..a25b6f2ac986f3 100644
--- a/deps/v8/BUILD.gn
+++ b/deps/v8/BUILD.gn
@@ -337,10 +337,12 @@ declare_args() {
   # Sets -DV8_ENABLE_SANDBOX.
   v8_enable_sandbox = ""
 
-  # Expose the memory corruption API to JavaScript. Useful for testing the sandbox.
-  # WARNING This will expose builtins that (by design) cause memory corruption.
-  # Sets -DV8_EXPOSE_MEMORY_CORRUPTION_API
-  v8_expose_memory_corruption_api = false
+  # Enable the memory corruption API. Useful for testing the sandbox.
+  # The memory corruption API is only exposed to JavaScript if sandbox testing
+  # mode is enabled at runtime, for example via --sandbox-fuzzing.
+  # WARNING This will enable builtins that (by design) cause memory corruption.
+  # Sets -DV8_ENABLE_MEMORY_CORRUPTION_API
+  v8_enable_memory_corruption_api = false
 
   # Experimental feature for collecting per-class zone memory stats.
   # Requires use_rtti = true
@@ -605,24 +607,26 @@ assert(
 if (v8_builtins_profiling_log_file == "default") {
   v8_builtins_profiling_log_file = ""
 
-  # Don't use existing profile when
-  # * v8_enable_builtins_optimization is disabled,
-  # * generating a new one (i.e. v8_enable_builtins_profiling),
-  # * is_debug or dcheck_always_on because they add more checks to the
-  #   builtins control flow which we don't want to generate,
-  # * !v8_enable_sandbox because it affects the way how external pointer values
-  #   are accessed,
-  # * v8_enable_webassembly because it changes the set of opcodes which affects
-  #   graphs hashes,
+  # The existing profile can be used only when
+  # * `v8_enable_builtins_optimization` - this switch enables builtins PGO,
+  # * `!v8_enable_builtins_profiling` - don't use the profiles when generating
+  #   a new one,
+  # * `!is_debug && !dcheck_always_on` - these modes add more checks to
+  #   the builtins control flow, which makes the builtins code different,
+  # * `v8_enable_pointer_compression` - it changes the object layouts,
+  # * `v8_enable_sandbox && v8_enable_external_code_space` because they affect
+  #   the way external pointer values are accessed,
+  # * `v8_enable_webassembly` because it changes the set of opcodes, which
+  #   affects graph hashes.
   if (v8_enable_builtins_optimization && !v8_enable_builtins_profiling &&
       !is_debug && !dcheck_always_on && v8_enable_webassembly) {
-    # This is about function arguments evaluation order, which makes node IDs
-    # not predictable for subgraphs like Op1(Op2(), Op3()) and as a result
-    # different graph hashes.
+    # This is about function arguments evaluation order on the machine building
+    # mksnapshot, which makes node IDs not predictable for subgraphs like
+    # Op1(Op2(), Op3()) and as a result different graph hashes.
     # Clang uses left-to-right order everywhere except Windows, otherwise the
     # order is right-to-left.
     # TODO(crbug.com/v8/13647): Remove once this issue is fixed in CSA.
-    if (!is_clang || is_win) {
+    if (!is_clang || host_os == "win") {
       pgo_profile_suffix = "-rl"
     } else {
       pgo_profile_suffix = ""
@@ -680,7 +684,7 @@ assert(!v8_enable_sandbox || v8_enable_external_code_space,
 assert(!v8_enable_sandbox || !v8_enable_third_party_heap,
        "The sandbox is incompatible with the third-party heap")
 
-assert(!v8_expose_memory_corruption_api || v8_enable_sandbox,
+assert(!v8_enable_memory_corruption_api || v8_enable_sandbox,
        "The Memory Corruption API requires the sandbox")
 
 assert(
@@ -873,6 +877,7 @@ external_v8_defines = [
   "V8_IS_TSAN",
   "V8_ENABLE_CONSERVATIVE_STACK_SCANNING",
   "V8_ENABLE_DIRECT_LOCAL",
+  "V8_MINORMS_STRING_SHORTCUTTING",
 ]
 
 enabled_external_v8_defines = []
@@ -1206,8 +1211,8 @@ config("features") {
   if (v8_advanced_bigint_algorithms) {
     defines += [ "V8_ADVANCED_BIGINT_ALGORITHMS" ]
   }
-  if (v8_expose_memory_corruption_api) {
-    defines += [ "V8_EXPOSE_MEMORY_CORRUPTION_API" ]
+  if (v8_enable_memory_corruption_api) {
+    defines += [ "V8_ENABLE_MEMORY_CORRUPTION_API" ]
   }
   if (v8_enable_pointer_compression_8gb) {
     defines += [ "V8_COMPRESS_POINTERS_8GB" ]
@@ -1738,7 +1743,7 @@ config("always_turbofanimize") {
 
   # TODO(crbug.com/621335) Rework this so that we don't have the confusion
   # between "optimize_speed" and "optimize_max".
-  if (((is_posix && !is_android) || is_fuchsia) && !using_sanitizer) {
+  if (((is_posix && !is_android) || is_fuchsia || is_win) && !using_sanitizer) {
     configs += [ "//build/config/compiler:optimize_speed" ]
   } else {
     configs += [ "//build/config/compiler:optimize_max" ]
@@ -2735,6 +2740,7 @@ generated_file("v8_generate_features_json") {
     v8_enable_31bit_smis_on_64bit_arch = v8_enable_31bit_smis_on_64bit_arch
     v8_enable_conservative_stack_scanning =
         v8_enable_conservative_stack_scanning
+    v8_enable_direct_handle = v8_enable_direct_handle
     v8_enable_direct_local = v8_enable_direct_local
     v8_enable_extensible_ro_snapshot = v8_enable_extensible_ro_snapshot
     v8_enable_gdbjit = v8_enable_gdbjit
@@ -2903,6 +2909,7 @@ v8_source_set("v8_initializers") {
     "src/interpreter/interpreter-intrinsics-generator.h",
     "src/numbers/integer-literal-inl.h",
     "src/numbers/integer-literal.h",
+    "third_party/v8/codegen/fp16-inl.h",
   ]
 
   if (v8_enable_webassembly) {
@@ -3159,6 +3166,7 @@ v8_header_set("v8_flags") {
 
   sources = [
     "src/flags/flag-definitions.h",
+    "src/flags/flags-impl.h",
     "src/flags/flags.h",
   ]
 
@@ -3382,6 +3390,7 @@ v8_header_set("v8_internal_headers") {
     "src/compiler/per-isolate-compiler-cache.h",
     "src/compiler/persistent-map.h",
     "src/compiler/phase.h",
+    "src/compiler/pipeline-data-inl.h",
     "src/compiler/pipeline-statistics.h",
     "src/compiler/pipeline.h",
     "src/compiler/processed-feedback.h",
@@ -3427,6 +3436,7 @@ v8_header_set("v8_internal_headers") {
     "src/compiler/turboshaft/graph.h",
     "src/compiler/turboshaft/index.h",
     "src/compiler/turboshaft/instruction-selection-phase.h",
+    "src/compiler/turboshaft/js-generic-lowering-reducer.h",
     "src/compiler/turboshaft/late-escape-analysis-reducer.h",
     "src/compiler/turboshaft/late-load-elimination-reducer.h",
     "src/compiler/turboshaft/layered-hash-map.h",
@@ -3439,6 +3449,7 @@ v8_header_set("v8_internal_headers") {
     "src/compiler/turboshaft/machine-lowering-phase.h",
     "src/compiler/turboshaft/machine-lowering-reducer-inl.h",
     "src/compiler/turboshaft/machine-optimization-reducer.h",
+    "src/compiler/turboshaft/maglev-early-lowering-reducer-inl.h",
     "src/compiler/turboshaft/maglev-graph-building-phase.h",
     "src/compiler/turboshaft/memory-optimization-reducer.h",
     "src/compiler/turboshaft/operation-matcher.h",
@@ -3578,7 +3589,6 @@ v8_header_set("v8_internal_headers") {
     "src/heap/allocation-stats.h",
     "src/heap/array-buffer-sweeper.h",
     "src/heap/base-space.h",
-    "src/heap/basic-memory-chunk.h",
     "src/heap/code-range.h",
     "src/heap/code-stats.h",
     "src/heap/collection-barrier.h",
@@ -3648,9 +3658,8 @@ v8_header_set("v8_internal_headers") {
     "src/heap/marking.h",
     "src/heap/memory-allocator.h",
     "src/heap/memory-balancer.h",
-    "src/heap/memory-chunk-header.h",
-    "src/heap/memory-chunk-inl.h",
     "src/heap/memory-chunk-layout.h",
+    "src/heap/memory-chunk-metadata.h",
     "src/heap/memory-chunk.h",
     "src/heap/memory-measurement-inl.h",
     "src/heap/memory-measurement.h",
@@ -3658,6 +3667,8 @@ v8_header_set("v8_internal_headers") {
     "src/heap/minor-gc-job.h",
     "src/heap/minor-mark-sweep-inl.h",
     "src/heap/minor-mark-sweep.h",
+    "src/heap/mutable-page-inl.h",
+    "src/heap/mutable-page.h",
     "src/heap/new-spaces-inl.h",
     "src/heap/new-spaces.h",
     "src/heap/object-lock.h",
@@ -4253,6 +4264,7 @@ v8_header_set("v8_internal_headers") {
     "src/wasm/function-body-decoder-impl.h",
     "src/wasm/function-body-decoder.h",
     "src/wasm/function-compiler.h",
+    "src/wasm/fuzzing/random-module-generation.h",
     "src/wasm/graph-builder-interface.h",
     "src/wasm/inlining-tree.h",
     "src/wasm/jump-table-assembler.h",
@@ -4677,6 +4689,17 @@ v8_header_set("v8_internal_headers") {
       "src/baseline/riscv/baseline-compiler-riscv-inl.h",
     ]
   }
+  if (v8_enable_webassembly) {
+    # Trap handling is enabled on riscv64 Linux and in simulators on
+    # x64 on Linux.
+    if ((current_cpu == "riscv64" && is_linux) ||
+        (current_cpu == "x64" && is_linux)) {
+      sources += [ "src/trap-handler/handler-inside-posix.h" ]
+    }
+    if (current_cpu == "x64" && is_linux) {
+      sources += [ "src/trap-handler/trap-handler-simulator.h" ]
+    }
+  }
 } else if (v8_current_cpu == "riscv32") {
   sources += [
     ### gcmole(riscv32) ###
@@ -5305,7 +5328,6 @@ v8_source_set("v8_base_without_compiler") {
     "src/handles/traced-handles.cc",
     "src/heap/allocation-observer.cc",
     "src/heap/array-buffer-sweeper.cc",
-    "src/heap/basic-memory-chunk.cc",
     "src/heap/code-range.cc",
     "src/heap/code-stats.cc",
     "src/heap/collection-barrier.cc",
@@ -5347,13 +5369,14 @@ v8_source_set("v8_base_without_compiler") {
     "src/heap/marking.cc",
     "src/heap/memory-allocator.cc",
     "src/heap/memory-balancer.cc",
-    "src/heap/memory-chunk-header.cc",
     "src/heap/memory-chunk-layout.cc",
+    "src/heap/memory-chunk-metadata.cc",
     "src/heap/memory-chunk.cc",
     "src/heap/memory-measurement.cc",
     "src/heap/memory-reducer.cc",
     "src/heap/minor-gc-job.cc",
     "src/heap/minor-mark-sweep.cc",
+    "src/heap/mutable-page.cc",
     "src/heap/new-spaces.cc",
     "src/heap/object-stats.cc",
     "src/heap/objects-visiting.cc",
@@ -5734,6 +5757,12 @@ v8_source_set("v8_base_without_compiler") {
       "src/wasm/well-known-imports.cc",
       "src/wasm/wrappers.cc",
     ]
+    if (!is_official_build) {
+      sources += [
+        ### gcmole(all) ###
+        "src/wasm/fuzzing/random-module-generation.cc",
+      ]
+    }
   }
 
   if (v8_enable_third_party_heap) {
@@ -5984,6 +6013,20 @@ v8_source_set("v8_base_without_compiler") {
       "src/execution/riscv/simulator-riscv.cc",
       "src/regexp/riscv/regexp-macro-assembler-riscv.cc",
     ]
+    if (v8_enable_webassembly) {
+      # Trap handling is enabled on riscv64 Linux and in simulators on
+      # x64 on Linux.
+      if ((current_cpu == "riscv64" && is_linux) ||
+          (current_cpu == "x64" && is_linux)) {
+        sources += [
+          "src/trap-handler/handler-inside-posix.cc",
+          "src/trap-handler/handler-outside-posix.cc",
+        ]
+      }
+      if (current_cpu == "x64" && is_linux) {
+        sources += [ "src/trap-handler/handler-outside-simulator.cc" ]
+      }
+    }
   } else if (v8_current_cpu == "riscv32") {
     sources += [
       ### gcmole(riscv32) ###
@@ -6364,7 +6407,6 @@ v8_component("v8_libbase") {
     "src/base/timezone-cache.h",
     "src/base/utils/random-number-generator.cc",
     "src/base/utils/random-number-generator.h",
-    "src/base/v8-fallthrough.h",
     "src/base/vector.h",
     "src/base/virtual-address-space-page-allocator.cc",
     "src/base/virtual-address-space-page-allocator.h",
@@ -7309,7 +7351,9 @@ group("v8_fuzzers") {
       ":v8_simple_wasm_async_fuzzer",
       ":v8_simple_wasm_code_fuzzer",
       ":v8_simple_wasm_compile_fuzzer",
+      ":v8_simple_wasm_compile_simd_fuzzer",
       ":v8_simple_wasm_fuzzer",
+      ":v8_simple_wasm_init_expr_fuzzer",
       ":v8_simple_wasm_streaming_fuzzer",
     ]
   }
@@ -7502,6 +7546,13 @@ v8_executable("v8_hello_world") {
     ":v8_libplatform",
     "//build/win:default_exe_manifest",
   ]
+
+  # Needed to work around a link error when using devtoolset
+  # https://bugzilla.redhat.com/show_bug.cgi?id=2268188
+  if ((v8_current_cpu == "ppc64" || v8_current_cpu == "s390x") && is_linux &&
+      !is_clang) {
+    libs = [ "stdc++" ]
+  }
 }
 
 v8_executable("v8_sample_process") {
@@ -7772,6 +7823,27 @@ if (v8_enable_webassembly) {
   v8_fuzzer("wasm_compile_fuzzer") {
   }
 
+  v8_source_set("wasm_compile_simd_fuzzer") {
+    sources = [
+      "test/common/wasm/test-signatures.h",
+      "test/fuzzer/wasm-compile-simd.cc",
+    ]
+
+    deps = [
+      ":fuzzer_support",
+      ":lib_wasm_fuzzer_common",
+      ":wasm_test_common",
+    ]
+
+    configs = [
+      ":external_config",
+      ":internal_config_base",
+    ]
+  }
+
+  v8_fuzzer("wasm_compile_simd_fuzzer") {
+  }
+
   v8_source_set("wasm_streaming_fuzzer") {
     sources = [ "test/fuzzer/wasm-streaming.cc" ]
 
@@ -7789,6 +7861,24 @@ if (v8_enable_webassembly) {
 
   v8_fuzzer("wasm_streaming_fuzzer") {
   }
+
+  v8_source_set("wasm_init_expr_fuzzer") {
+    sources = [ "test/fuzzer/wasm-init-expr.cc" ]
+
+    deps = [
+      ":fuzzer_support",
+      ":lib_wasm_fuzzer_common",
+      ":wasm_test_common",
+    ]
+
+    configs = [
+      ":external_config",
+      ":internal_config_base",
+    ]
+  }
+
+  v8_fuzzer("wasm_init_expr_fuzzer") {
+  }
 }
 
 v8_source_set("inspector_fuzzer") {
diff --git a/deps/v8/DEPS b/deps/v8/DEPS
index 6c3ca4e741a6e0..8f2b6e603dbde4 100644
--- a/deps/v8/DEPS
+++ b/deps/v8/DEPS
@@ -57,7 +57,7 @@ vars = {
   'checkout_fuchsia_no_hooks': False,
 
   # reclient CIPD package version
-  'reclient_version': 're_client_version:0.131.1.784ddbb-gomaip',
+  'reclient_version': 're_client_version:0.134.1.2c9285b-gomaip',
 
   # Fetch configuration files required for the 'use_remoteexec' gn arg
   'download_remoteexec_cfg': False,
@@ -73,19 +73,22 @@ vars = {
   'build_with_chromium': False,
 
   # GN CIPD package version.
-  'gn_version': 'git_revision:0a2b8eac80f164f10b2cbc126890db0d295790cd',
+  'gn_version': 'git_revision:59c4bb920542ee903ee1df39097ae024e2e8226f',
 
   # ninja CIPD package version
   # https://chrome-infra-packages.appspot.com/p/infra/3pp/tools/ninja
   'ninja_version': 'version:2@1.11.1.chromium.6',
 
+  # siso CIPD package version
+  'siso_version': 'git_revision:110b1d8c0528de153cef259f09f3dc5ee627e6cb',
+
   # luci-go CIPD package version.
-  'luci_go': 'git_revision:3df60a11d33a59614c0e8d2bccc58d8c30984901',
+  'luci_go': 'git_revision:623f8d17a069eaea6d0fca13147888284ec76ff1',
 
   # Three lines of non-changing comments so that
   # the commit queue can handle CLs rolling Fuchsia sdk
   # and whatever else without interference from each other.
-  'fuchsia_version': 'version:18.20240215.1.1',
+  'fuchsia_version': 'version:19.20240305.3.1',
 
   # Three lines of non-changing comments so that
   # the commit queue can handle CLs rolling android_sdk_build-tools_version
@@ -118,16 +121,16 @@ vars = {
   # Three lines of non-changing comments so that
   # the commit queue can handle CLs rolling android_sdk_tools-lint_version
   # and whatever else without interference from each other.
-  'android_sdk_cmdline-tools_version': 'BRpfUGFd3WoveSGTLVgkQF7ugIVyywGneVICP4c0010C',
+  'android_sdk_cmdline-tools_version': 'mU9jm4LkManzjSzRquV1UIA7fHBZ2pK7NtbCXxoVnVUC',
 }
 
 deps = {
   'base/trace_event/common':
    Var('chromium_url') + '/chromium/src/base/trace_event/common.git' + '@' + '29ac73db520575590c3aceb0a6f1f58dda8934f6',
   'build':
-    Var('chromium_url') + '/chromium/src/build.git' + '@' + 'e5cf1b3ceb3fec6aa5c57b34dede99d36cede32d',
+    Var('chromium_url') + '/chromium/src/build.git' + '@' + 'bca39698b291b392f0b4336857caf929c603ada3',
   'buildtools':
-    Var('chromium_url') + '/chromium/src/buildtools.git' + '@' + '342659133d7d0b33f4e24b640a9ad78c0c423633',
+    Var('chromium_url') + '/chromium/src/buildtools.git' + '@' + '68fce43789231d29d2028ca85530e4814aac6f50',
   'buildtools/linux64': {
     'packages': [
       {
@@ -173,7 +176,7 @@ deps = {
   'test/mozilla/data':
    Var('chromium_url') + '/v8/deps/third_party/mozilla-tests.git' + '@' + 'f6c578a10ea707b1a8ab0b88943fe5115ce2b9be',
   'test/test262/data':
-    Var('chromium_url') + '/external/github.com/tc39/test262.git' + '@' + 'e4f91b6381d7694265031caad0c71d733ac132f3',
+    Var('chromium_url') + '/external/github.com/tc39/test262.git' + '@' + '0b1abd5ee70867311bea78e851bd609ad842011a',
   'third_party/android_platform': {
     'url': Var('chromium_url') + '/chromium/src/third_party/android_platform.git' + '@' + 'eeb2d566f963bb66212fdc0d9bbe1dde550b4969',
     'condition': 'checkout_android',
@@ -235,7 +238,7 @@ deps = {
     'condition': "checkout_centipede_deps",
   },
   'third_party/catapult': {
-    'url': Var('chromium_url') + '/catapult.git' + '@' + '3d6c15240b480da1e498a64a72ea77a61ba335e1',
+    'url': Var('chromium_url') + '/catapult.git' + '@' + '97c002a33e5b777eaa60e3ddc977a185f89446f7',
     'condition': 'checkout_android',
   },
   'third_party/clang-format/script':
@@ -249,11 +252,11 @@ deps = {
     'condition': 'checkout_android',
   },
   'third_party/depot_tools':
-    Var('chromium_url') + '/chromium/tools/depot_tools.git' + '@' + '9d7c8e76f82ddc6a3bbc307217e31dec44a0f73a',
+    Var('chromium_url') + '/chromium/tools/depot_tools.git' + '@' + 'fe6a359a803f55829ede3666215d080f6775f173',
   'third_party/fp16/src':
    Var('chromium_url') + '/external/github.com/Maratyszcza/FP16.git' + '@' + '0a92994d729ff76a58f692d3028ca1b64b145d91',
   'third_party/fuchsia-gn-sdk': {
-    'url': Var('chromium_url') + '/chromium/src/third_party/fuchsia-gn-sdk.git' + '@' + 'fa3c41d7a15127a989111fcede8dae9265f8566b',
+    'url': Var('chromium_url') + '/chromium/src/third_party/fuchsia-gn-sdk.git' + '@' + '727f65f8dae76c0d5c39c0f95d9d8f3a90de79f1',
     'condition': 'checkout_fuchsia',
   },
   # Exists for rolling the Fuchsia SDK. Check out of the SDK should always
@@ -269,17 +272,17 @@ deps = {
     'dep_type': 'cipd',
   },
   'third_party/google_benchmark_chrome': {
-    'url': Var('chromium_url') + '/chromium/src/third_party/google_benchmark.git' + '@' + 'c300add93460c31efe53fa71e61427fa1bc09e6a',
+    'url': Var('chromium_url') + '/chromium/src/third_party/google_benchmark.git' + '@' + 'f049b96d7a50ae19f2748aae7fba7bde705bcd8c',
   },
   'third_party/google_benchmark_chrome/src': {
-    'url': Var('chromium_url') + '/external/github.com/google/benchmark.git' + '@' + 'b177433f3ee2513b1075140c723d73ab8901790f',
+    'url': Var('chromium_url') + '/external/github.com/google/benchmark.git' + '@' + '344117638c8ff7e239044fd0fa7085839fc03021',
   },
   'third_party/fuzztest':
-    Var('chromium_url') + '/chromium/src/third_party/fuzztest.git' + '@' + '9fc64e5930915bfb5a593b7e12487d78283e8221',
+    Var('chromium_url') + '/chromium/src/third_party/fuzztest.git' + '@' + 'daea7ab861050a6445f59758f09cc3173f5add76',
   'third_party/fuzztest/src':
-    Var('chromium_url') + '/external/github.com/google/fuzztest.git' + '@' + '61d95200e7ece7d121cab26f0c39fbf392e6566e',
+    Var('chromium_url') + '/external/github.com/google/fuzztest.git' + '@' + 'bddcd9f77ba0a81a99ce50bcadf5149efe545df0',
   'third_party/googletest/src':
-    Var('chromium_url') + '/external/github.com/google/googletest.git' + '@' + 'af29db7ec28d6df1c7f0f745186884091e602e07',
+    Var('chromium_url') + '/external/github.com/google/googletest.git' + '@' + 'b479e7a3c161d7087113a05f8cb034b870313a55',
   'third_party/icu':
    Var('chromium_url') + '/chromium/deps/icu.git' + '@' + 'a622de35ac311c5ad390a7af80724634e5dc61ed',
   'third_party/instrumented_libraries':
@@ -295,9 +298,9 @@ deps = {
   'third_party/jsoncpp/source':
    Var('chromium_url') + '/external/github.com/open-source-parsers/jsoncpp.git'+ '@' + '42e892d96e47b1f6e29844cc705e148ec4856448',
   'third_party/libc++/src':
-    Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxx.git' + '@' + '6d83791af99ea95f04986d64f111b84ce0b3c6f5',
+    Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxx.git' + '@' + '80307e66e74bae927fb8709a549859e777e3bf0b',
   'third_party/libc++abi/src':
-    Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxxabi.git' + '@' + 'a7b3d968a3a923886fea64b424bd770e69dc4ea4',
+    Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxxabi.git' + '@' + 'fc6253a642c9e336480b17fb17771e2c1efc7fff',
   'third_party/libunwind/src':
    Var('chromium_url') + '/external/github.com/llvm/llvm-project/libunwind.git' + '@' + '8bad7bd6ec30f94bce82f7cb5b58ecbd6ce02996',
   'third_party/logdog/logdog':
@@ -319,15 +322,25 @@ deps = {
   'third_party/protobuf':
    Var('chromium_url') + '/external/github.com/google/protobuf'+ '@' + '6a59a2ad1f61d9696092f79b6d74368b4d7970a3',
   'third_party/re2/src':
-    Var('chromium_url') + '/external/github.com/google/re2.git' + '@' + 'd00d1e93781e6ebe415771a952689dff8f260d44',
+    Var('chromium_url') + '/external/github.com/google/re2.git' + '@' + '108914d28a79243d4300e7e651cd0a0d5883ca0f',
   'third_party/requests': {
     'url': Var('chromium_url') + '/external/github.com/kennethreitz/requests.git' + '@' + 'c7e0fc087ceeadb8b4c84a0953a422c474093d6d',
     'condition': 'checkout_android',
   },
+  'third_party/siso': {
+    'packages': [
+      {
+        'package': 'infra/build/siso/${{platform}}',
+        'version': Var('siso_version'),
+      }
+    ],
+    'dep_type': 'cipd',
+    'condition': 'not build_with_chromium and host_cpu != "s390" and host_cpu != "ppc"',
+  },
   'third_party/zlib':
-    Var('chromium_url') + '/chromium/src/third_party/zlib.git'+ '@' + '4b5807f344182fd392849b820642457212618e5f',
+    Var('chromium_url') + '/chromium/src/third_party/zlib.git'+ '@' + 'c5bf1b566e5df14e763507e2ce30cbfebefeeccf',
   'tools/clang':
-    Var('chromium_url') + '/chromium/src/tools/clang.git' + '@' + 'a4df104173dae7d49205ed8abefc920b7c5162d2',
+    Var('chromium_url') + '/chromium/src/tools/clang.git' + '@' + '1ed379eda880f53d895559815cd3e30b370abff5',
   'tools/luci-go': {
     'packages': [
       {
@@ -343,7 +356,7 @@ deps = {
     'dep_type': 'cipd',
   },
   'third_party/abseil-cpp': {
-    'url': Var('chromium_url') + '/chromium/src/third_party/abseil-cpp.git' + '@' + 'f1c5751a2cb4102efbffc4110ee7551b3c54cfea',
+    'url': Var('chromium_url') + '/chromium/src/third_party/abseil-cpp.git' + '@' + 'b3ae305fd5dbc6ad41eed9add26768c29181219f',
     'condition': 'not build_with_chromium',
   }
 }
@@ -354,6 +367,8 @@ include_rules = [
   '+unicode',
   '+third_party/fdlibm',
   '+third_party/ittapi/include',
+  '+third_party/fp16/src/include',
+  '+third_party/v8/codegen',
   '+third_party/fuzztest',
   # Abseil features are allow-listed. Please use your best judgement when adding
   # to this set -- if in doubt, email v8-dev@. For general guidance, refer to
@@ -746,4 +761,15 @@ hooks = [
       '--skip_remoteexec_cfg_fetch',
     ],
   },
+  # Configure Siso for developer builds.
+  {
+    'name': 'configure_siso',
+    'pattern': '.',
+    'condition': 'not build_with_chromium',
+    'action': ['python3',
+               'build/config/siso/configure_siso.py',
+               '--rbe_instance',
+               Var('rbe_instance'),
+    ],
+  },
 ]
diff --git a/deps/v8/ENG_REVIEW_OWNERS b/deps/v8/ENG_REVIEW_OWNERS
index 4f80f9d15a74c0..7d582ec7d4016b 100644
--- a/deps/v8/ENG_REVIEW_OWNERS
+++ b/deps/v8/ENG_REVIEW_OWNERS
@@ -5,6 +5,7 @@
 adamk@chromium.org
 danno@chromium.org
 hpayer@chromium.org
+leszeks@chromium.org
 mlippautz@chromium.org
 verwaest@chromium.org
 vahl@chromium.org
diff --git a/deps/v8/WORKSPACE b/deps/v8/WORKSPACE
index 87d8cb1fe8c991..96ef24384ed084 100644
--- a/deps/v8/WORKSPACE
+++ b/deps/v8/WORKSPACE
@@ -61,7 +61,7 @@ bind(
 
 new_local_repository(
    name = "com_googlesource_chromium_icu",
-    build_file = "bazel/BUILD.icu",
+    build_file = ":bazel/BUILD.icu",
    path = "third_party/icu",
 )
 
@@ -72,7 +72,7 @@ bind(
 
 new_local_repository(
    name = "com_googlesource_chromium_base_trace_event_common",
-    build_file = "bazel/BUILD.trace_event_common",
+    build_file = "//:bazel/BUILD.trace_event_common",
    path = "base/trace_event/common",
 )
diff --git a/deps/v8/bazel/defs.bzl b/deps/v8/bazel/defs.bzl
index 1ea9bc0da8de0f..f23f48ef03ae6a 100644
--- a/deps/v8/bazel/defs.bzl
+++ b/deps/v8/bazel/defs.bzl
@@ -313,10 +313,7 @@ def v8_library(
 # split the set of outputs by using OutputGroupInfo, that way we do not need to
 # run the torque generator twice.
 def _torque_files_impl(ctx):
-    if ctx.workspace_name == "v8":
-        v8root = "."
-    else:
-        v8root = "external/v8"
+    v8root = "."
 
     # Arguments
    args = []
@@ -414,7 +411,7 @@ def _v8_target_cpu_transition_impl(settings,
     # Check for an existing v8_target_cpu flag.
    if "@v8//bazel/config:v8_target_cpu" in settings:
        if settings["@v8//bazel/config:v8_target_cpu"] != "none":
-            return
+            return {}
 
     # Auto-detect target architecture based on the --cpu flag.
    mapping = {
@@ -480,9 +477,6 @@ _v8_mksnapshot = rule(
            cfg = "exec",
        ),
        "target_os": attr.string(mandatory = True),
-        "_allowlist_function_transition": attr.label(
-            default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
-        ),
        "prefix": attr.string(mandatory = True),
        "suffix": attr.string(mandatory = True),
    },
diff --git a/deps/v8/bazel/v8-non-pointer-compression.bzl b/deps/v8/bazel/v8-non-pointer-compression.bzl
index 7bb23591ca380c..a9f73728301254 100644
--- a/deps/v8/bazel/v8-non-pointer-compression.bzl
+++ b/deps/v8/bazel/v8-non-pointer-compression.bzl
@@ -1,4 +1,9 @@
-def _v8_disable_pointer_compression(settings, attr):
+"""
+Exposes the rule v8_binary_non_pointer_compression, which forces a label
+to be compiled without pointer compression.
+"""
+
+def _v8_disable_pointer_compression():
    return {
        "//:v8_enable_pointer_compression": "False",
    }
@@ -42,17 +47,6 @@ v8_binary_non_pointer_compression = rule(
        # Note specifically how it's configured with v8_target_cpu_transition,
        # which ensures that setting propagates down the graph.
        "binary": attr.label(cfg = v8_disable_pointer_compression),
-        # This is a stock Bazel requirement for any rule that uses Starlark
-        # transitions. It's okay to copy the below verbatim for all such rules.
-        #
-        # The purpose of this requirement is to give the ability to restrict
-        # which packages can invoke these rules, since Starlark transitions
-        # make much larger graphs possible that can have memory and performance
-        # consequences for your build. The whitelist defaults to "everything".
-        # But you can redefine it more strictly if you feel that's prudent.
-        "_allowlist_function_transition": attr.label(
-            default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
-        ),
    },
    # Making this executable means it works with "$ bazel run".
    executable = True,
diff --git a/deps/v8/build_overrides/build.gni b/deps/v8/build_overrides/build.gni
index 9830dfc51d0213..32896733f83bf1 100644
--- a/deps/v8/build_overrides/build.gni
+++ b/deps/v8/build_overrides/build.gni
@@ -42,6 +42,12 @@ enable_java_templates = false
 # Enables assertions on safety checks in libc++.
 enable_safe_libcxx = true
 
+# Enable assertions on safety checks, also in libstdc++.
+#
+# In case the C++ standard library implementation used is libstdc++, then
+# enable its own hardening checks.
+enable_safe_libstdcxx = true
+
 # Allows different projects to specify their own suppressions files.
 asan_suppressions_file = "//build/sanitizers/asan_suppressions.cc"
 lsan_suppressions_file = "//build/sanitizers/lsan_suppressions.cc"
diff --git a/deps/v8/gni/v8.gni b/deps/v8/gni/v8.gni
index 185de67a52237b..7c4e3ba5c34af2 100644
--- a/deps/v8/gni/v8.gni
+++ b/deps/v8/gni/v8.gni
@@ -109,7 +109,7 @@ declare_args() {
   v8_enable_direct_handle = ""
 
   # Use direct pointers in local handles.
-  v8_enable_direct_local = false
+  v8_enable_direct_local = ""
 
   # Check for off-stack allocated local handles.
   v8_enable_local_off_stack_check = false
@@ -212,11 +212,14 @@ if (v8_enable_turbofan == "") {
 assert(v8_enable_turbofan || !v8_enable_webassembly,
        "Webassembly is not available when Turbofan is disabled.")
 
-# Direct internal handles are enabled by default if conservative stack scanning
-# is enabled.
+# Direct internal handles and direct locals are enabled by default if
+# conservative stack scanning is enabled.
 if (v8_enable_direct_handle == "") {
   v8_enable_direct_handle = v8_enable_conservative_stack_scanning
 }
+if (v8_enable_direct_local == "") {
+  v8_enable_direct_local = v8_enable_conservative_stack_scanning
+}
 
 # Points to // in v8 stand-alone or to //v8/ in chromium. We need absolute
 # paths for all configs in templates as they are shared in different
@@ -245,7 +248,7 @@ if (is_debug && !v8_optimized_debug) {
 
   # TODO(crbug.com/621335) Rework this so that we don't have the confusion
   # between "optimize_speed" and "optimize_max".
-  if (is_posix && !is_android && !using_sanitizer) {
+  if (((is_posix && !is_android) || is_win) && !using_sanitizer) {
     v8_add_configs += [ "//build/config/compiler:optimize_speed" ]
   } else {
     v8_add_configs += [ "//build/config/compiler:optimize_max" ]
diff --git a/deps/v8/include/v8-context.h b/deps/v8/include/v8-context.h
index c81dc80c526ca2..4849c925806f95 100644
--- a/deps/v8/include/v8-context.h
+++ b/deps/v8/include/v8-context.h
@@ -84,6 +84,29 @@ class V8_EXPORT Context : public Data {
    * created by a previous call to Context::New with the same global
    * template. The state of the global object will be completely reset
    * and only object identity will remain.
+   *
+   * \param internal_fields_deserializer An optional callback used
+   * to deserialize fields set by
+   * v8::Object::SetAlignedPointerInInternalField() in wrapper objects
+   * from the default context snapshot. It should match the
+   * SerializeInternalFieldsCallback() used by
+   * v8::SnapshotCreator::SetDefaultContext() when the default context
+   * snapshot is created. It does not need to be configured if the default
+   * context snapshot contains no wrapper objects with pointer internal
+   * fields, or if no custom startup snapshot is configured
+   * in the v8::CreateParams used to create the isolate.
+   *
+   * \param microtask_queue An optional microtask queue used to manage
+   * the microtasks created in this context. If not set, the per-isolate
+   * default microtask queue is used.
+   *
+   * \param context_data_deserializer An optional callback used
+   * to deserialize embedder data set by
+   * v8::Context::SetAlignedPointerInEmbedderData() in the default
+   * context from the default context snapshot. It does not need to be
+   * configured if the default context snapshot contains no pointer embedder
+   * data, or if no custom startup snapshot is configured in the
+   * v8::CreateParams used to create the isolate.
    */
   static Local<Context> New(
       Isolate* isolate, ExtensionConfiguration* extensions = nullptr,
       MaybeLocal<Value> global_object = MaybeLocal<Value>(),
       DeserializeInternalFieldsCallback internal_fields_deserializer =
           DeserializeInternalFieldsCallback(),
-      MicrotaskQueue* microtask_queue = nullptr);
+      MicrotaskQueue* microtask_queue = nullptr,
+      DeserializeContextDataCallback context_data_deserializer =
+          DeserializeContextDataCallback());
 
   /**
    * Create a new context from a (non-default) context snapshot. There
   *
   * \param context_snapshot_index The index of the context snapshot to
   * deserialize from. Use v8::Context::New for the default snapshot.
   *
-   * \param embedder_fields_deserializer Optional callback to deserialize
-   * internal fields. It should match the SerializeInternalFieldCallback used
-   * to serialize.
+   * \param internal_fields_deserializer An optional callback used
+   * to deserialize fields set by
+   * v8::Object::SetAlignedPointerInInternalField() in wrapper objects
+   * from the default context snapshot. It does not need to be
+   * configured if there are no wrapper objects with internal
+   * pointer fields in the default context snapshot or if no startup
+   * snapshot is configured when the isolate is created.
   *
   * \param extensions See v8::Context::New.
   *
   * \param global_object See v8::Context::New.
+   *
+   * \param internal_fields_deserializer Similar to
+   * internal_fields_deserializer in v8::Context::New but applies to
+   * the context specified by the context_snapshot_index.
+   *
+   * \param microtask_queue See v8::Context::New.
+   *
+   * \param context_data_deserializer Similar to
+   * context_data_deserializer in v8::Context::New but applies to
+   * the context specified by the context_snapshot_index.
   */
  static MaybeLocal<Context> FromSnapshot(
      Isolate* isolate, size_t context_snapshot_index,
-      DeserializeInternalFieldsCallback embedder_fields_deserializer =
+      DeserializeInternalFieldsCallback internal_fields_deserializer =
          DeserializeInternalFieldsCallback(),
      ExtensionConfiguration* extensions = nullptr,
      MaybeLocal<Value> global_object = MaybeLocal<Value>(),
-      MicrotaskQueue* microtask_queue = nullptr);
+      MicrotaskQueue* microtask_queue = nullptr,
+      DeserializeContextDataCallback context_data_deserializer =
+          DeserializeContextDataCallback());
 
  /**
   * Returns a global object that isn't backed by an actual context.
@@ -181,27 +222,8 @@ class V8_EXPORT Context : public Data {
   * also be considered for freezing should be added to the children_out
   * parameter. Returns true if the operation completed successfully.
   */
-  V8_DEPRECATED("Please use the version that takes a LocalVector<Object>&")
-  virtual bool FreezeEmbedderObjectAndGetChildren(
-      Local<Object> obj, std::vector<Local<Object>>& children_out) {
-    // TODO(chromium:1454114): This method is temporarily defined in order to
-    // smoothen the transition to the version that follows.
-    return true;
-  }
  virtual bool FreezeEmbedderObjectAndGetChildren(
-      Local<Object> obj, LocalVector<Object>& children_out) {
-    // TODO(chromium:1454114): This method is temporarily defined and
-    // calls the previous version, soon to be deprecated, in order to
-    // smoothen the transition. When deprecation is completed, this
-    // will become an abstract method.
-    std::vector<Local<Object>> children;
-    START_ALLOW_USE_DEPRECATED()
-    // Temporarily use the old callback.
-    bool result = FreezeEmbedderObjectAndGetChildren(obj, children);
-    END_ALLOW_USE_DEPRECATED()
-    children_out.insert(children_out.end(), children.begin(), children.end());
-    return result;
-  }
+      Local<Object> obj, LocalVector<Object>& children_out) = 0;
 };
 
 /**
@@ -328,22 +350,6 @@ class V8_EXPORT Context : public Data {
                                                Local<Context> context);
  void SetAbortScriptExecution(AbortScriptExecutionCallback callback);
 
-  /**
-   * Returns the value that was set or restored by
-   * SetContinuationPreservedEmbedderData(), if any.
-   */
-  V8_DEPRECATE_SOON(
-      "Use v8::Isolate::GetContinuationPreservedEmbedderData instead")
-  Local<Value> GetContinuationPreservedEmbedderData() const;
-
-  /**
-   * Sets a value that will be stored on continuations and reset while the
-   * continuation runs.
-   */
-  V8_DEPRECATE_SOON(
-      "Use v8::Isolate::SetContinuationPreservedEmbedderData instead")
-  void SetContinuationPreservedEmbedderData(Local<Value> context);
-
  /**
   * Set or clear hooks to be invoked for promise lifecycle operations.
   * To clear a hook, set it to an empty v8::Function. Each function will
diff --git a/deps/v8/include/v8-function-callback.h b/deps/v8/include/v8-function-callback.h
index 22b5328d101f89..86a3ea72f4033c 100644
--- a/deps/v8/include/v8-function-callback.h
+++ b/deps/v8/include/v8-function-callback.h
@@ -82,8 +82,15 @@ class ReturnValue {
  friend class PropertyCallbackInfo;
  template <class F, class G, class H>
  friend class PersistentValueMapBase;
-  V8_INLINE void SetInternal(internal::Address value) { *value_ = value; }
-  V8_INLINE internal::Address GetDefaultValue();
+  V8_INLINE void SetInternal(internal::Address value);
+  // Setting the hole value has different meanings depending on the usage:
+  //  - for function template callbacks it means that the callback returns
+  //    the undefined value,
+  //  - for property getter callbacks it means that the callback returns
+  //    the undefined value (for property setter callbacks the value returned
+  //    is ignored),
+  //  - for interceptor callbacks it means that the request was not handled.
+  V8_INLINE void SetTheHole();
  V8_INLINE explicit ReturnValue(internal::Address* slot);
 
  // See FunctionCallbackInfo.
@@ -286,14 +293,28 @@ using FunctionCallback = void (*)(const FunctionCallbackInfo<Value>& info);
 template <typename T>
 ReturnValue<T>::ReturnValue(internal::Address* slot) : value_(slot) {}
 
+template <typename T>
+void ReturnValue<T>::SetInternal(internal::Address value) {
+#if V8_STATIC_ROOTS_BOOL
+  using I = internal::Internals;
+  // Ensure that the upper 32 bits are not modified. Compiler should be
+  // able to optimize this to a store of the lower 32 bits of the value.
+  // This is fine since the callback can return only JavaScript values which
+  // are either Smis or heap objects allocated in the main cage.
+  *value_ = I::DecompressTaggedField(*value_, I::CompressTagged(value));
+#else
+  *value_ = value;
+#endif  // V8_STATIC_ROOTS_BOOL
+}
+
 template <typename T>
 template <typename S>
 void ReturnValue<T>::Set(const Global<S>& handle) {
  static_assert(std::is_base_of<T, S>::value, "type check");
  if (V8_UNLIKELY(handle.IsEmpty())) {
-    *value_ = GetDefaultValue();
+    SetTheHole();
  } else {
-    *value_ = handle.ptr();
+    SetInternal(handle.ptr());
  }
 }
 
@@ -304,7 +325,7 @@ void ReturnValue<T>::SetNonEmpty(const Global<S>& handle) {
 #ifdef V8_ENABLE_CHECKS
  internal::VerifyHandleIsNonEmpty(handle.IsEmpty());
 #endif  // V8_ENABLE_CHECKS
-  *value_ = handle.ptr();
+  SetInternal(handle.ptr());
 }
 
 template <typename T>
@@ -312,9 +333,9 @@ template <typename S>
 void ReturnValue<T>::Set(const BasicTracedReference<S>& handle) {
  static_assert(std::is_base_of<T, S>::value, "type check");
  if (V8_UNLIKELY(handle.IsEmpty())) {
-    *value_ = GetDefaultValue();
+    SetTheHole();
  } else {
-    *value_ = handle.ptr();
+    SetInternal(handle.ptr());
  }
 }
 
@@ -325,7 +346,7 @@ void ReturnValue<T>::SetNonEmpty(const BasicTracedReference<S>& handle) {
 #ifdef V8_ENABLE_CHECKS
  internal::VerifyHandleIsNonEmpty(handle.IsEmpty());
 #endif  // V8_ENABLE_CHECKS
-  *value_ = handle.ptr();
+  SetInternal(handle.ptr());
 }
 
 template <typename T>
@@ -334,9 +355,9 @@ void ReturnValue<T>::Set(const Local<S> handle) {
  static_assert(std::is_void<T>::value || std::is_base_of<T, S>::value,
                "type check");
  if (V8_UNLIKELY(handle.IsEmpty())) {
-    *value_ = GetDefaultValue();
+    SetTheHole();
  } else {
-    *value_ = handle.ptr();
+    SetInternal(handle.ptr());
  }
 }
 
@@ -348,13 +369,13 @@ void ReturnValue<T>::SetNonEmpty(const Local<S> handle) {
 #ifdef V8_ENABLE_CHECKS
  internal::VerifyHandleIsNonEmpty(handle.IsEmpty());
 #endif  // V8_ENABLE_CHECKS
-  *value_ = handle.ptr();
+  SetInternal(handle.ptr());
 }
 
 template <typename T>
 void ReturnValue<T>::Set(double i) {
  static_assert(std::is_base_of<T, Number>::value, "type check");
-  Set(Number::New(GetIsolate(), i));
+  SetNonEmpty(Number::New(GetIsolate(), i));
 }
 
 template <typename T>
@@ -362,10 +383,10 @@ void ReturnValue<T>::Set(int32_t i) {
  static_assert(std::is_base_of<T, Integer>::value, "type check");
  using I = internal::Internals;
  if (V8_LIKELY(I::IsValidSmi(i))) {
-    *value_ = I::IntToSmi(i);
+    SetInternal(I::IntToSmi(i));
    return;
  }
-  Set(Integer::New(GetIsolate(), i));
+  SetNonEmpty(Integer::New(GetIsolate(), i));
 }
 
 template <typename T>
@@ -377,7 +398,7 @@ void ReturnValue<T>::Set(uint32_t i) {
    Set(static_cast<int32_t>(i));
    return;
  }
-  Set(Integer::NewFromUnsigned(GetIsolate(), i));
+  SetNonEmpty(Integer::NewFromUnsigned(GetIsolate(), i));
 }
 
 template <typename T>
@@ -386,7 +407,7 @@ void ReturnValue<T>::Set(uint16_t i) {
  using I = internal::Internals;
  static_assert(I::IsValidSmi(std::numeric_limits<uint16_t>::min()));
  static_assert(I::IsValidSmi(std::numeric_limits<uint16_t>::max()));
-  *value_ = I::IntToSmi(i);
+  SetInternal(I::IntToSmi(i));
 }
 
 template <typename T>
@@ -398,9 +419,8 @@ void ReturnValue<T>::Set(bool value) {
  internal::PerformCastCheck(
      internal::ValueHelper::SlotAsValue<Value, true>(value_));
 #endif  // V8_ENABLE_CHECKS
-  *value_ = I::DecompressTaggedField(
-      *value_, value ? I::StaticReadOnlyRoot::kTrueValue
-                     : I::StaticReadOnlyRoot::kFalseValue);
+  SetInternal(value ? I::StaticReadOnlyRoot::kTrueValue
+                    : I::StaticReadOnlyRoot::kFalseValue);
 #else
  int root_index;
  if (value) {
@@ -412,6 +432,16 @@ void ReturnValue<T>::Set(bool value) {
 #endif  // V8_STATIC_ROOTS_BOOL
 }
 
+template <typename T>
+void ReturnValue<T>::SetTheHole() {
+  using I = internal::Internals;
+#if V8_STATIC_ROOTS_BOOL
+  SetInternal(I::StaticReadOnlyRoot::kTheHoleValue);
+#else
+  *value_ = I::GetRoot(GetIsolate(), I::kTheHoleValueRootIndex);
+#endif  // V8_STATIC_ROOTS_BOOL
+}
+
 template <typename T>
 void ReturnValue<T>::SetNull() {
  static_assert(std::is_base_of<T, Primitive>::value, "type check");
@@ -421,8 +451,7 @@ void ReturnValue<T>::SetNull() {
  internal::PerformCastCheck(
      internal::ValueHelper::SlotAsValue<Value, true>(value_));
 #endif  // V8_ENABLE_CHECKS
-  *value_ =
-      I::DecompressTaggedField(*value_, I::StaticReadOnlyRoot::kNullValue);
+  SetInternal(I::StaticReadOnlyRoot::kNullValue);
 #else
  *value_ = I::GetRoot(GetIsolate(), I::kNullValueRootIndex);
 #endif  // V8_STATIC_ROOTS_BOOL
@@ -437,8 +466,7 @@ void ReturnValue<T>::SetUndefined() {
 #ifdef V8_ENABLE_CHECKS
  internal::PerformCastCheck(
      internal::ValueHelper::SlotAsValue<Value, true>(value_));
 #endif  // V8_ENABLE_CHECKS
-  *value_ =
-      I::DecompressTaggedField(*value_, I::StaticReadOnlyRoot::kUndefinedValue);
+  SetInternal(I::StaticReadOnlyRoot::kUndefinedValue);
 #else
  *value_ = I::GetRoot(GetIsolate(), I::kUndefinedValueRootIndex);
 #endif  // V8_STATIC_ROOTS_BOOL
@@ -453,8 +481,7 @@ void ReturnValue<T>::SetEmptyString() {
 #ifdef V8_ENABLE_CHECKS
  internal::PerformCastCheck(
      internal::ValueHelper::SlotAsValue<Value, true>(value_));
 #endif  // V8_ENABLE_CHECKS
-  *value_ =
-      I::DecompressTaggedField(*value_, I::StaticReadOnlyRoot::kEmptyString);
+  SetInternal(I::StaticReadOnlyRoot::kEmptyString);
 #else
  *value_ = I::GetRoot(GetIsolate(), I::kEmptyStringRootIndex);
 #endif  // V8_STATIC_ROOTS_BOOL
@@ -485,12 +512,6 @@ void ReturnValue<T>::Set(S* whatever) {
  static_assert(sizeof(S) < 0, "incompilable to prevent inadvertent misuse");
 }
 
-template <typename T>
-internal::Address ReturnValue<T>::GetDefaultValue() {
-  using I = internal::Internals;
-  return I::GetRoot(GetIsolate(), I::kTheHoleValueRootIndex);
-}
-
 template <typename T>
 FunctionCallbackInfo<T>::FunctionCallbackInfo(internal::Address* implicit_args,
                                               internal::Address* values,
diff --git a/deps/v8/include/v8-internal.h b/deps/v8/include/v8-internal.h
index 48001c68b0b433..322b22d98e8be4 100644
--- a/deps/v8/include/v8-internal.h
+++ b/deps/v8/include/v8-internal.h
@@ -425,7 +425,7 @@ constexpr uint64_t kAllExternalPointerTypeTags[] = {
  /* it is the Embedder's responsibility to ensure type safety (against */ \
  /* substitution) and lifetime validity of these objects. */ \
  V(kExternalObjectValueTag, TAG(13)) \
-  V(kCallHandlerInfoCallbackTag, TAG(14)) \
+  V(kFunctionTemplateInfoCallbackTag, TAG(14)) \
  V(kAccessorInfoGetterTag, TAG(15)) \
  V(kAccessorInfoSetterTag, TAG(16)) \
  V(kWasmInternalFunctionCallTargetTag, TAG(17)) \
@@ -478,7 +478,7 @@ V8_INLINE static constexpr bool IsSharedExternalPointerType(
 V8_INLINE static constexpr bool IsMaybeReadOnlyExternalPointerType(
    ExternalPointerTag tag) {
  return tag == kAccessorInfoGetterTag || tag == kAccessorInfoSetterTag ||
-         tag == kCallHandlerInfoCallbackTag;
+         tag == kFunctionTemplateInfoCallbackTag;
 }
 
 // Sanity checks.
@@ -746,23 +746,28 @@ class Internals {
 
 #if V8_STATIC_ROOTS_BOOL
 
-// These constants need to be initialized in api.cc.
+// These constants are copied from static-roots.h and guarded by static asserts.
 #define EXPORTED_STATIC_ROOTS_PTR_LIST(V) \
-  V(UndefinedValue)                       \
-  V(NullValue)                            \
-  V(TrueValue)                            \
-  V(FalseValue)                           \
-  V(EmptyString)                          \
-  V(TheHoleValue)
+  V(UndefinedValue, 0x69)                 \
+  V(NullValue, 0x85)                      \
+  V(TrueValue, 0xc9)                      \
+  V(FalseValue, 0xad)                     \
+  V(EmptyString, 0xa1)                    \
+  V(TheHoleValue, 0x719)
 
 using Tagged_t = uint32_t;
 struct StaticReadOnlyRoot {
-#define DEF_ROOT(name) V8_EXPORT static const Tagged_t k##name;
+#define DEF_ROOT(name, value) static constexpr Tagged_t k##name = value;
  EXPORTED_STATIC_ROOTS_PTR_LIST(DEF_ROOT)
 #undef DEF_ROOT
 
-  V8_EXPORT static const Tagged_t kFirstStringMap;
-  V8_EXPORT static const Tagged_t kLastStringMap;
+  static constexpr Tagged_t kFirstStringMap = 0xe5;
+  static constexpr Tagged_t kLastStringMap = 0x47d;
+
+#define PLUSONE(...) +1
+  static constexpr size_t kNumberOfExportedStaticRoots =
+      2 + EXPORTED_STATIC_ROOTS_PTR_LIST(PLUSONE);
+#undef PLUSONE
 };
 
 #endif  // V8_STATIC_ROOTS_BOOL
@@ -786,6 +791,11 @@ class Internals {
  static const int kJSObjectType = 0x421;
  static const int kFirstJSApiObjectType = 0x422;
  static const int kLastJSApiObjectType = 0x80A;
+  // Defines a range [kFirstEmbedderJSApiObjectType, kJSApiObjectTypesCount]
+  // of JSApiObject instance type values that an embedder can use.
+  static const int kFirstEmbedderJSApiObjectType = 0;
+  static const int kLastEmbedderJSApiObjectType =
+      kLastJSApiObjectType - kFirstJSApiObjectType;
 
  static const int kUndefinedOddballKind = 4;
  static const int kNullOddballKind = 3;
@@ -939,15 +949,15 @@ class Internals {
    Address base = *reinterpret_cast<Address*>(
        reinterpret_cast<uintptr_t>(isolate) + kIsolateCageBaseOffset);
    switch (index) {
-#define DECOMPRESS_ROOT(name) \
-  case k##name##RootIndex:    \
+#define DECOMPRESS_ROOT(name, ...) \
+  case k##name##RootIndex:         \
    return base + StaticReadOnlyRoot::k##name;
      EXPORTED_STATIC_ROOTS_PTR_LIST(DECOMPRESS_ROOT)
 #undef DECOMPRESS_ROOT
+#undef EXPORTED_STATIC_ROOTS_PTR_LIST
      default:
        break;
    }
-#undef EXPORTED_STATIC_ROOTS_PTR_LIST
 #endif  // V8_STATIC_ROOTS_BOOL
    return *GetRootSlot(isolate, index);
  }
@@ -1046,6 +1056,10 @@ class Internals {
    return addr & -static_cast<Address>(kPtrComprCageBaseAlignment);
  }
 
+  V8_INLINE static uint32_t CompressTagged(Address value) {
+    return static_cast<uint32_t>(value);
+  }
+
  V8_INLINE static Address DecompressTaggedField(Address heap_object_ptr,
                                                 uint32_t value) {
    Address base = GetPtrComprCageBaseFromOnHeapAddress(heap_object_ptr);
diff --git a/deps/v8/include/v8-isolate.h b/deps/v8/include/v8-isolate.h
index a3ceec01334ea0..585b513fac446a 100644
--- a/deps/v8/include/v8-isolate.h
+++ b/deps/v8/include/v8-isolate.h
@@ -562,6 +562,7 @@ class V8_EXPORT Isolate {
    kWasmTypeReflection = 137,
    kWasmExnRef = 138,
    kWasmTypedFuncRef = 139,
+    kInvalidatedStringWrapperToPrimitiveProtector = 140,
 
    // If you add new values here, you'll also need to update Chromium's:
    // web_feature.mojom, use_counter_callback.cc, and enums.xml. V8 changes to
diff --git a/deps/v8/include/v8-persistent-handle.h b/deps/v8/include/v8-persistent-handle.h
index 9db5af5dddd557..49518fe3631945 100644
--- a/deps/v8/include/v8-persistent-handle.h
+++ b/deps/v8/include/v8-persistent-handle.h
@@ -241,7 +241,7 @@ class NonCopyablePersistentTraits {
 * This will clone the contents of storage cell, but not any of the flags, etc.
 */
 template <class T>
-struct CopyablePersistentTraits {
+struct V8_DEPRECATED("Use v8::Global instead") CopyablePersistentTraits {
  using CopyablePersistent = Persistent<T, CopyablePersistentTraits<T>>;
  static const bool kResetInDestructor = true;
  template <class S, class M>
diff --git a/deps/v8/include/v8-platform.h b/deps/v8/include/v8-platform.h
index b61f27af6e3410..313c0287bcf882 100644
--- a/deps/v8/include/v8-platform.h
+++ b/deps/v8/include/v8-platform.h
@@ -76,8 +76,12 @@ class TaskRunner {
  /**
   * Schedules a task to be invoked by this TaskRunner. The TaskRunner
   * implementation takes ownership of |task|.
+   *
+   * Embedders should override PostTaskImpl instead of this.
   */
-  virtual void PostTask(std::unique_ptr<Task> task) = 0;
+  virtual void PostTask(std::unique_ptr<Task> task) {
+    PostTaskImpl(std::move(task), SourceLocation::Current());
+  }
 
  /**
   * Schedules a task to be invoked by this TaskRunner. The TaskRunner
@@ -93,16 +97,25 @@ class TaskRunner {
   * execution is not allowed to nest.
   *
   * Requires that |TaskRunner::NonNestableTasksEnabled()| is true.
+   *
+   * Embedders should override PostNonNestableTaskImpl instead of this.
   */
-  virtual void PostNonNestableTask(std::unique_ptr<Task> task) {}
+  virtual void PostNonNestableTask(std::unique_ptr<Task> task) {
+    PostNonNestableTaskImpl(std::move(task), SourceLocation::Current());
+  }
 
  /**
   * Schedules a task to be invoked by this TaskRunner. The task is scheduled
   * after the given number of seconds |delay_in_seconds|. The TaskRunner
   * implementation takes ownership of |task|.
+   *
+   * Embedders should override PostDelayedTaskImpl instead of this.
   */
  virtual void PostDelayedTask(std::unique_ptr<Task> task,
-                               double delay_in_seconds) = 0;
+                               double delay_in_seconds) {
+    PostDelayedTaskImpl(std::move(task), delay_in_seconds,
+                        SourceLocation::Current());
+  }
 
  /**
   * Schedules a task to be invoked by this TaskRunner. The task is scheduled
@@ -119,9 +132,14 @@ class TaskRunner {
   * execution is not allowed to nest.
   *
   * Requires that |TaskRunner::NonNestableDelayedTasksEnabled()| is true.
+   *
+   * Embedders should override PostNonNestableDelayedTaskImpl instead of this.
   */
  virtual void PostNonNestableDelayedTask(std::unique_ptr<Task> task,
-                                          double delay_in_seconds) {}
+                                          double delay_in_seconds) {
+    PostNonNestableDelayedTaskImpl(std::move(task), delay_in_seconds,
+                                   SourceLocation::Current());
+  }
 
  /**
   * Schedules an idle task to be invoked by this TaskRunner. The task is
@@ -130,8 +148,12 @@ class TaskRunner {
   * relative to other task types and may be starved for an arbitrarily long
   * time if no idle time is available. The TaskRunner implementation takes
   * ownership of |task|.
+   *
+   * Embedders should override PostIdleTaskImpl instead of this.
   */
-  virtual void PostIdleTask(std::unique_ptr<IdleTask> task) = 0;
+  virtual void PostIdleTask(std::unique_ptr<IdleTask> task) {
+    PostIdleTaskImpl(std::move(task), SourceLocation::Current());
+  }
 
  /**
   * Returns true if idle tasks are enabled for this TaskRunner.
@@ -153,6 +175,23 @@ class TaskRunner {
 
  TaskRunner(const TaskRunner&) = delete;
  TaskRunner& operator=(const TaskRunner&) = delete;
+
+ protected:
+  /**
+   * Implementation of the above methods with an additional `location`
+   * argument.
+   */
+  virtual void PostTaskImpl(std::unique_ptr<Task> task,
+                            const SourceLocation& location) {}
+  virtual void PostNonNestableTaskImpl(std::unique_ptr<Task> task,
+                                       const SourceLocation& location) {}
+  virtual void PostDelayedTaskImpl(std::unique_ptr<Task> task,
+                                   double delay_in_seconds,
+                                   const SourceLocation& location) {}
+  virtual void PostNonNestableDelayedTaskImpl(std::unique_ptr<Task> task,
+                                              double delay_in_seconds,
+                                              const SourceLocation& location) {}
+  virtual void PostIdleTaskImpl(std::unique_ptr<IdleTask> task,
+                                const SourceLocation& location) {}
 };
 
 /**
diff --git a/deps/v8/include/v8-script.h b/deps/v8/include/v8-script.h
index db22de9b18797b..75589863d9d1c7 100644
--- a/deps/v8/include/v8-script.h
+++ b/deps/v8/include/v8-script.h
@@ -291,11 +291,6 @@ class V8_EXPORT Module : public Data {
   * module_name is used solely for logging/debugging and doesn't affect module
   * behavior.
   */
-  V8_DEPRECATED("Please use the version that takes a MemorySpan")
-  static Local<Module> CreateSyntheticModule(
-      Isolate* isolate, Local<String> module_name,
-      const std::vector<Local<String>>& export_names,
-      SyntheticModuleEvaluationSteps evaluation_steps);
  static Local<Module> CreateSyntheticModule(
      Isolate* isolate, Local<String> module_name,
      const MemorySpan<const Local<String>>& export_names,
@@ -311,17 +306,6 @@ class V8_EXPORT Module : public Data {
  V8_WARN_UNUSED_RESULT Maybe<bool> SetSyntheticModuleExport(
      Isolate* isolate, Local<String> export_name, Local<Value> export_value);
 
-  /**
-   * Search the modules requested directly or indirectly by the module for
-   * any top-level await that has not yet resolved. If there is any, the
-   * returned vector contains a tuple of the unresolved module and a message
-   * with the pending top-level await.
-   * An embedder may call this before exiting to improve error messages.
-   */
-  V8_DEPRECATED("Please use GetStalledTopLevelAwaitMessages")
-  std::vector<std::tuple<Local<Module>, Local<Message>>>
-  GetStalledTopLevelAwaitMessage(Isolate* isolate);
-
  /**
   * Search the modules requested directly or indirectly by the module for
   * any top-level await that has not yet resolved. If there is any, the
diff --git a/deps/v8/include/v8-snapshot.h b/deps/v8/include/v8-snapshot.h
index a1dc0c3881c22d..9e5a53f134a82c 100644
--- a/deps/v8/include/v8-snapshot.h
+++ b/deps/v8/include/v8-snapshot.h
@@ -38,7 +38,7 @@ class V8_EXPORT StartupData {
 
 /**
 * Callback and supporting data used in SnapshotCreator to implement embedder
- * logic to serialize internal fields.
+ * logic to serialize internal fields of v8::Objects.
 * Internal fields that directly reference V8 objects are serialized without
 * calling this callback. Internal fields that contain aligned pointers are
 * serialized by this callback if it returns non-zero result. Otherwise it is
@@ -53,13 +53,24 @@ struct SerializeInternalFieldsCallback {
  CallbackFunction callback;
  void* data;
 };
-// Note that these fields are called "internal fields" in the API and called
-// "embedder fields" within V8.
-using SerializeEmbedderFieldsCallback = SerializeInternalFieldsCallback;
+
+/**
+ * Similar to SerializeInternalFieldsCallback, but works with the embedder data
+ * in a v8::Context.
+ */
+struct SerializeContextDataCallback {
+  using CallbackFunction = StartupData (*)(Local<Context> holder, int index,
+                                           void* data);
+  SerializeContextDataCallback(CallbackFunction function = nullptr,
+                               void* data_arg = nullptr)
+      : callback(function), data(data_arg) {}
+  CallbackFunction callback;
+  void* data;
+};
 
 /**
 * Callback and supporting data used to implement embedder logic to deserialize
- * internal fields.
+ * internal fields of v8::Objects.
 */
 struct DeserializeInternalFieldsCallback {
  using CallbackFunction = void (*)(Local<Object> holder, int index,
@@ -67,12 +78,24 @@ struct DeserializeInternalFieldsCallback {
  DeserializeInternalFieldsCallback(CallbackFunction function = nullptr,
                                    void* data_arg = nullptr)
      : callback(function), data(data_arg) {}
-  void (*callback)(Local<Object> holder, int index, StartupData payload,
-                   void* data);
+
+  CallbackFunction callback;
  void* data;
 };
 
-using DeserializeEmbedderFieldsCallback = DeserializeInternalFieldsCallback;
+/**
+ * Similar to DeserializeInternalFieldsCallback, but works with the embedder
+ * data in a v8::Context.
+ */
+struct DeserializeContextDataCallback {
+  using CallbackFunction = void (*)(Local<Context> holder, int index,
+                                    StartupData payload, void* data);
+  DeserializeContextDataCallback(CallbackFunction function = nullptr,
+                                 void* data_arg = nullptr)
+      : callback(function), data(data_arg) {}
+  CallbackFunction callback;
+  void* data;
+};
 
 /**
 * Helper class to create a snapshot data blob.
@@ -156,23 +179,37 @@ class V8_EXPORT SnapshotCreator {
   * The snapshot will not contain the global proxy, and we expect one or a
   * global object template to create one, to be provided upon deserialization.
   *
-   * \param callback optional callback to serialize internal fields.
+   * \param internal_fields_serializer An optional callback used to serialize
+   * internal pointer fields set by
+   * v8::Object::SetAlignedPointerInInternalField().
+   *
+   * \param context_data_serializer An optional callback used to serialize
+   * context embedder data set by
+   * v8::Context::SetAlignedPointerInEmbedderData().
+   *
   */
-  void SetDefaultContext(Local<Context> context,
-                         SerializeInternalFieldsCallback callback =
-                             SerializeInternalFieldsCallback());
+  void SetDefaultContext(
+      Local<Context> context,
+      SerializeInternalFieldsCallback internal_fields_serializer =
+          SerializeInternalFieldsCallback(),
+      SerializeContextDataCallback context_data_serializer =
+          SerializeContextDataCallback());
 
  /**
   * Add additional context to be included in the snapshot blob.
   * The snapshot will include the global proxy.
   *
-   * \param callback optional callback to serialize internal fields.
+   * \param internal_fields_serializer Similar to internal_fields_serializer
+   * in SetDefaultContext() but only applies to the context being added.
   *
-   * \returns the index of the context in the snapshot blob.
/** * Helper class to create a snapshot data blob. @@ -156,23 +179,37 @@ class V8_EXPORT SnapshotCreator { * The snapshot will not contain the global proxy, and we expect one or a * global object template to create one, to be provided upon deserialization. * - * \param callback optional callback to serialize internal fields. + * \param internal_fields_serializer An optional callback used to serialize + * internal pointer fields set by + * v8::Object::SetAlignedPointerInInternalField(). + * + * \param context_data_serializer An optional callback used to serialize + * context embedder data set by + * v8::Context::SetAlignedPointerInEmbedderData(). + * */ - void SetDefaultContext(Local<Context> context, - SerializeInternalFieldsCallback callback = - SerializeInternalFieldsCallback()); + void SetDefaultContext( + Local<Context> context, + SerializeInternalFieldsCallback internal_fields_serializer = + SerializeInternalFieldsCallback(), + SerializeContextDataCallback context_data_serializer = + SerializeContextDataCallback()); /** * Add additional context to be included in the snapshot blob. * The snapshot will include the global proxy. * - * \param callback optional callback to serialize internal fields. + * \param internal_fields_serializer Similar to internal_fields_serializer + * in SetDefaultContext() but only applies to the context being added. * - * \returns the index of the context in the snapshot blob. + * \param context_data_serializer Similar to context_data_serializer + * in SetDefaultContext() but only applies to the context being added. */ size_t AddContext(Local<Context> context, - SerializeInternalFieldsCallback callback = - SerializeInternalFieldsCallback()); + SerializeInternalFieldsCallback internal_fields_serializer = + SerializeInternalFieldsCallback(), + SerializeContextDataCallback context_data_serializer = + SerializeContextDataCallback()); /** * Attach arbitrary V8::Data to the context snapshot, which can be retrieved diff --git a/deps/v8/include/v8-statistics.h index aeca8cf44843e8..82b78f5ec65729 100644 --- a/deps/v8/include/v8-statistics.h +++ b/deps/v8/include/v8-statistics.h @@ -61,33 +61,8 @@ class V8_EXPORT MeasureMemoryDelegate { */ virtual bool ShouldMeasure(Local<Context> context) = 0; - /** - * This function is called when memory measurement finishes. - * - * \param context_sizes_in_bytes a vector of (context, size) pairs that - * includes each context for which ShouldMeasure returned true and that - * was not garbage collected while the memory measurement was in progress. - * - * \param unattributed_size_in_bytes total size of objects that were not - * attributed to any context (i.e. are likely shared objects). - */ - V8_DEPRECATED("Please use the version that takes a result struct") - virtual void MeasurementComplete( - const std::vector<std::pair<Local<Context>, size_t>>& - context_sizes_in_bytes, - size_t unattributed_size_in_bytes) {} - /** Holds the result of a memory measurement request. */ struct Result { - /** - * A vector of (context, size) pairs that includes each context for - * which ShouldMeasure returned true and that was not garbage collected - * while the memory measurement was in progress. - */ - V8_DEPRECATED("Please use contexts and sizes_in_bytes") - const std::vector<std::pair<Local<Context>, size_t>>& - context_sizes_in_bytes; - /** * Two spans of equal length: the first includes each context for which * ShouldMeasure returned true and that was not garbage collected while diff --git a/deps/v8/include/v8-template.h index 674d4201d5d782..6a0c898f4507d2 100644 --- a/deps/v8/include/v8-template.h +++ b/deps/v8/include/v8-template.h @@ -94,6 +94,7 @@ class V8_EXPORT Template : public Data { PropertyAttribute attribute, AccessControl settings, SideEffectType getter_side_effect_type = SideEffectType::kHasSideEffect, SideEffectType setter_side_effect_type = SideEffectType::kHasSideEffect); + V8_DEPRECATE_SOON("Use SetNativeDataProperty with Local<Name> instead") void SetNativeDataProperty( Local<String> name, AccessorGetterCallback getter, AccessorSetterCallback setter = nullptr, @@ -131,27 +132,35 @@ class V8_EXPORT Template : public Data { friend class FunctionTemplate; }; -// TODO(dcarney): Replace GenericNamedPropertyFooCallback with just -// NamedPropertyFooCallback. +/** + * Interceptor callbacks use this value to indicate whether the request was + * intercepted or not. + */ +enum class Intercepted : uint8_t { kNo = 0, kYes = 1 }; /** * Interceptor for get requests on an object. * - * Use `info.GetReturnValue().Set()` to set the return value of the - * intercepted get request. If the property does not exist the callback should - * not set the result and must not produce side effects. + * If the interceptor handles the request (i.e.
the property should not be + * looked up beyond the interceptor) it should + * - (optionally) use `info.GetReturnValue().Set()` to set the return value + * (by default the result is set to v8::Undefined), + * - return `Intercepted::kYes`. + * If the interceptor does not handle the request it must return + * `Intercepted::kNo` and it must not produce side effects. * * \param property The name of the property for which the request was * intercepted. * \param info Information about the intercepted request, such as - * isolate, receiver, return value, or whether running in `'use strict`' mode. + * isolate, receiver, return value, or whether running in `'use strict'` mode. * See `PropertyCallbackInfo`. * * \code - * void GetterCallback( - * Local<Name> name, - * const v8::PropertyCallbackInfo<v8::Value>& info) { - * info.GetReturnValue().Set(v8_num(42)); + * Intercepted GetterCallback( + * Local<Name> name, const v8::PropertyCallbackInfo<v8::Value>& info) { + * if (!IsKnownProperty(info.GetIsolate(), name)) return Intercepted::kNo; + * info.GetReturnValue().Set(v8_num(42)); + * return Intercepted::kYes; * } * * v8::Local<v8::FunctionTemplate> templ = @@ -171,18 +180,23 @@ class V8_EXPORT Template : public Data { * * See also `ObjectTemplate::SetHandler`. */ +using NamedPropertyGetterCallback = Intercepted (*)( + Local<Name> property, const PropertyCallbackInfo<Value>& info); +// This variant will be deprecated soon. +// +// Use `info.GetReturnValue().Set()` to set the return value of the +// intercepted get request. If the property does not exist the callback should +// not set the result and must not produce side effects. using GenericNamedPropertyGetterCallback = void (*)(Local<Name> property, const PropertyCallbackInfo<Value>& info); /** * Interceptor for set requests on an object. * - * Use `info.GetReturnValue()` to indicate whether the request was intercepted - * or not. If the setter successfully intercepts the request, i.e., if the - * request should not be further executed, call - * `info.GetReturnValue().Set(value)`. If the setter did not intercept the - * request, i.e., if the request should be handled as if no interceptor is - * present, do not not call `Set()` and do not produce side effects. + * If the interceptor handles the request (i.e. the property should not be + * looked up beyond the interceptor) it should return `Intercepted::kYes`. + * If the interceptor does not handle the request it must return + * `Intercepted::kNo` and it must not produce side effects. * * \param property The name of the property for which the request was * intercepted. @@ -192,9 +206,19 @@ using GenericNamedPropertyGetterCallback = * isolate, receiver, return value, or whether running in `'use strict'` mode. * See `PropertyCallbackInfo`. * - * See also - * `ObjectTemplate::SetHandler.` + * See also `ObjectTemplate::SetHandler.` */ +using NamedPropertySetterCallback = + Intercepted (*)(Local<Name> property, Local<Value> value, + const PropertyCallbackInfo<void>& info); +// This variant will be deprecated soon. +// +// Use `info.GetReturnValue()` to indicate whether the request was intercepted +// or not. If the setter successfully intercepts the request, i.e., if the +// request should not be further executed, call +// `info.GetReturnValue().Set(value)`. If the setter did not intercept the +// request, i.e., if the request should be handled as if no interceptor is +// present, do not call `Set()` and do not produce side effects. using GenericNamedPropertySetterCallback = void (*)(Local<Name> property, Local<Value> value, const PropertyCallbackInfo<Value>& info);
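A matching sketch for the new-style setter, mirroring the getter example above; IsKnownProperty and StoreValue are hypothetical helpers, not part of the API.

```cpp
v8::Intercepted SetterCallback(v8::Local<v8::Name> property,
                               v8::Local<v8::Value> value,
                               const v8::PropertyCallbackInfo<void>& info) {
  if (!IsKnownProperty(info.GetIsolate(), property)) {
    return v8::Intercepted::kNo;  // Let the regular lookup proceed.
  }
  StoreValue(info.GetIsolate(), property, value);  // Hypothetical side effect.
  return v8::Intercepted::kYes;  // The store is fully handled here.
}
```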
@@ -204,10 +228,13 @@ using GenericNamedPropertySetterCallback = * property, e.g., getOwnPropertyDescriptor(), propertyIsEnumerable(), and * defineProperty(). * - * Use `info.GetReturnValue().Set(value)` to set the property attributes. The - * value is an integer encoding a `v8::PropertyAttribute`. If the property does - * not exist the callback should not set the result and must not produce side - * effects. + * If the interceptor handles the request (i.e. the property should not be + * looked up beyond the interceptor) it should + * - use `info.GetReturnValue().Set()` to set the result to an Integer value + * encoding the `v8::PropertyAttribute` bits, + * - return `Intercepted::kYes`. + * If the interceptor does not handle the request it must return + * `Intercepted::kNo` and it must not produce side effects. * * \param property The name of the property for which the request was * intercepted. @@ -219,21 +246,29 @@ using GenericNamedPropertySetterCallback = * they do not return the attributes. For example, `hasOwnProperty()` can * trigger this interceptor depending on the state of the object. * - * See also - * `ObjectTemplate::SetHandler.` + * See also `ObjectTemplate::SetHandler.` */ +using NamedPropertyQueryCallback = Intercepted (*)( + Local<Name> property, const PropertyCallbackInfo<Integer>& info); +// This variant will be deprecated soon. +// +// Use `info.GetReturnValue().Set(value)` to set the property attributes. The +// value is an integer encoding a `v8::PropertyAttribute`. If the property does +// not exist the callback should not set the result and must not produce side +// effects. using GenericNamedPropertyQueryCallback = void (*)(Local<Name> property, const PropertyCallbackInfo<Integer>& info); /** * Interceptor for delete requests on an object. * - * Use `info.GetReturnValue()` to indicate whether the request was intercepted - * or not. If the deleter successfully intercepts the request, i.e., if the - * request should not be further executed, call - * `info.GetReturnValue().Set(value)` with a boolean `value`. The `value` is - * used as the return value of `delete`. If the deleter does not intercept the - * request then it should not set the result and must not produce side effects. + * If the interceptor handles the request (i.e. the property should not be + * looked up beyond the interceptor) it should + * - use `info.GetReturnValue().Set()` to set the result to a Boolean value + * indicating whether the property deletion was successful or not, + * - return `Intercepted::kYes`. + * If the interceptor does not handle the request it must return + * `Intercepted::kNo` and it must not produce side effects. * * \param property The name of the property for which the request was * intercepted. @@ -247,6 +282,16 @@ using GenericNamedPropertyQueryCallback = * * See also `ObjectTemplate::SetHandler.` */ +using NamedPropertyDeleterCallback = Intercepted (*)( + Local<Name> property, const PropertyCallbackInfo<Boolean>& info); +// This variant will be deprecated soon. +// +// Use `info.GetReturnValue()` to indicate whether the request was intercepted +// or not. If the deleter successfully intercepts the request, i.e., if the +// request should not be further executed, call +// `info.GetReturnValue().Set(value)` with a boolean `value`. The `value` is +// used as the return value of `delete`. If the deleter does not intercept the +// request then it should not set the result and must not produce side effects. using GenericNamedPropertyDeleterCallback = void (*)(Local<Name> property, const PropertyCallbackInfo<Boolean>& info);
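And a sketch of a new-style query callback reporting attributes for intercepted properties (IsKnownProperty is again a hypothetical helper):

```cpp
v8::Intercepted QueryCallback(
    v8::Local<v8::Name> property,
    const v8::PropertyCallbackInfo<v8::Integer>& info) {
  if (!IsKnownProperty(info.GetIsolate(), property)) {
    return v8::Intercepted::kNo;
  }
  // Report the intercepted property as read-only and non-enumerable.
  info.GetReturnValue().Set(v8::ReadOnly | v8::DontEnum);
  return v8::Intercepted::kYes;
}
```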
@@ -256,18 +301,19 @@ using GenericNamedPropertyDeleterCallback = * * Note: The values in the array must be of type v8::Name. */ -using GenericNamedPropertyEnumeratorCallback = +using NamedPropertyEnumeratorCallback = void (*)(const PropertyCallbackInfo<Array>& info); +// This variant will be deprecated soon. +// This is just a renaming of the typedef. +using GenericNamedPropertyEnumeratorCallback = NamedPropertyEnumeratorCallback; /** * Interceptor for defineProperty requests on an object. * - * Use `info.GetReturnValue()` to indicate whether the request was intercepted - * or not. If the definer successfully intercepts the request, i.e., if the - * request should not be further executed, call - * `info.GetReturnValue().Set(value)`. If the definer did not intercept the - * request, i.e., if the request should be handled as if no interceptor is - * present, do not not call `Set()` and do not produce side effects. + * If the interceptor handles the request (i.e. the property should not be + * looked up beyond the interceptor) it should return `Intercepted::kYes`. + * If the interceptor does not handle the request it must return + * `Intercepted::kNo` and it must not produce side effects. * * \param property The name of the property for which the request was * intercepted. @@ -279,6 +325,17 @@ using GenericNamedPropertyEnumeratorCallback = * * See also `ObjectTemplate::SetHandler`. */ +using NamedPropertyDefinerCallback = + Intercepted (*)(Local<Name> property, const PropertyDescriptor& desc, + const PropertyCallbackInfo<void>& info); +// This variant will be deprecated soon. +// +// Use `info.GetReturnValue()` to indicate whether the request was intercepted +// or not. If the definer successfully intercepts the request, i.e., if the +// request should not be further executed, call +// `info.GetReturnValue().Set(value)`. If the definer did not intercept the +// request, i.e., if the request should be handled as if no interceptor is +// present, do not call `Set()` and do not produce side effects. using GenericNamedPropertyDefinerCallback = void (*)(Local<Name> property, const PropertyDescriptor& desc, const PropertyCallbackInfo<Value>& info); @@ -286,10 +343,14 @@ using GenericNamedPropertyDefinerCallback = /** * Interceptor for getOwnPropertyDescriptor requests on an object. * - * Use `info.GetReturnValue().Set()` to set the return value of the - * intercepted request. The return value must be an object that - * can be converted to a PropertyDescriptor, e.g., a `v8::value` returned from - * `v8::Object::getOwnPropertyDescriptor`. + * If the interceptor handles the request (i.e. the property should not be + * looked up beyond the interceptor) it should + * - use `info.GetReturnValue().Set()` to set the return value which must be + * an object that can be converted to a PropertyDescriptor (for example, + * a value returned by `v8::Object::getOwnPropertyDescriptor`), + * - return `Intercepted::kYes`. + * If the interceptor does not handle the request it must return + * `Intercepted::kNo` and it must not produce side effects. * * \param property The name of the property for which the request was * intercepted. @@ -302,18 +363,36 @@ using GenericNamedPropertyDefinerCallback = * * See also `ObjectTemplate::SetHandler`. */ +using NamedPropertyDescriptorCallback = Intercepted (*)( + Local<Name> property, const PropertyCallbackInfo<Value>& info); +// This variant will be deprecated soon.
+// +// Use `info.GetReturnValue().Set()` to set the return value of the +// intercepted request. The return value must be an object that +// can be converted to a PropertyDescriptor, e.g., a `v8::Value` returned from +// `v8::Object::getOwnPropertyDescriptor`. using GenericNamedPropertyDescriptorCallback = void (*)(Local<Name> property, const PropertyCallbackInfo<Value>& info); +// TODO(ishell): Rename IndexedPropertyXxxCallbackV2 back to +// IndexedPropertyXxxCallback once the old IndexedPropertyXxxCallback is +// removed. + /** * See `v8::GenericNamedPropertyGetterCallback`. */ +using IndexedPropertyGetterCallbackV2 = + Intercepted (*)(uint32_t index, const PropertyCallbackInfo<Value>& info); +// This variant will be deprecated soon. using IndexedPropertyGetterCallback = void (*)(uint32_t index, const PropertyCallbackInfo<Value>& info); /** * See `v8::GenericNamedPropertySetterCallback`. */ +using IndexedPropertySetterCallbackV2 = Intercepted (*)( + uint32_t index, Local<Value> value, const PropertyCallbackInfo<void>& info); +// This variant will be deprecated soon. using IndexedPropertySetterCallback = void (*)(uint32_t index, Local<Value> value, const PropertyCallbackInfo<Value>& info); @@ -321,12 +400,18 @@ using IndexedPropertySetterCallback = /** * See `v8::GenericNamedPropertyQueryCallback`. */ +using IndexedPropertyQueryCallbackV2 = + Intercepted (*)(uint32_t index, const PropertyCallbackInfo<Integer>& info); +// This variant will be deprecated soon. using IndexedPropertyQueryCallback = void (*)(uint32_t index, const PropertyCallbackInfo<Integer>& info); /** * See `v8::GenericNamedPropertyDeleterCallback`. */ +using IndexedPropertyDeleterCallbackV2 = + Intercepted (*)(uint32_t index, const PropertyCallbackInfo<Boolean>& info); +// This variant will be deprecated soon. using IndexedPropertyDeleterCallback = void (*)(uint32_t index, const PropertyCallbackInfo<Boolean>& info); @@ -342,6 +427,10 @@ using IndexedPropertyEnumeratorCallback = /** * See `v8::GenericNamedPropertyDefinerCallback`. */ +using IndexedPropertyDefinerCallbackV2 = + Intercepted (*)(uint32_t index, const PropertyDescriptor& desc, + const PropertyCallbackInfo<void>& info); +// This variant will be deprecated soon. using IndexedPropertyDefinerCallback = void (*)(uint32_t index, const PropertyDescriptor& desc, const PropertyCallbackInfo<Value>& info); @@ -349,6 +438,9 @@ using IndexedPropertyDefinerCallback = /** * See `v8::GenericNamedPropertyDescriptorCallback`. */ +using IndexedPropertyDescriptorCallbackV2 = + Intercepted (*)(uint32_t index, const PropertyCallbackInfo<Value>& info); +// This variant will be deprecated soon. using IndexedPropertyDescriptorCallback = void (*)(uint32_t index, const PropertyCallbackInfo<Value>& info); @@ -611,7 +703,8 @@ enum class PropertyHandlerFlags { */ kNone = 0, - /** Will not call into interceptor for properties on the receiver or prototype + /** + * Will not call into interceptor for properties on the receiver or prototype * chain, i.e., only call into interceptor for properties that do not exist. * Currently only valid for named interceptors. */ @@ -627,9 +720,49 @@ enum class PropertyHandlerFlags { * The getter, query, enumerator callbacks do not produce side effects. */ kHasNoSideEffect = 1 << 2, + + /** + * This flag is used to distinguish which callbacks were provided - + * GenericNamedPropertyXXXCallback (old signature) or + * NamedPropertyXXXCallback (new signature). + * DO NOT use this flag, it'll be removed once embedders migrate to the new + * callback signatures.
+ */ + kInternalNewCallbacksSignatures = 1 << 10, }; struct NamedPropertyHandlerConfiguration { + private: + static constexpr PropertyHandlerFlags WithNewSignatureFlag( + PropertyHandlerFlags flags) { + return static_cast<PropertyHandlerFlags>( + static_cast<int>(flags) | + static_cast<int>( + PropertyHandlerFlags::kInternalNewCallbacksSignatures)); + } + + public: + NamedPropertyHandlerConfiguration( + NamedPropertyGetterCallback getter, // + NamedPropertySetterCallback setter, // + NamedPropertyQueryCallback query, // + NamedPropertyDeleterCallback deleter, // + NamedPropertyEnumeratorCallback enumerator, // + NamedPropertyDefinerCallback definer, // + NamedPropertyDescriptorCallback descriptor, // + Local<Value> data = Local<Value>(), + PropertyHandlerFlags flags = PropertyHandlerFlags::kNone) + : getter(reinterpret_cast<void*>(getter)), + setter(reinterpret_cast<void*>(setter)), + query(reinterpret_cast<void*>(query)), + deleter(reinterpret_cast<void*>(deleter)), + enumerator(enumerator), + definer(reinterpret_cast<void*>(definer)), + descriptor(reinterpret_cast<void*>(descriptor)), + data(data), + flags(WithNewSignatureFlag(flags)) {} + + // This variant will be deprecated soon. NamedPropertyHandlerConfiguration( GenericNamedPropertyGetterCallback getter, GenericNamedPropertySetterCallback setter, @@ -640,35 +773,73 @@ struct NamedPropertyHandlerConfiguration { GenericNamedPropertyQueryCallback query, GenericNamedPropertyDeleterCallback deleter, GenericNamedPropertyEnumeratorCallback enumerator, GenericNamedPropertyDefinerCallback definer, GenericNamedPropertyDescriptorCallback descriptor, Local<Value> data = Local<Value>(), PropertyHandlerFlags flags = PropertyHandlerFlags::kNone) - : getter(getter), - setter(setter), - query(query), - deleter(deleter), + : getter(reinterpret_cast<void*>(getter)), + setter(reinterpret_cast<void*>(setter)), + query(reinterpret_cast<void*>(query)), + deleter(reinterpret_cast<void*>(deleter)), enumerator(enumerator), - definer(definer), - descriptor(descriptor), + definer(reinterpret_cast<void*>(definer)), + descriptor(reinterpret_cast<void*>(descriptor)), data(data), flags(flags) {} - NamedPropertyHandlerConfiguration( - /** Note: getter is required */ - GenericNamedPropertyGetterCallback getter = nullptr, + explicit NamedPropertyHandlerConfiguration( + NamedPropertyGetterCallback getter, + NamedPropertySetterCallback setter = nullptr, + NamedPropertyQueryCallback query = nullptr, + NamedPropertyDeleterCallback deleter = nullptr, + NamedPropertyEnumeratorCallback enumerator = nullptr, + Local<Value> data = Local<Value>(), + PropertyHandlerFlags flags = PropertyHandlerFlags::kNone) + : getter(reinterpret_cast<void*>(getter)), + setter(reinterpret_cast<void*>(setter)), + query(reinterpret_cast<void*>(query)), + deleter(reinterpret_cast<void*>(deleter)), + enumerator(enumerator), + definer(nullptr), + descriptor(nullptr), + data(data), + flags(WithNewSignatureFlag(flags)) {}
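Tying the constructor just shown to ObjectTemplate::SetHandler, a minimal wiring sketch; GetterCallback and SetterCallback are the new-style sketches above, and `isolate` is assumed to be the current v8::Isolate:

```cpp
v8::Local<v8::ObjectTemplate> templ = v8::ObjectTemplate::New(isolate);
templ->SetHandler(v8::NamedPropertyHandlerConfiguration(
    GetterCallback,  // new-style: returns v8::Intercepted
    SetterCallback,  // the remaining callbacks may stay nullptr
    /*query=*/nullptr,
    /*deleter=*/nullptr,
    /*enumerator=*/nullptr));
```

Because this overload takes the new callback types, the constructor tags the configuration with the internal new-signatures flag automatically.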
+ + // This variant will be deprecated soon. + explicit NamedPropertyHandlerConfiguration( + GenericNamedPropertyGetterCallback getter, GenericNamedPropertySetterCallback setter = nullptr, GenericNamedPropertyQueryCallback query = nullptr, GenericNamedPropertyDeleterCallback deleter = nullptr, GenericNamedPropertyEnumeratorCallback enumerator = nullptr, Local<Value> data = Local<Value>(), PropertyHandlerFlags flags = PropertyHandlerFlags::kNone) - : getter(getter), - setter(setter), - query(query), - deleter(deleter), + : getter(reinterpret_cast<void*>(getter)), + setter(reinterpret_cast<void*>(setter)), + query(reinterpret_cast<void*>(query)), + deleter(reinterpret_cast<void*>(deleter)), enumerator(enumerator), definer(nullptr), descriptor(nullptr), data(data), flags(flags) {} + NamedPropertyHandlerConfiguration( + NamedPropertyGetterCallback getter, // + NamedPropertySetterCallback setter, // + NamedPropertyDescriptorCallback descriptor, // + NamedPropertyDeleterCallback deleter, // + NamedPropertyEnumeratorCallback enumerator, // + NamedPropertyDefinerCallback definer, // + Local<Value> data = Local<Value>(), + PropertyHandlerFlags flags = PropertyHandlerFlags::kNone) + : getter(reinterpret_cast<void*>(getter)), + setter(reinterpret_cast<void*>(setter)), + query(nullptr), + deleter(reinterpret_cast<void*>(deleter)), + enumerator(enumerator), + definer(reinterpret_cast<void*>(definer)), + descriptor(reinterpret_cast<void*>(descriptor)), + data(data), + flags(WithNewSignatureFlag(flags)) {} + + // This variant will be deprecated soon. NamedPropertyHandlerConfiguration( GenericNamedPropertyGetterCallback getter, GenericNamedPropertySetterCallback setter, @@ -678,66 +849,136 @@ struct NamedPropertyHandlerConfiguration { GenericNamedPropertyDefinerCallback definer, Local<Value> data = Local<Value>(), PropertyHandlerFlags flags = PropertyHandlerFlags::kNone) - : getter(getter), - setter(setter), query(nullptr), - deleter(deleter), + : getter(reinterpret_cast<void*>(getter)), + setter(reinterpret_cast<void*>(setter)), query(nullptr), + deleter(reinterpret_cast<void*>(deleter)), enumerator(enumerator), - definer(definer), - descriptor(descriptor), + definer(reinterpret_cast<void*>(definer)), + descriptor(reinterpret_cast<void*>(descriptor)), data(data), flags(flags) {} - GenericNamedPropertyGetterCallback getter; - GenericNamedPropertySetterCallback setter; - GenericNamedPropertyQueryCallback query; - GenericNamedPropertyDeleterCallback deleter; - GenericNamedPropertyEnumeratorCallback enumerator; - GenericNamedPropertyDefinerCallback definer; - GenericNamedPropertyDescriptorCallback descriptor; + void* getter; // [Generic]NamedPropertyGetterCallback + void* setter; // [Generic]NamedPropertySetterCallback + void* query; // [Generic]NamedPropertyQueryCallback + void* deleter; // [Generic]NamedPropertyDeleterCallback + NamedPropertyEnumeratorCallback enumerator; + void* definer; // [Generic]NamedPropertyDefinerCallback + void* descriptor; // [Generic]NamedPropertyDescriptorCallback Local<Value> data; PropertyHandlerFlags flags; }; struct IndexedPropertyHandlerConfiguration { + private: + static constexpr PropertyHandlerFlags WithNewSignatureFlag( + PropertyHandlerFlags flags) { + return static_cast<PropertyHandlerFlags>( + static_cast<int>(flags) | + static_cast<int>( + PropertyHandlerFlags::kInternalNewCallbacksSignatures)); + } + + public: IndexedPropertyHandlerConfiguration( - IndexedPropertyGetterCallback getter, - IndexedPropertySetterCallback setter, IndexedPropertyQueryCallback query, - IndexedPropertyDeleterCallback deleter, - IndexedPropertyEnumeratorCallback enumerator, - IndexedPropertyDefinerCallback definer, - IndexedPropertyDescriptorCallback descriptor, + IndexedPropertyGetterCallbackV2 getter, // +
IndexedPropertySetterCallbackV2 setter, // + IndexedPropertyQueryCallbackV2 query, // + IndexedPropertyDeleterCallbackV2 deleter, // + IndexedPropertyEnumeratorCallback enumerator, // + IndexedPropertyDefinerCallbackV2 definer, // + IndexedPropertyDescriptorCallbackV2 descriptor, // Local<Value> data = Local<Value>(), PropertyHandlerFlags flags = PropertyHandlerFlags::kNone) - : getter(getter), - setter(setter), - query(query), - deleter(deleter), + : getter(reinterpret_cast<void*>(getter)), + setter(reinterpret_cast<void*>(setter)), + query(reinterpret_cast<void*>(query)), + deleter(reinterpret_cast<void*>(deleter)), enumerator(enumerator), - definer(definer), - descriptor(descriptor), + definer(reinterpret_cast<void*>(definer)), + descriptor(reinterpret_cast<void*>(descriptor)), data(data), - flags(flags) {} + flags(WithNewSignatureFlag(flags)) {} + // This variant will be deprecated soon. IndexedPropertyHandlerConfiguration( - /** Note: getter is required */ - IndexedPropertyGetterCallback getter = nullptr, + IndexedPropertyGetterCallback getter, // + IndexedPropertySetterCallback setter, // + IndexedPropertyQueryCallback query, // + IndexedPropertyDeleterCallback deleter, // + IndexedPropertyEnumeratorCallback enumerator, // + IndexedPropertyDefinerCallback definer, // + IndexedPropertyDescriptorCallback descriptor, // + Local<Value> data = Local<Value>(), + PropertyHandlerFlags flags = PropertyHandlerFlags::kNone) + : getter(reinterpret_cast<void*>(getter)), + setter(reinterpret_cast<void*>(setter)), + query(reinterpret_cast<void*>(query)), + deleter(reinterpret_cast<void*>(deleter)), + enumerator(enumerator), + definer(reinterpret_cast<void*>(definer)), + descriptor(reinterpret_cast<void*>(descriptor)), + data(data), + flags(flags) {} + + explicit IndexedPropertyHandlerConfiguration( + IndexedPropertyGetterCallbackV2 getter = nullptr, + IndexedPropertySetterCallbackV2 setter = nullptr, + IndexedPropertyQueryCallbackV2 query = nullptr, + IndexedPropertyDeleterCallbackV2 deleter = nullptr, + IndexedPropertyEnumeratorCallback enumerator = nullptr, + Local<Value> data = Local<Value>(), + PropertyHandlerFlags flags = PropertyHandlerFlags::kNone) + : getter(reinterpret_cast<void*>(getter)), + setter(reinterpret_cast<void*>(setter)), + query(reinterpret_cast<void*>(query)), + deleter(reinterpret_cast<void*>(deleter)), + enumerator(enumerator), + definer(nullptr), + descriptor(nullptr), + data(data), + flags(WithNewSignatureFlag(flags)) {} + + // This variant will be deprecated soon.
+ explicit IndexedPropertyHandlerConfiguration( + IndexedPropertyGetterCallback getter, IndexedPropertySetterCallback setter = nullptr, IndexedPropertyQueryCallback query = nullptr, IndexedPropertyDeleterCallback deleter = nullptr, IndexedPropertyEnumeratorCallback enumerator = nullptr, Local<Value> data = Local<Value>(), PropertyHandlerFlags flags = PropertyHandlerFlags::kNone) - : getter(getter), - setter(setter), - query(query), - deleter(deleter), + : getter(reinterpret_cast<void*>(getter)), + setter(reinterpret_cast<void*>(setter)), + query(reinterpret_cast<void*>(query)), + deleter(reinterpret_cast<void*>(deleter)), enumerator(enumerator), definer(nullptr), descriptor(nullptr), data(data), flags(flags) {} + IndexedPropertyHandlerConfiguration( + IndexedPropertyGetterCallbackV2 getter, + IndexedPropertySetterCallbackV2 setter, + IndexedPropertyDescriptorCallbackV2 descriptor, + IndexedPropertyDeleterCallbackV2 deleter, + IndexedPropertyEnumeratorCallback enumerator, + IndexedPropertyDefinerCallbackV2 definer, + Local<Value> data = Local<Value>(), + PropertyHandlerFlags flags = PropertyHandlerFlags::kNone) + : getter(reinterpret_cast<void*>(getter)), + setter(reinterpret_cast<void*>(setter)), + query(nullptr), + deleter(reinterpret_cast<void*>(deleter)), + enumerator(enumerator), + definer(reinterpret_cast<void*>(definer)), + descriptor(reinterpret_cast<void*>(descriptor)), + data(data), + flags(WithNewSignatureFlag(flags)) {} + + // This variant will be deprecated soon. IndexedPropertyHandlerConfiguration( IndexedPropertyGetterCallback getter, IndexedPropertySetterCallback setter, @@ -747,23 +988,23 @@ struct IndexedPropertyHandlerConfiguration { IndexedPropertyDefinerCallback definer, Local<Value> data = Local<Value>(), PropertyHandlerFlags flags = PropertyHandlerFlags::kNone) - : getter(getter), - setter(setter), query(nullptr), - deleter(deleter), + : getter(reinterpret_cast<void*>(getter)), + setter(reinterpret_cast<void*>(setter)), query(nullptr), + deleter(reinterpret_cast<void*>(deleter)), enumerator(enumerator), - definer(definer), - descriptor(descriptor), + definer(reinterpret_cast<void*>(definer)), + descriptor(reinterpret_cast<void*>(descriptor)), data(data), flags(flags) {} - IndexedPropertyGetterCallback getter; - IndexedPropertySetterCallback setter; - IndexedPropertyQueryCallback query; - IndexedPropertyDeleterCallback deleter; + void* getter; // IndexedPropertyGetterCallback[V2] + void* setter; // IndexedPropertySetterCallback[V2] + void* query; // IndexedPropertyQueryCallback[V2] + void* deleter; // IndexedPropertyDeleterCallback[V2] IndexedPropertyEnumeratorCallback enumerator; - IndexedPropertyDefinerCallback definer; - IndexedPropertyDescriptorCallback descriptor; + void* definer; // IndexedPropertyDefinerCallback[V2] + void* descriptor; // IndexedPropertyDescriptorCallback[V2] Local<Value> data; PropertyHandlerFlags flags; }; @@ -804,6 +1045,7 @@ class V8_EXPORT ObjectTemplate : public Template { * \param attribute The attributes of the property for which an accessor * is added. */ + V8_DEPRECATE_SOON("Use SetAccessor with Local<Name> instead") void SetAccessor( Local<String> name, AccessorGetterCallback getter, AccessorSetterCallback setter = nullptr, @@ -846,7 +1088,7 @@ class V8_EXPORT ObjectTemplate : public Template { * \param data A piece of data that will be passed to the callbacks * whenever they are invoked.
*/ - // TODO(dcarney): deprecate + V8_DEPRECATE_SOON("Use SetHandler instead") void SetIndexedPropertyHandler( IndexedPropertyGetterCallback getter, IndexedPropertySetterCallback setter = nullptr, @@ -951,8 +1193,7 @@ class V8_EXPORT ObjectTemplate : public Template { private: ObjectTemplate(); - static Local<ObjectTemplate> New(internal::Isolate* isolate, - Local<FunctionTemplate> constructor); + static void CheckCast(Data* that); friend class FunctionTemplate; }; diff --git a/deps/v8/include/v8-typed-array.h index 9cb645fb02c4d1..66e21f470acb7e 100644 --- a/deps/v8/include/v8-typed-array.h +++ b/deps/v8/include/v8-typed-array.h @@ -249,6 +249,30 @@ class V8_EXPORT Int32Array : public TypedArray { static void CheckCast(Value* obj); }; +/** + * An instance of Float16Array constructor. + */ +class V8_EXPORT Float16Array : public TypedArray { + static constexpr size_t kMaxLength = + TypedArray::kMaxByteLength / sizeof(uint16_t); + + public: + static Local<Float16Array> New(Local<ArrayBuffer> array_buffer, + size_t byte_offset, size_t length); + static Local<Float16Array> New(Local<SharedArrayBuffer> shared_array_buffer, + size_t byte_offset, size_t length); + V8_INLINE static Float16Array* Cast(Value* value) { +#ifdef V8_ENABLE_CHECKS + CheckCast(value); +#endif + return static_cast<Float16Array*>(value); + } + + private: + Float16Array(); + static void CheckCast(Value* obj); +}; + /** * An instance of Float32Array constructor (ES6 draft 15.13.6). */ diff --git a/deps/v8/include/v8-value.h index 9356cd626163dd..ac04525d86d759 100644 --- a/deps/v8/include/v8-value.h +++ b/deps/v8/include/v8-value.h @@ -301,6 +301,11 @@ class V8_EXPORT Value : public Data { */ bool IsInt32Array() const; + /** + * Returns true if this value is a Float16Array. + */ + bool IsFloat16Array() const; + /** * Returns true if this value is a Float32Array. */ diff --git a/deps/v8/include/v8-version.h index c3c0da86379d07..f16596a58fa667 100644 --- a/deps/v8/include/v8-version.h +++ b/deps/v8/include/v8-version.h @@ -9,9 +9,9 @@ // NOTE these macros are used by some of the tool scripts and the build // system so their names cannot be changed without changing the scripts. #define V8_MAJOR_VERSION 12 -#define V8_MINOR_VERSION 3 -#define V8_BUILD_NUMBER 219 -#define V8_PATCH_LEVEL 16 +#define V8_MINOR_VERSION 4 +#define V8_BUILD_NUMBER 254 +#define V8_PATCH_LEVEL 14 // Use 1 for candidates and 0 otherwise. // (Boolean macro values are not supported by all preprocessors.)
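A small usage sketch for the new Float16Array surface introduced above (New, IsFloat16Array, Cast), assuming a live `isolate` with an entered context:

```cpp
// Backing store for 8 half-precision floats (2 bytes each).
v8::Local<v8::ArrayBuffer> buffer = v8::ArrayBuffer::New(isolate, 8 * 2);
v8::Local<v8::Float16Array> f16 =
    v8::Float16Array::New(buffer, /*byte_offset=*/0, /*length=*/8);

v8::Local<v8::Value> value = f16;
if (value->IsFloat16Array()) {
  // Cast() is checked only when V8_ENABLE_CHECKS is defined.
  v8::Float16Array* array = v8::Float16Array::Cast(*value);
  (void)array;
}
```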
diff --git a/deps/v8/infra/mb/mb_config.pyl index c9b20e7d643cdc..a498d240e3fd83 100644 --- a/deps/v8/infra/mb/mb_config.pyl +++ b/deps/v8/infra/mb/mb_config.pyl @@ -69,7 +69,7 @@ 'V8 Linux64 - builder (goma cache silo)': 'release_x64', 'V8 Linux64 - builder (reclient)': 'release_x64_reclient', 'V8 Linux64 - builder (reclient compare)': 'release_x64_reclient', - 'V8 Linux64 - official - builder': 'official_x64', + 'V8 Linux64 - official - builder': 'official_x64_on_release_branch', 'V8 Linux64 - debug builder': 'debug_x64', 'V8 Linux64 - no shared cage - debug builder': 'debug_x64_no_shared_cage', 'V8 Linux64 - external code space - debug - builder': 'debug_x64_external_code_space', @@ -117,6 +117,7 @@ # FYI. 'V8 iOS - sim - builder': 'release_x64_ios_simulator', 'V8 Linux64 - arm64 - builder': 'release_arm64', + 'V8 Linux64 - arm64 - no pointer compression - builder': 'release_arm64_no_pointer_compression', 'V8 Linux64 - arm64 - debug builder': 'debug_arm64', 'V8 Linux64 - arm64 - sim - no pointer compression - builder': 'release_simulate_arm64_no_pointer_compression', @@ -183,8 +184,10 @@ 'V8 Clusterfuzz Linux64 TSAN - release builder': 'release_x64_tsan', 'V8 Clusterfuzz Linux64 UBSan - release builder': 'release_x64_ubsan_recover', + 'V8 Clusterfuzz Linux64 sandbox testing - release builder': + 'release_x64_sandbox_testing', 'V8 Clusterfuzz Linux64 ASAN sandbox testing - release builder': - 'release_x64_asan_symbolized_expose_memory_corruption', + 'release_x64_asan_sandbox_testing', }, 'client.v8.perf' : { # Arm @@ -257,6 +260,7 @@ 'v8_linux_vtunejit': 'debug_x86_vtunejit', 'v8_linux64_arm64_compile_dbg': 'debug_arm64_trybot', 'v8_linux64_arm64_compile_rel': 'release_arm64_trybot', + 'v8_linux64_native_arm64_no_pointer_compression_compile_rel': 'release_arm64_no_pointer_compression_trybot', 'v8_linux64_arm64_no_pointer_compression_compile_rel': 'release_simulate_arm64_no_pointer_compression', 'v8_linux64_asan_centipede_compile_dbg': 'debug_x64_asan_centipede', @@ -284,7 +288,7 @@ 'v8_linux64_no_pointer_compression_compile_rel': 'release_x64_no_pointer_compression', 'v8_linux64_compile_rel': 'release_x64_test_features_gcmole_trybot', 'v8_linux64_no_sandbox_compile_rel': 'release_x64_no_sandbox', - 'v8_linux64_official_compile_rel': 'official_x64', + 'v8_linux64_official_compile_rel': 'official_x64_on_release_branch', 'v8_linux64_predictable_compile_rel': 'release_x64_predictable', 'v8_linux64_pku_compile_rel': 'release_x64', 'v8_linux64_shared_compile_rel': 'release_x64_shared_verify_heap', @@ -293,7 +297,8 @@ 'v8_linux64_arm64_no_wasm_compile_dbg': 'debug_arm64_webassembly_disabled', 'v8_linux64_verify_csa_compile_rel': 'release_x64_verify_csa', 'v8_linux64_asan_compile_rel': 'release_x64_asan_minimal_symbols', - 'v8_linux64_asan_sandbox_compile_rel': 'release_x64_asan_symbolized_expose_memory_corruption', + 'v8_linux64_sandbox_testing_compile_rel': 'release_x64_sandbox_testing', + 'v8_linux64_asan_sandbox_testing_compile_rel': 'release_x64_asan_sandbox_testing', 'v8_linux64_cfi_compile_rel': 'release_x64_cfi', 'v8_linux64_fuzzilli_compile_rel': 'release_x64_fuzzilli', 'v8_linux64_loong64_compile_rel': 'release_simulate_loong64', @@ -507,10 +512,14 @@ 'release_bot', 'arm', 'hard_float'], 'release_arm64': [ 'release_bot', 'arm64'], + 'release_arm64_no_pointer_compression': [ + 'release_bot', 'arm64', 'v8_disable_pointer_compression'], 'release_arm64_trybot': [ 'release_trybot', 'arm64'], 'release_arm64_hard_float': [ 'release_bot', 'arm64', 'hard_float'], + 'release_arm64_no_pointer_compression_trybot': [ + 'release_trybot', 'arm64', 'v8_disable_pointer_compression'], 'release_android_arm': [ 'release_bot', 'arm', 'android', 'minimal_symbols', 'android_strip_outputs'], @@ -562,9 +571,11 @@ 'release_x64_asan_no_lsan_verify_heap_dchecks': [ 'release_bot', 'x64', 'asan', 'dcheck_always_on', 'v8_enable_slow_dchecks', 'v8_verify_heap'], - 'release_x64_asan_symbolized_expose_memory_corruption': [ + 'release_x64_sandbox_testing': [ + 'release_bot', 'x64', 'symbolized', 'v8_enable_memory_corruption_api'], + 'release_x64_asan_sandbox_testing': [ 'release_bot', 'x64', 'asan', 'symbolized', - 'v8_expose_memory_corruption_api'], + 'v8_enable_memory_corruption_api'],
'release_x64_asan_symbolized_verify_heap': [ 'release_bot', 'x64', 'asan', 'lsan', 'symbolized', 'v8_verify_heap'], @@ -642,6 +653,9 @@ 'official_x64_pgo': [ 'release_bot', 'x64', 'official', 'disable_chrome_pgo', 'builtins_optimization'], + 'official_x64_on_release_branch': [ + 'release_bot', 'x64', 'official', 'disable_chrome_pgo', + 'v8_is_on_release_branch'], # Debug configs for x64. 'debug_x64': [ @@ -1042,8 +1056,8 @@ 'gn_args': 'v8_enable_verify_heap=false', }, - 'v8_expose_memory_corruption_api': { - 'gn_args': 'v8_expose_memory_corruption_api=true', + 'v8_enable_memory_corruption_api': { + 'gn_args': 'v8_enable_memory_corruption_api=true', }, 'v8_enable_lite_mode': { @@ -1107,6 +1121,10 @@ 'gn_args': 'v8_optimized_debug=false', }, + 'v8_is_on_release_branch': { + 'gn_args': 'v8_is_on_release_branch=true', + }, + 'v8_optimized_debug': { # This is the default in gn for debug. }, diff --git a/deps/v8/infra/testing/builders.pyl index 674f5f2d572151..2aca712ca73f8a 100644 --- a/deps/v8/infra/testing/builders.pyl +++ b/deps/v8/infra/testing/builders.pyl @@ -402,6 +402,9 @@ }, 'tests': [ {'name': 'v8testing', 'shards': 5}, + {'name': 'benchmarks', 'shards': 5}, + {'name': 'mozilla', 'shards': 5}, + {'name': 'test262', 'shards': 10}, ], }, 'v8_linux64_dbg': { @@ -553,7 +556,6 @@ {'name': 'benchmarks', 'variant': 'minor_ms'}, {'name': 'mozilla', 'variant': 'minor_ms'}, {'name': 'test262', 'variant': 'minor_ms', 'shards': 2}, - {'name': 'mjsunit', 'variant': 'minor_ms'}, ], }, 'v8_linux64_msan_rel': { @@ -1506,7 +1508,6 @@ {'name': 'benchmarks', 'variant': 'minor_ms'}, {'name': 'mozilla', 'variant': 'minor_ms'}, {'name': 'test262', 'variant': 'minor_ms', 'shards': 2}, - {'name': 'mjsunit', 'variant': 'minor_ms'}, ], }, 'V8 Linux64 - disable runtime call stats': { @@ -1732,6 +1733,9 @@ }, 'tests': [ {'name': 'v8testing', 'shards': 5}, + {'name': 'benchmarks', 'shards': 5}, + {'name': 'mozilla', 'shards': 5}, + {'name': 'test262', 'shards': 10}, ], }, 'V8 Linux64 GC Stress - custom snapshot': { diff --git a/deps/v8/samples/process.cc index eba72bc0c238db..d773e9b9d1870c 100644 --- a/deps/v8/samples/process.cc +++ b/deps/v8/samples/process.cc @@ -140,13 +140,13 @@ class JsHttpRequestProcessor : public HttpRequestProcessor { static Local<ObjectTemplate> MakeMapTemplate(Isolate* isolate); // Callbacks that access the individual fields of request objects. - static void GetPath(Local<String> name, + static void GetPath(Local<Name> name, const PropertyCallbackInfo<Value>& info); - static void GetReferrer(Local<String> name, + static void GetReferrer(Local<Name> name, const PropertyCallbackInfo<Value>& info); - static void GetHost(Local<String> name, + static void GetHost(Local<Name> name, const PropertyCallbackInfo<Value>& info); - static void GetUserAgent(Local<String> name, + static void GetUserAgent(Local<Name> name, const PropertyCallbackInfo<Value>& info); // Callbacks that access maps @@ -507,8 +507,7 @@ HttpRequest* JsHttpRequestProcessor::UnwrapRequest(Local<Object> obj) { return static_cast<HttpRequest*>(ptr); } - -void JsHttpRequestProcessor::GetPath(Local<String> name, +void JsHttpRequestProcessor::GetPath(Local<Name> name, const PropertyCallbackInfo<Value>& info) { // Extract the C++ request object from the JavaScript wrapper.
HttpRequest* request = UnwrapRequest(info.Holder()); @@ -523,10 +522,8 @@ void JsHttpRequestProcessor::GetPath(Local<String> name, static_cast<int>(path.length())).ToLocalChecked()); } - void JsHttpRequestProcessor::GetReferrer( - Local<String> name, - const PropertyCallbackInfo<Value>& info) { + Local<Name> name, const PropertyCallbackInfo<Value>& info) { HttpRequest* request = UnwrapRequest(info.Holder()); const string& path = request->Referrer(); info.GetReturnValue().Set( @@ -535,8 +532,7 @@ void JsHttpRequestProcessor::GetReferrer( static_cast<int>(path.length())).ToLocalChecked()); } - -void JsHttpRequestProcessor::GetHost(Local<String> name, +void JsHttpRequestProcessor::GetHost(Local<Name> name, const PropertyCallbackInfo<Value>& info) { HttpRequest* request = UnwrapRequest(info.Holder()); const string& path = request->Host(); @@ -546,10 +542,8 @@ void JsHttpRequestProcessor::GetHost(Local<String> name, static_cast<int>(path.length())).ToLocalChecked()); } - void JsHttpRequestProcessor::GetUserAgent( - Local<String> name, - const PropertyCallbackInfo<Value>& info) { + Local<Name> name, const PropertyCallbackInfo<Value>& info) { HttpRequest* request = UnwrapRequest(info.Holder()); const string& path = request->UserAgent(); info.GetReturnValue().Set( @@ -558,7 +552,6 @@ void JsHttpRequestProcessor::GetUserAgent( static_cast<int>(path.length())).ToLocalChecked()); } - Local<ObjectTemplate> JsHttpRequestProcessor::MakeRequestTemplate( Isolate* isolate) { EscapableHandleScope handle_scope(isolate); diff --git a/deps/v8/src/DEPS index aeedfd8bb70aee..d9c58d01236454 100644 --- a/deps/v8/src/DEPS +++ b/deps/v8/src/DEPS @@ -15,8 +15,9 @@ include_rules = [ "+src/compiler/turboshaft/wasm-turboshaft-compiler.h", "+src/compiler/wasm-compiler-definitions.h", "+src/compiler/wasm-compiler.h", + "-src/flags/flags-impl.h", "-src/heap", - "+src/heap/basic-memory-chunk.h", + "+src/heap/memory-chunk-metadata.h", "+src/heap/code-range.h", "+src/heap/trusted-range.h", "+src/heap/combined-heap.h", @@ -37,9 +38,9 @@ include_rules = [ "+src/heap/local-heap-inl.h", "+src/heap/pretenuring-handler-inl.h", # TODO(v8:10496): Don't expose memory chunk outside of heap/. + "+src/heap/mutable-page.h", + "+src/heap/mutable-page-inl.h", "+src/heap/memory-chunk.h", - "+src/heap/memory-chunk-inl.h", - "+src/heap/memory-chunk-header.h", "+src/heap/paged-spaces-inl.h", "+src/heap/parked-scope-inl.h", "+src/heap/parked-scope.h", diff --git a/deps/v8/src/api/api-arguments-inl.h index 563884b90ef738..de2648c8d60528 100644 --- a/deps/v8/src/api/api-arguments-inl.h +++ b/deps/v8/src/api/api-arguments-inl.h @@ -47,6 +47,19 @@ Handle<V> CustomArguments<T>::GetReturnValue(Isolate* isolate) const { return Handle<V>::cast(Handle<Object>(slot.location())); } +template <typename T> +template <typename V> +Handle<V> CustomArguments<T>::GetReturnValueNoHoleCheck( + Isolate* isolate) const { + // Check the ReturnValue. + FullObjectSlot slot = slot_at(kReturnValueIndex); + // TODO(ishell): remove the hole check once it's no longer possible to set + // return value to the hole.
+ CHECK(!IsTheHole(*slot, isolate)); + DCHECK(IsApiCallResultType(*slot)); + return Handle<V>::cast(Handle<Object>(slot.location())); +} + inline Tagged<JSObject> PropertyCallbackArguments::holder() const { return JSObject::cast(*slot_at(T::kHolderIndex)); } @@ -85,15 +98,15 @@ inline Tagged<JSObject> FunctionCallbackArguments::holder() const { PropertyCallbackInfo callback_info(values_); Handle<Object> FunctionCallbackArguments::Call( - Tagged<FunctionTemplateInfo> handler) { + Tagged<FunctionTemplateInfo> function) { Isolate* isolate = this->isolate(); RCS_SCOPE(isolate, RuntimeCallCounterId::kFunctionCallback); v8::FunctionCallback f = - reinterpret_cast<v8::FunctionCallback>(handler->callback(isolate)); + reinterpret_cast<v8::FunctionCallback>(function->callback(isolate)); Handle<Object> receiver_check_unsupported; if (isolate->should_check_side_effects() && !isolate->debug()->PerformSideEffectCheckForCallback( - handle(handler, isolate))) { + handle(function, isolate))) { return {}; } ExternalCallbackScope call_scope(isolate, FUNCTION_ADDR(f)); @@ -129,11 +142,21 @@ Handle<Object> PropertyCallbackArguments::CallNamedQuery( Isolate* isolate = this->isolate(); RCS_SCOPE(isolate, RuntimeCallCounterId::kNamedQueryCallback); Handle<Object> receiver_check_unsupported; - GenericNamedPropertyQueryCallback f = - ToCData<GenericNamedPropertyQueryCallback>(interceptor->query()); - PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Integer, interceptor); - f(v8::Utils::ToLocal(name), callback_info); - return GetReturnValue<v8::Integer>(isolate); + if (interceptor->has_new_callbacks_signature()) { + NamedPropertyQueryCallback f = + ToCData<NamedPropertyQueryCallback>(interceptor->query()); + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Integer, interceptor); + auto intercepted = f(v8::Utils::ToLocal(name), callback_info); + if (intercepted == v8::Intercepted::kNo) return {}; + return GetReturnValueNoHoleCheck<v8::Integer>(isolate); + + } else { + GenericNamedPropertyQueryCallback f = + ToCData<GenericNamedPropertyQueryCallback>(interceptor->query()); + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Integer, interceptor); + f(v8::Utils::ToLocal(name), callback_info); + return GetReturnValue<v8::Integer>(isolate); + } } Handle<Object> PropertyCallbackArguments::CallNamedGetter( @@ -141,11 +164,21 @@ Handle<Object> PropertyCallbackArguments::CallNamedGetter( DCHECK_NAME_COMPATIBLE(interceptor, name); Isolate* isolate = this->isolate(); RCS_SCOPE(isolate, RuntimeCallCounterId::kNamedGetterCallback); - GenericNamedPropertyGetterCallback f = - ToCData<GenericNamedPropertyGetterCallback>(interceptor->getter()); - PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Value, interceptor); - f(v8::Utils::ToLocal(name), callback_info); - return GetReturnValue<v8::Value>(isolate); + if (interceptor->has_new_callbacks_signature()) { + NamedPropertyGetterCallback f = + ToCData<NamedPropertyGetterCallback>(interceptor->getter()); + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Value, interceptor); + auto intercepted = f(v8::Utils::ToLocal(name), callback_info); + if (intercepted == v8::Intercepted::kNo) return {}; + return GetReturnValueNoHoleCheck<v8::Value>(isolate); + + } else { + GenericNamedPropertyGetterCallback f = + ToCData<GenericNamedPropertyGetterCallback>(interceptor->getter()); + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Value, interceptor); + f(v8::Utils::ToLocal(name), callback_info); + return GetReturnValue<v8::Value>(isolate); + } } Handle<Object> PropertyCallbackArguments::CallNamedDescriptor( @@ -153,53 +186,104 @@ Handle<Object> PropertyCallbackArguments::CallNamedDescriptor( DCHECK_NAME_COMPATIBLE(interceptor, name); Isolate* isolate = this->isolate(); RCS_SCOPE(isolate, RuntimeCallCounterId::kNamedDescriptorCallback); - GenericNamedPropertyDescriptorCallback f = - ToCData<GenericNamedPropertyDescriptorCallback>( - interceptor->descriptor()); - PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Value, interceptor); - f(v8::Utils::ToLocal(name), callback_info); -
return GetReturnValue<v8::Value>(isolate); + if (interceptor->has_new_callbacks_signature()) { + NamedPropertyDescriptorCallback f = + ToCData<NamedPropertyDescriptorCallback>(interceptor->descriptor()); + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Value, interceptor); + auto intercepted = f(v8::Utils::ToLocal(name), callback_info); + if (intercepted == v8::Intercepted::kNo) return {}; + return GetReturnValueNoHoleCheck<v8::Value>(isolate); + + } else { + GenericNamedPropertyDescriptorCallback f = + ToCData<GenericNamedPropertyDescriptorCallback>( + interceptor->descriptor()); + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Value, interceptor); + f(v8::Utils::ToLocal(name), callback_info); + return GetReturnValue<v8::Value>(isolate); + } } +// TODO(ishell): just return v8::Intercepted. Handle<Object> PropertyCallbackArguments::CallNamedSetter( Handle<InterceptorInfo> interceptor, Handle<Name> name, Handle<Object> value) { DCHECK_NAME_COMPATIBLE(interceptor, name); Isolate* isolate = this->isolate(); RCS_SCOPE(isolate, RuntimeCallCounterId::kNamedSetterCallback); - GenericNamedPropertySetterCallback f = - ToCData<GenericNamedPropertySetterCallback>(interceptor->setter()); - Handle<InterceptorInfo> has_side_effects; - PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Value, has_side_effects); - f(v8::Utils::ToLocal(name), v8::Utils::ToLocal(value), callback_info); - return GetReturnValue<v8::Value>(isolate); + if (interceptor->has_new_callbacks_signature()) { + NamedPropertySetterCallback f = + ToCData<NamedPropertySetterCallback>(interceptor->setter()); + Handle<InterceptorInfo> has_side_effects; + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, void, has_side_effects); + auto intercepted = + f(v8::Utils::ToLocal(name), v8::Utils::ToLocal(value), callback_info); + if (intercepted == v8::Intercepted::kNo) return {}; + // Non-empty handle indicates that the request was intercepted. + return isolate->factory()->undefined_value(); + + } else { + GenericNamedPropertySetterCallback f = + ToCData<GenericNamedPropertySetterCallback>(interceptor->setter()); + Handle<InterceptorInfo> has_side_effects; + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Value, has_side_effects); + f(v8::Utils::ToLocal(name), v8::Utils::ToLocal(value), callback_info); + return GetReturnValue<v8::Value>(isolate); + } } +// TODO(ishell): just return v8::Intercepted. Handle<Object> PropertyCallbackArguments::CallNamedDefiner( Handle<InterceptorInfo> interceptor, Handle<Name> name, const v8::PropertyDescriptor& desc) { DCHECK_NAME_COMPATIBLE(interceptor, name); Isolate* isolate = this->isolate(); RCS_SCOPE(isolate, RuntimeCallCounterId::kNamedDefinerCallback); - GenericNamedPropertyDefinerCallback f = - ToCData<GenericNamedPropertyDefinerCallback>(interceptor->definer()); - Handle<InterceptorInfo> has_side_effects; - PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Value, has_side_effects); - f(v8::Utils::ToLocal(name), desc, callback_info); - return GetReturnValue<v8::Value>(isolate); + if (interceptor->has_new_callbacks_signature()) { + NamedPropertyDefinerCallback f = + ToCData<NamedPropertyDefinerCallback>(interceptor->definer()); + Handle<InterceptorInfo> has_side_effects; + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, void, has_side_effects); + auto intercepted = f(v8::Utils::ToLocal(name), desc, callback_info); + if (intercepted == v8::Intercepted::kNo) return {}; + // Non-empty handle indicates that the request was intercepted.
+ return isolate->factory()->undefined_value(); + + } else { + GenericNamedPropertyDefinerCallback f = + ToCData<GenericNamedPropertyDefinerCallback>(interceptor->definer()); + Handle<InterceptorInfo> has_side_effects; + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Value, has_side_effects); + f(v8::Utils::ToLocal(name), desc, callback_info); + return GetReturnValue<v8::Value>(isolate); + } } +// TODO(ishell): return Handle<Boolean> Handle<Object> PropertyCallbackArguments::CallNamedDeleter( Handle<InterceptorInfo> interceptor, Handle<Name> name) { DCHECK_NAME_COMPATIBLE(interceptor, name); Isolate* isolate = this->isolate(); RCS_SCOPE(isolate, RuntimeCallCounterId::kNamedDeleterCallback); - GenericNamedPropertyDeleterCallback f = - ToCData<GenericNamedPropertyDeleterCallback>(interceptor->deleter()); - Handle<InterceptorInfo> has_side_effects; - PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Boolean, has_side_effects); - f(v8::Utils::ToLocal(name), callback_info); - return GetReturnValue<v8::Boolean>(isolate); + if (interceptor->has_new_callbacks_signature()) { + NamedPropertyDeleterCallback f = + ToCData<NamedPropertyDeleterCallback>(interceptor->deleter()); + Handle<InterceptorInfo> has_side_effects; + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Boolean, + has_side_effects); + auto intercepted = f(v8::Utils::ToLocal(name), callback_info); + if (intercepted == v8::Intercepted::kNo) return {}; + return GetReturnValue<v8::Boolean>(isolate); + + } else { + GenericNamedPropertyDeleterCallback f = + ToCData<GenericNamedPropertyDeleterCallback>(interceptor->deleter()); + Handle<InterceptorInfo> has_side_effects; + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Boolean, + has_side_effects); + f(v8::Utils::ToLocal(name), callback_info); + return GetReturnValue<v8::Boolean>(isolate); + } } // ------------------------------------------------------------------------- @@ -217,23 +301,44 @@ Handle<Object> PropertyCallbackArguments::CallIndexedQuery( DCHECK(!interceptor->is_named()); Isolate* isolate = this->isolate(); RCS_SCOPE(isolate, RuntimeCallCounterId::kIndexedQueryCallback); - IndexedPropertyQueryCallback f = - ToCData<IndexedPropertyQueryCallback>(interceptor->query()); - PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Integer, interceptor); - f(index, callback_info); - return GetReturnValue<v8::Integer>(isolate); + if (interceptor->has_new_callbacks_signature()) { + IndexedPropertyQueryCallbackV2 f = + ToCData<IndexedPropertyQueryCallbackV2>(interceptor->query()); + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Integer, interceptor); + auto intercepted = f(index, callback_info); + if (intercepted == v8::Intercepted::kNo) return {}; + return GetReturnValueNoHoleCheck<v8::Integer>(isolate); + + } else { + IndexedPropertyQueryCallback f = + ToCData<IndexedPropertyQueryCallback>(interceptor->query()); + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Integer, interceptor); + f(index, callback_info); + return GetReturnValue<v8::Integer>(isolate); + } } Handle<Object> PropertyCallbackArguments::CallIndexedGetter( Handle<InterceptorInfo> interceptor, uint32_t index) { DCHECK(!interceptor->is_named()); RCS_SCOPE(isolate(), RuntimeCallCounterId::kNamedGetterCallback); - IndexedPropertyGetterCallback f = - ToCData<IndexedPropertyGetterCallback>(interceptor->getter()); - Isolate* isolate = this->isolate(); - PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Value, interceptor); - f(index, callback_info); - return GetReturnValue<v8::Value>(isolate); + if (interceptor->has_new_callbacks_signature()) { + IndexedPropertyGetterCallbackV2 f = + ToCData<IndexedPropertyGetterCallbackV2>(interceptor->getter()); + Isolate* isolate = this->isolate(); + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Value, interceptor); + auto intercepted = f(index, callback_info); + if (intercepted == v8::Intercepted::kNo) return {}; + return GetReturnValueNoHoleCheck<v8::Value>(isolate); + + } else { + IndexedPropertyGetterCallback f = + ToCData<IndexedPropertyGetterCallback>(interceptor->getter()); + Isolate* isolate = this->isolate(); +
PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Value, interceptor); + f(index, callback_info); + return GetReturnValue<v8::Value>(isolate); + } } Handle<Object> PropertyCallbackArguments::CallIndexedDescriptor( @@ -241,58 +346,106 @@ Handle<Object> PropertyCallbackArguments::CallIndexedDescriptor( DCHECK(!interceptor->is_named()); Isolate* isolate = this->isolate(); RCS_SCOPE(isolate, RuntimeCallCounterId::kIndexedDescriptorCallback); - IndexedPropertyDescriptorCallback f = - ToCData<IndexedPropertyDescriptorCallback>(interceptor->descriptor()); - PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Value, interceptor); - f(index, callback_info); - return GetReturnValue<v8::Value>(isolate); + if (interceptor->has_new_callbacks_signature()) { + IndexedPropertyDescriptorCallbackV2 f = + ToCData<IndexedPropertyDescriptorCallbackV2>(interceptor->descriptor()); + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Value, interceptor); + auto intercepted = f(index, callback_info); + if (intercepted == v8::Intercepted::kNo) return {}; + return GetReturnValueNoHoleCheck<v8::Value>(isolate); + + } else { + IndexedPropertyDescriptorCallback f = + ToCData<IndexedPropertyDescriptorCallback>(interceptor->descriptor()); + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Value, interceptor); + f(index, callback_info); + return GetReturnValue<v8::Value>(isolate); + } } +// TODO(ishell): just return v8::Intercepted. Handle<Object> PropertyCallbackArguments::CallIndexedSetter( Handle<InterceptorInfo> interceptor, uint32_t index, Handle<Object> value) { DCHECK(!interceptor->is_named()); Isolate* isolate = this->isolate(); RCS_SCOPE(isolate, RuntimeCallCounterId::kIndexedSetterCallback); - IndexedPropertySetterCallback f = - ToCData<IndexedPropertySetterCallback>(interceptor->setter()); - Handle<InterceptorInfo> has_side_effects; - PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Value, has_side_effects); - f(index, v8::Utils::ToLocal(value), callback_info); - return GetReturnValue<v8::Value>(isolate); + if (interceptor->has_new_callbacks_signature()) { + IndexedPropertySetterCallbackV2 f = + ToCData<IndexedPropertySetterCallbackV2>(interceptor->setter()); + Handle<InterceptorInfo> has_side_effects; + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, void, has_side_effects); + auto intercepted = f(index, v8::Utils::ToLocal(value), callback_info); + if (intercepted == v8::Intercepted::kNo) return {}; + // Non-empty handle indicates that the request was intercepted. + return isolate->factory()->undefined_value(); + + } else { + IndexedPropertySetterCallback f = + ToCData<IndexedPropertySetterCallback>(interceptor->setter()); + Handle<InterceptorInfo> has_side_effects; + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Value, has_side_effects); + f(index, v8::Utils::ToLocal(value), callback_info); + return GetReturnValue<v8::Value>(isolate); + } } +// TODO(ishell): just return v8::Intercepted. Handle<Object> PropertyCallbackArguments::CallIndexedDefiner( Handle<InterceptorInfo> interceptor, uint32_t index, const v8::PropertyDescriptor& desc) { DCHECK(!interceptor->is_named()); Isolate* isolate = this->isolate(); RCS_SCOPE(isolate, RuntimeCallCounterId::kIndexedDefinerCallback); - IndexedPropertyDefinerCallback f = - ToCData<IndexedPropertyDefinerCallback>(interceptor->definer()); - Handle<InterceptorInfo> has_side_effects; - PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Value, has_side_effects); - f(index, desc, callback_info); - return GetReturnValue<v8::Value>(isolate); + if (interceptor->has_new_callbacks_signature()) { + IndexedPropertyDefinerCallbackV2 f = + ToCData<IndexedPropertyDefinerCallbackV2>(interceptor->definer()); + Handle<InterceptorInfo> has_side_effects; + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, void, has_side_effects); + auto intercepted = f(index, desc, callback_info); + if (intercepted == v8::Intercepted::kNo) return {}; + // Non-empty handle indicates that the request was intercepted.
+ return isolate->factory()->undefined_value(); + + } else { + IndexedPropertyDefinerCallback f = + ToCData<IndexedPropertyDefinerCallback>(interceptor->definer()); + Handle<InterceptorInfo> has_side_effects; + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Value, has_side_effects); + f(index, desc, callback_info); + return GetReturnValue<v8::Value>(isolate); + } } +// TODO(ishell): return Handle<Boolean> Handle<Object> PropertyCallbackArguments::CallIndexedDeleter( Handle<InterceptorInfo> interceptor, uint32_t index) { DCHECK(!interceptor->is_named()); Isolate* isolate = this->isolate(); RCS_SCOPE(isolate, RuntimeCallCounterId::kIndexedDeleterCallback); - IndexedPropertyDeleterCallback f = - ToCData<IndexedPropertyDeleterCallback>(interceptor->deleter()); - PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Boolean, interceptor); - f(index, callback_info); - return GetReturnValue<v8::Boolean>(isolate); + if (interceptor->has_new_callbacks_signature()) { + IndexedPropertyDeleterCallbackV2 f = + ToCData<IndexedPropertyDeleterCallbackV2>(interceptor->deleter()); + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Boolean, interceptor); + auto intercepted = f(index, callback_info); + if (intercepted == v8::Intercepted::kNo) return {}; + return GetReturnValueNoHoleCheck<v8::Boolean>(isolate); + + } else { + IndexedPropertyDeleterCallback f = + ToCData<IndexedPropertyDeleterCallback>(interceptor->deleter()); + PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Boolean, interceptor); + f(index, callback_info); + return GetReturnValue<v8::Boolean>(isolate); + } } Handle<JSObject> PropertyCallbackArguments::CallPropertyEnumerator( Handle<InterceptorInfo> interceptor) { - // For now there is a single enumerator for indexed and named properties. + // Named and indexed enumerator callbacks have the same signature. + static_assert(std::is_same<IndexedPropertyEnumeratorCallback, + NamedPropertyEnumeratorCallback>::value); IndexedPropertyEnumeratorCallback f = v8::ToCData<IndexedPropertyEnumeratorCallback>(interceptor->enumerator()); - // TODO(cbruni): assert same type for indexed and named callback. Isolate* isolate = this->isolate(); PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Array, interceptor); f(callback_info); diff --git a/deps/v8/src/api/api-arguments.h index 18f28ce7bdf8ae..179559d8b251a0 100644 --- a/deps/v8/src/api/api-arguments.h +++ b/deps/v8/src/api/api-arguments.h @@ -42,6 +42,9 @@ class CustomArguments : public CustomArgumentsBase { template <typename V> Handle<V> GetReturnValue(Isolate* isolate) const; + template <typename V> + Handle<V> GetReturnValueNoHoleCheck(Isolate* isolate) const; + inline Isolate* isolate() const { return reinterpret_cast<Isolate*>((*slot_at(T::kIsolateIndex)).ptr()); } @@ -209,7 +212,7 @@ class FunctionCallbackArguments * and used if it's been set to anything inside the callback. * New style callbacks always use the return value. */ - inline Handle<Object> Call(Tagged<FunctionTemplateInfo> handler); + inline Handle<Object> Call(Tagged<FunctionTemplateInfo> function); private: inline Tagged<JSObject> holder() const; diff --git a/deps/v8/src/api/api-natives.cc index 0f1747a2a911ad..e3ff723b53e38d 100644 --- a/deps/v8/src/api/api-natives.cc +++ b/deps/v8/src/api/api-natives.cc @@ -282,106 +282,6 @@ MaybeHandle<JSObject> ConfigureInstance(Isolate* isolate, Handle<JSObject> obj, return obj; } -// Whether or not to cache every instance: when we materialize a getter or -// setter from an lazy AccessorPair, we rely on this cache to be able to always -// return the same getter or setter. However, objects will be cloned anyways, -// so it's not observable if we didn't cache an instance. Furthermore, a badly -// behaved embedder might create an unlimited number of objects, so we limit -// the cache for those cases.
-enum class CachingMode { kLimited, kUnlimited }; - -MaybeHandle ProbeInstantiationsCache( - Isolate* isolate, Handle native_context, int serial_number, - CachingMode caching_mode) { - DCHECK_NE(serial_number, TemplateInfo::kDoNotCache); - if (serial_number == TemplateInfo::kUncached) { - return {}; - } - - if (serial_number < TemplateInfo::kFastTemplateInstantiationsCacheSize) { - Tagged fast_cache = - native_context->fast_template_instantiations_cache(); - Handle object{fast_cache->get(serial_number), isolate}; - if (IsTheHole(*object, isolate)) return {}; - return Handle::cast(object); - } - if (caching_mode == CachingMode::kUnlimited || - (serial_number < TemplateInfo::kSlowTemplateInstantiationsCacheSize)) { - Tagged slow_cache = - native_context->slow_template_instantiations_cache(); - InternalIndex entry = slow_cache->FindEntry(isolate, serial_number); - if (entry.is_found()) { - return handle(JSObject::cast(slow_cache->ValueAt(entry)), isolate); - } - } - return {}; -} - -void CacheTemplateInstantiation(Isolate* isolate, - Handle native_context, - Handle data, - CachingMode caching_mode, - Handle object) { - DCHECK_NE(TemplateInfo::kDoNotCache, data->serial_number()); - - int serial_number = data->serial_number(); - if (serial_number == TemplateInfo::kUncached) { - serial_number = isolate->heap()->GetNextTemplateSerialNumber(); - } - - if (serial_number < TemplateInfo::kFastTemplateInstantiationsCacheSize) { - Handle fast_cache = - handle(native_context->fast_template_instantiations_cache(), isolate); - Handle new_cache = - FixedArray::SetAndGrow(isolate, fast_cache, serial_number, object); - if (*new_cache != *fast_cache) { - native_context->set_fast_template_instantiations_cache(*new_cache); - } - data->set_serial_number(serial_number); - } else if (caching_mode == CachingMode::kUnlimited || - (serial_number < - TemplateInfo::kSlowTemplateInstantiationsCacheSize)) { - Handle cache = - handle(native_context->slow_template_instantiations_cache(), isolate); - auto new_cache = - SimpleNumberDictionary::Set(isolate, cache, serial_number, object); - if (*new_cache != *cache) { - native_context->set_slow_template_instantiations_cache(*new_cache); - } - data->set_serial_number(serial_number); - } else { - // we've overflowed the cache limit, no more caching - data->set_serial_number(TemplateInfo::kDoNotCache); - } -} - -void UncacheTemplateInstantiation(Isolate* isolate, - Handle native_context, - Handle data, - CachingMode caching_mode) { - int serial_number = data->serial_number(); - if (serial_number < 0) return; - - if (serial_number < TemplateInfo::kFastTemplateInstantiationsCacheSize) { - Tagged fast_cache = - native_context->fast_template_instantiations_cache(); - DCHECK(!IsUndefined(fast_cache->get(serial_number), isolate)); - fast_cache->set(serial_number, ReadOnlyRoots{isolate}.undefined_value(), - SKIP_WRITE_BARRIER); - data->set_serial_number(TemplateInfo::kUncached); - } else if (caching_mode == CachingMode::kUnlimited || - (serial_number < - TemplateInfo::kSlowTemplateInstantiationsCacheSize)) { - Handle cache = - handle(native_context->slow_template_instantiations_cache(), isolate); - InternalIndex entry = cache->FindEntry(isolate, serial_number); - DCHECK(entry.is_found()); - cache = SimpleNumberDictionary::DeleteEntry(isolate, cache, entry); - native_context->set_slow_template_instantiations_cache(*cache); - data->set_serial_number(TemplateInfo::kUncached); - } -} - bool IsSimpleInstantiation(Isolate* isolate, Tagged info, Tagged new_target) { DisallowGarbageCollection 
no_gc; @@ -412,8 +312,9 @@ MaybeHandle InstantiateObject(Isolate* isolate, // Fast path. Handle result; if (should_cache && info->is_cached()) { - if (ProbeInstantiationsCache(isolate, isolate->native_context(), - info->serial_number(), CachingMode::kLimited) + if (TemplateInfo::ProbeInstantiationsCache( + isolate, isolate->native_context(), info->serial_number(), + TemplateInfo::CachingMode::kLimited) .ToHandle(&result)) { return isolate->factory()->CopyJSObject(result); } @@ -457,8 +358,9 @@ MaybeHandle InstantiateObject(Isolate* isolate, JSObject::MigrateSlowToFast(result, 0, "ApiNatives::InstantiateObject"); // Don't cache prototypes. if (should_cache) { - CacheTemplateInstantiation(isolate, isolate->native_context(), info, - CachingMode::kLimited, result); + TemplateInfo::CacheTemplateInstantiation( + isolate, isolate->native_context(), info, + TemplateInfo::CachingMode::kLimited, result); result = isolate->factory()->CopyJSObject(result); } } @@ -495,8 +397,9 @@ MaybeHandle InstantiateFunction( bool should_cache = data->should_cache(); if (should_cache && data->is_cached()) { Handle result; - if (ProbeInstantiationsCache(isolate, native_context, data->serial_number(), - CachingMode::kUnlimited) + if (TemplateInfo::ProbeInstantiationsCache( + isolate, native_context, data->serial_number(), + TemplateInfo::CachingMode::kUnlimited) .ToHandle(&result)) { return Handle::cast(result); } @@ -537,23 +440,24 @@ MaybeHandle InstantiateFunction( if (!data->needs_access_check() && IsUndefined(data->GetNamedPropertyHandler(), isolate) && IsUndefined(data->GetIndexedPropertyHandler(), isolate)) { - function_type = v8_flags.embedder_instance_types && data->HasInstanceType() - ? static_cast(data->InstanceType()) - : JS_API_OBJECT_TYPE; + function_type = v8_flags.embedder_instance_types ? data->GetInstanceType() + : JS_API_OBJECT_TYPE; + DCHECK(InstanceTypeChecker::IsJSApiObject(function_type)); } Handle function = ApiNatives::CreateApiFunction( isolate, native_context, data, prototype, function_type, maybe_name); if (should_cache) { // Cache the function. - CacheTemplateInstantiation(isolate, native_context, data, - CachingMode::kUnlimited, function); + TemplateInfo::CacheTemplateInstantiation( + isolate, native_context, data, TemplateInfo::CachingMode::kUnlimited, + function); } MaybeHandle result = ConfigureInstance(isolate, function, data); if (result.is_null()) { // Uncache on error. 
- UncacheTemplateInstantiation(isolate, native_context, data, - CachingMode::kUnlimited); + TemplateInfo::UncacheTemplateInstantiation( + isolate, native_context, data, TemplateInfo::CachingMode::kUnlimited); return MaybeHandle(); } data->set_published(true); diff --git a/deps/v8/src/api/api.cc b/deps/v8/src/api/api.cc index 9ca94b045c26c9..a7070c494c211b 100644 --- a/deps/v8/src/api/api.cc +++ b/deps/v8/src/api/api.cc @@ -566,13 +566,23 @@ Isolate* SnapshotCreator::GetIsolate() { } void SnapshotCreator::SetDefaultContext( - Local context, SerializeInternalFieldsCallback callback) { - impl_->SetDefaultContext(Utils::OpenHandle(*context), callback); + Local context, + SerializeInternalFieldsCallback internal_fields_serializer, + SerializeContextDataCallback context_data_serializer) { + impl_->SetDefaultContext( + Utils::OpenHandle(*context), + i::SerializeEmbedderFieldsCallback(internal_fields_serializer, + context_data_serializer)); } -size_t SnapshotCreator::AddContext(Local context, - SerializeInternalFieldsCallback callback) { - return impl_->AddContext(Utils::OpenHandle(*context), callback); +size_t SnapshotCreator::AddContext( + Local context, + SerializeInternalFieldsCallback internal_fields_serializer, + SerializeContextDataCallback context_data_serializer) { + return impl_->AddContext( + Utils::OpenHandle(*context), + i::SerializeEmbedderFieldsCallback(internal_fields_serializer, + context_data_serializer)); } size_t SnapshotCreator::AddData(i::Address object) { @@ -768,33 +778,36 @@ void HandleHelper::VerifyOnMainThread() { #if V8_STATIC_ROOTS_BOOL -// Initialize static root constants exposed in v8-internal.h. +// Check static root constants exposed in v8-internal.h. namespace { constexpr InstanceTypeChecker::TaggedAddressRange kStringMapRange = *InstanceTypeChecker::UniqueMapRangeOfInstanceTypeRange(FIRST_STRING_TYPE, LAST_STRING_TYPE); -constexpr Tagged_t kFirstStringMapPtr = kStringMapRange.first; -constexpr Tagged_t kLastStringMapPtr = kStringMapRange.second; } // namespace -#define EXPORTED_STATIC_ROOTS_MAPPING(V) \ +#define EXPORTED_STATIC_ROOTS_PTR_MAPPING(V) \ V(UndefinedValue, i::StaticReadOnlyRoot::kUndefinedValue) \ V(NullValue, i::StaticReadOnlyRoot::kNullValue) \ V(TrueValue, i::StaticReadOnlyRoot::kTrueValue) \ V(FalseValue, i::StaticReadOnlyRoot::kFalseValue) \ V(EmptyString, i::StaticReadOnlyRoot::kempty_string) \ V(TheHoleValue, i::StaticReadOnlyRoot::kTheHoleValue) \ - V(FirstStringMap, kFirstStringMapPtr) \ - V(LastStringMap, kLastStringMapPtr) + V(FirstStringMap, kStringMapRange.first) \ + V(LastStringMap, kStringMapRange.second) static_assert(std::is_same::value); -#define DEF_STATIC_ROOT(name, internal_value) \ - const Internals::Tagged_t Internals::StaticReadOnlyRoot::k##name = \ - internal_value; -EXPORTED_STATIC_ROOTS_MAPPING(DEF_STATIC_ROOT) -#undef DEF_STATIC_ROOT -#undef EXPORTED_STATIC_ROOTS_MAPPING +// Ensure they have the correct value. +#define CHECK_STATIC_ROOT(name, value) \ + static_assert(Internals::StaticReadOnlyRoot::k##name == value); +EXPORTED_STATIC_ROOTS_PTR_MAPPING(CHECK_STATIC_ROOT) +#undef CHECK_STATIC_ROOT +#define PLUS_ONE(...) 
+1 +static constexpr int kNumberOfCheckedStaticRoots = + 0 EXPORTED_STATIC_ROOTS_PTR_MAPPING(PLUS_ONE); +#undef EXPORTED_STATIC_ROOTS_PTR_MAPPING +static_assert(Internals::StaticReadOnlyRoot::kNumberOfExportedStaticRoots == + kNumberOfCheckedStaticRoots); #endif // V8_STATIC_ROOTS_BOOL @@ -1130,15 +1143,6 @@ void Context::SetAlignedPointerInEmbedderData(int index, void* value) { // --- T e m p l a t e --- -static void InitializeTemplate(i::Tagged that, int type, - bool do_not_cache) { - that->set_number_of_properties(0); - that->set_tag(type); - int serial_number = - do_not_cache ? i::TemplateInfo::kDoNotCache : i::TemplateInfo::kUncached; - that->set_serial_number(serial_number); -} - void Template::Set(v8::Local name, v8::Local value, v8::PropertyAttribute attribute) { auto templ = Utils::OpenHandle(this); @@ -1172,58 +1176,32 @@ void Template::SetAccessorProperty(v8::Local name, v8::Local getter, v8::Local setter, v8::PropertyAttribute attribute) { - Utils::ApiCheck( - getter.IsEmpty() || - !IsUndefined( - Utils::OpenDirectHandle(*getter)->call_code(kAcquireLoad)), - "v8::Template::SetAccessorProperty", "Getter must have a call handler"); - Utils::ApiCheck( - setter.IsEmpty() || - !IsUndefined( - Utils::OpenDirectHandle(*setter)->call_code(kAcquireLoad)), - "v8::Template::SetAccessorProperty", "Setter must have a call handler"); - auto templ = Utils::OpenHandle(this); auto i_isolate = templ->GetIsolateChecked(); + i::Handle i_getter; + if (!getter.IsEmpty()) { + i_getter = Utils::OpenHandle(*getter); + Utils::ApiCheck(i_getter->has_callback(i_isolate), + "v8::Template::SetAccessorProperty", + "Getter must have a call handler"); + } + i::Handle i_setter; + if (!setter.IsEmpty()) { + i_setter = Utils::OpenHandle(*setter); + Utils::ApiCheck(i_setter->has_callback(i_isolate), + "v8::Template::SetAccessorProperty", + "Setter must have a call handler"); + } ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate); DCHECK(!name.IsEmpty()); DCHECK(!getter.IsEmpty() || !setter.IsEmpty()); i::HandleScope scope(i_isolate); i::ApiNatives::AddAccessorProperty( - i_isolate, templ, Utils::OpenHandle(*name), - Utils::OpenHandle(*getter, true), Utils::OpenHandle(*setter, true), + i_isolate, templ, Utils::OpenHandle(*name), i_getter, i_setter, static_cast(attribute)); } // --- F u n c t i o n T e m p l a t e --- -static void InitializeFunctionTemplate(i::Tagged info, - bool do_not_cache) { - InitializeTemplate(info, Consts::FUNCTION_TEMPLATE, do_not_cache); - info->set_flag(0, kRelaxedStore); -} - -namespace { -Local ObjectTemplateNew(i::Isolate* i_isolate, - v8::Local constructor, - bool do_not_cache) { - API_RCS_SCOPE(i_isolate, ObjectTemplate, New); - ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate); - i::Handle struct_obj = i_isolate->factory()->NewStruct( - i::OBJECT_TEMPLATE_INFO_TYPE, i::AllocationType::kOld); - auto obj = i::Handle::cast(struct_obj); - { - // Disallow GC until all fields of obj have acceptable types. - i::DisallowGarbageCollection no_gc; - i::Tagged raw = *obj; - InitializeTemplate(raw, Consts::OBJECT_TEMPLATE, do_not_cache); - raw->set_data(0); - if (!constructor.IsEmpty()) { - raw->set_constructor(*Utils::OpenDirectHandle(*constructor)); - } - } - return Utils::ToLocal(obj); -} -} // namespace Local FunctionTemplate::PrototypeTemplate() { auto self = Utils::OpenHandle(this); @@ -1233,11 +1211,13 @@ Local FunctionTemplate::PrototypeTemplate() { i_isolate); if (i::IsUndefined(*heap_obj, i_isolate)) { // Do not cache prototype objects. 
- Local result = - ObjectTemplateNew(i_isolate, Local(), true); + constexpr bool do_not_cache = true; + i::Handle proto_template = + i_isolate->factory()->NewObjectTemplateInfo( + i::Handle(), do_not_cache); i::FunctionTemplateInfo::SetPrototypeTemplate(i_isolate, self, - Utils::OpenHandle(*result)); - return result; + proto_template); + return Utils::ToLocal(proto_template); } return ToApiHandle(heap_obj, i_isolate); } @@ -1267,48 +1247,35 @@ static void EnsureNotPublished(i::DirectHandle info, "FunctionTemplate already instantiated"); } -Local FunctionTemplateNew( +i::Handle FunctionTemplateNew( i::Isolate* i_isolate, FunctionCallback callback, v8::Local data, v8::Local signature, int length, ConstructorBehavior behavior, bool do_not_cache, v8::Local cached_property_name = v8::Local(), SideEffectType side_effect_type = SideEffectType::kHasSideEffect, - const MemorySpan& c_function_overloads = {}, - uint8_t instance_type = 0, - uint8_t allowed_receiver_instance_type_range_start = 0, - uint8_t allowed_receiver_instance_type_range_end = 0) { - i::Handle struct_obj = i_isolate->factory()->NewStruct( - i::FUNCTION_TEMPLATE_INFO_TYPE, i::AllocationType::kOld); - auto obj = i::Handle::cast(struct_obj); + const MemorySpan& c_function_overloads = {}) { + i::Handle obj = + i_isolate->factory()->NewFunctionTemplateInfo(length, do_not_cache); { // Disallow GC until all fields of obj have acceptable types. i::DisallowGarbageCollection no_gc; i::Tagged raw = *obj; - InitializeFunctionTemplate(raw, do_not_cache); - raw->set_length(length); - raw->set_undetectable(false); - raw->set_needs_access_check(false); - raw->set_accept_any_receiver(true); if (!signature.IsEmpty()) { raw->set_signature(*Utils::OpenDirectHandle(*signature)); } - raw->set_cached_property_name( - cached_property_name.IsEmpty() - ? i::ReadOnlyRoots(i_isolate).the_hole_value() - : *Utils::OpenDirectHandle(*cached_property_name)); - if (behavior == ConstructorBehavior::kThrow) + if (!cached_property_name.IsEmpty()) { + raw->set_cached_property_name( + *Utils::OpenDirectHandle(*cached_property_name)); + } + if (behavior == ConstructorBehavior::kThrow) { raw->set_remove_prototype(true); - raw->SetInstanceType(instance_type); - raw->set_allowed_receiver_instance_type_range_start( - allowed_receiver_instance_type_range_start); - raw->set_allowed_receiver_instance_type_range_end( - allowed_receiver_instance_type_range_end); + } } if (callback != nullptr) { Utils::ToLocal(obj)->SetCallHandler(callback, data, side_effect_type, c_function_overloads); } - return Utils::ToLocal(obj); + return obj; } } // namespace @@ -1342,24 +1309,43 @@ Local FunctionTemplate::New( return Local(); } - if (instance_type != 0) { + ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate); + i::Handle templ = FunctionTemplateNew( + i_isolate, callback, data, signature, length, behavior, false, + Local(), side_effect_type, + c_function ? 
MemorySpan{c_function, 1} + : MemorySpan{}); + + if (instance_type) { if (!Utils::ApiCheck( - instance_type >= i::Internals::kFirstJSApiObjectType && - instance_type <= i::Internals::kLastJSApiObjectType, + base::IsInRange(static_cast(instance_type), + i::Internals::kFirstEmbedderJSApiObjectType, + i::Internals::kLastEmbedderJSApiObjectType), "FunctionTemplate::New", "instance_type is outside the range of valid JSApiObject types")) { return Local(); } + templ->SetInstanceType(instance_type); + } + + if (allowed_receiver_instance_type_range_start || + allowed_receiver_instance_type_range_end) { + if (!Utils::ApiCheck(i::Internals::kFirstEmbedderJSApiObjectType <= + allowed_receiver_instance_type_range_start && + allowed_receiver_instance_type_range_start <= + allowed_receiver_instance_type_range_end && + allowed_receiver_instance_type_range_end <= + i::Internals::kLastEmbedderJSApiObjectType, + "FunctionTemplate::New", + "allowed receiver instance type range is outside the " + "range of valid JSApiObject types")) { + return Local(); + } + templ->SetAllowedReceiverInstanceTypeRange( + allowed_receiver_instance_type_range_start, + allowed_receiver_instance_type_range_end); } - - ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate); - return FunctionTemplateNew( - i_isolate, callback, data, signature, length, behavior, false, - Local(), side_effect_type, - c_function ? MemorySpan{c_function, 1} - : MemorySpan{}, - instance_type, allowed_receiver_instance_type_range_start, - allowed_receiver_instance_type_range_end); + return Utils::ToLocal(templ); } Local FunctionTemplate::NewWithCFunctionOverloads( @@ -1379,9 +1365,10 @@ Local FunctionTemplate::NewWithCFunctionOverloads( } ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate); - return FunctionTemplateNew(i_isolate, callback, data, signature, length, - behavior, false, Local(), - side_effect_type, c_function_overloads); + i::Handle templ = FunctionTemplateNew( + i_isolate, callback, data, signature, length, behavior, false, + Local(), side_effect_type, c_function_overloads); + return Utils::ToLocal(templ); } Local FunctionTemplate::NewWithCache( @@ -1391,9 +1378,10 @@ Local FunctionTemplate::NewWithCache( i::Isolate* i_isolate = reinterpret_cast(v8_isolate); API_RCS_SCOPE(i_isolate, FunctionTemplate, NewWithCache); ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate); - return FunctionTemplateNew(i_isolate, callback, data, signature, length, - ConstructorBehavior::kAllow, false, cache_property, - side_effect_type); + i::Handle templ = FunctionTemplateNew( + i_isolate, callback, data, signature, length, ConstructorBehavior::kAllow, + false, cache_property, side_effect_type); + return Utils::ToLocal(templ); } Local Signature::New(Isolate* v8_isolate, @@ -1416,14 +1404,15 @@ void FunctionTemplate::SetCallHandler( i::Isolate* i_isolate = info->GetIsolateChecked(); ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate); i::HandleScope scope(i_isolate); - i::Handle obj = i_isolate->factory()->NewCallHandlerInfo( - side_effect_type == SideEffectType::kHasNoSideEffect); - obj->set_owner_template(*info); - obj->set_callback(i_isolate, reinterpret_cast(callback)); + info->set_has_side_effects(side_effect_type != + SideEffectType::kHasNoSideEffect); + info->set_callback(i_isolate, reinterpret_cast(callback)); if (data.IsEmpty()) { data = v8::Undefined(reinterpret_cast(i_isolate)); } - obj->set_data(*Utils::OpenDirectHandle(*data)); + // "Release" callback and callback data fields. 
+ info->set_callback_data(*Utils::OpenDirectHandle(*data), kReleaseStore); + if (!c_function_overloads.empty()) { // Stores the data for a sequence of CFunction overloads into a single // FixedArray, as [address_0, signature_0, ... address_n-1, signature_n-1]. @@ -1447,7 +1436,6 @@ void FunctionTemplate::SetCallHandler( i::FunctionTemplateInfo::SetCFunctionOverloads(i_isolate, info, function_overloads); } - info->set_call_code(*obj, kReleaseStore); } namespace { @@ -1490,24 +1478,25 @@ i::Handle MakeAccessorInfo(i::Isolate* i_isolate, } // namespace Local FunctionTemplate::InstanceTemplate() { - auto handle = Utils::OpenHandle(this, true); - if (!Utils::ApiCheck(!handle.is_null(), + auto constructor = Utils::OpenHandle(this, true); + if (!Utils::ApiCheck(!constructor.is_null(), "v8::FunctionTemplate::InstanceTemplate()", "Reading from empty handle")) { return Local(); } - i::Isolate* i_isolate = handle->GetIsolateChecked(); + i::Isolate* i_isolate = constructor->GetIsolateChecked(); ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate); - if (i::IsUndefined(handle->GetInstanceTemplate(), i_isolate)) { - Local templ = - ObjectTemplate::New(i_isolate, ToApiHandle(handle)); - i::FunctionTemplateInfo::SetInstanceTemplate(i_isolate, handle, - Utils::OpenHandle(*templ)); - } - return Utils::ToLocal(i::direct_handle(i::ObjectTemplateInfo::cast( - handle->GetInstanceTemplate()), - i_isolate), - i_isolate); + auto maybe_templ = constructor->GetInstanceTemplate(); + if (!i::IsUndefined(maybe_templ, i_isolate)) { + return Utils::ToLocal( + i::direct_handle(i::ObjectTemplateInfo::cast(maybe_templ), i_isolate), + i_isolate); + } + constexpr bool do_not_cache = false; + i::Handle templ = + i_isolate->factory()->NewObjectTemplateInfo(constructor, do_not_cache); + i::FunctionTemplateInfo::SetInstanceTemplate(i_isolate, constructor, templ); + return Utils::ToLocal(templ); } void FunctionTemplate::SetLength(int length) { @@ -1554,12 +1543,14 @@ void FunctionTemplate::RemovePrototype() { Local ObjectTemplate::New( Isolate* v8_isolate, v8::Local constructor) { - return New(reinterpret_cast(v8_isolate), constructor); -} - -Local ObjectTemplate::New( - i::Isolate* i_isolate, v8::Local constructor) { - return ObjectTemplateNew(i_isolate, constructor, false); + auto i_isolate = reinterpret_cast(v8_isolate); + API_RCS_SCOPE(i_isolate, ObjectTemplate, New); + ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate); + constexpr bool do_not_cache = false; + i::Handle obj = + i_isolate->factory()->NewObjectTemplateInfo( + Utils::OpenDirectHandle(*constructor, true), do_not_cache); + return Utils::ToLocal(obj); } namespace { @@ -1708,7 +1699,8 @@ template CreateInterceptorInfo( i::Isolate* i_isolate, Getter getter, Setter setter, Query query, Descriptor descriptor, Deleter remover, Enumerator enumerator, - Definer definer, Local data, PropertyHandlerFlags flags) { + Definer definer, Local data, + base::Flags flags) { auto obj = i::Handle::cast(i_isolate->factory()->NewStruct( i::INTERCEPTOR_INFO_TYPE, i::AllocationType::kOld)); @@ -1730,13 +1722,12 @@ i::Handle CreateInterceptorInfo( SET_FIELD_WRAPPED(i_isolate, obj, set_definer, definer); } obj->set_can_intercept_symbols( - !(static_cast(flags) & - static_cast(PropertyHandlerFlags::kOnlyInterceptStrings))); - obj->set_non_masking(static_cast(flags) & - static_cast(PropertyHandlerFlags::kNonMasking)); - obj->set_has_no_side_effect( - static_cast(flags) & - static_cast(PropertyHandlerFlags::kHasNoSideEffect)); + !(flags & PropertyHandlerFlags::kOnlyInterceptStrings)); + 
obj->set_non_masking(flags & PropertyHandlerFlags::kNonMasking); + obj->set_has_no_side_effect(flags & PropertyHandlerFlags::kHasNoSideEffect); + + obj->set_has_new_callbacks_signature( + flags & PropertyHandlerFlags::kInternalNewCallbacksSignatures); if (data.IsEmpty()) { data = v8::Undefined(reinterpret_cast(i_isolate)); @@ -1750,7 +1741,8 @@ template CreateNamedInterceptorInfo( i::Isolate* i_isolate, Getter getter, Setter setter, Query query, Descriptor descriptor, Deleter remover, Enumerator enumerator, - Definer definer, Local data, PropertyHandlerFlags flags) { + Definer definer, Local data, + base::Flags flags) { auto interceptor = CreateInterceptorInfo(i_isolate, getter, setter, query, descriptor, remover, enumerator, definer, data, flags); @@ -1763,7 +1755,8 @@ template CreateIndexedInterceptorInfo( i::Isolate* i_isolate, Getter getter, Setter setter, Query query, Descriptor descriptor, Deleter remover, Enumerator enumerator, - Definer definer, Local data, PropertyHandlerFlags flags) { + Definer definer, Local data, + base::Flags flags) { auto interceptor = CreateInterceptorInfo(i_isolate, getter, setter, query, descriptor, remover, enumerator, definer, data, flags); @@ -1891,15 +1884,17 @@ void ObjectTemplate::SetCallAsFunctionHandler(FunctionCallback callback, i::HandleScope scope(i_isolate); auto cons = EnsureConstructor(i_isolate, this); EnsureNotPublished(cons, "v8::ObjectTemplate::SetCallAsFunctionHandler"); - i::Handle obj = - i_isolate->factory()->NewCallHandlerInfo(); - obj->set_owner_template(*Utils::OpenDirectHandle(this)); - obj->set_callback(i_isolate, reinterpret_cast(callback)); - if (data.IsEmpty()) { - data = v8::Undefined(reinterpret_cast(i_isolate)); - } - obj->set_data(*Utils::OpenDirectHandle(*data)); - i::FunctionTemplateInfo::SetInstanceCallHandler(i_isolate, cons, obj); + DCHECK_NOT_NULL(callback); + + // This template is just a container for callback and data values and thus + // it's not supposed to be instantiated. Don't cache it. + constexpr bool do_not_cache = true; + constexpr int length = 0; + i::Handle templ = + i_isolate->factory()->NewFunctionTemplateInfo(length, do_not_cache); + templ->set_is_object_template_call_handler(true); + Utils::ToLocal(templ)->SetCallHandler(callback, data); + i::FunctionTemplateInfo::SetInstanceCallHandler(i_isolate, cons, templ); } int ObjectTemplate::InternalFieldCount() const { @@ -2231,10 +2226,6 @@ std::vector Script::GetProducedCompileHints() const { CHECK(IsSmi(item)); result.push_back(i::Smi::ToInt(item)); } - // Clear the data; the embedder can still request more data later, but it'll - // have to keep track of the original data itself. 
- script->set_compiled_lazy_function_positions( - i::ReadOnlyRoots(i_isolate).undefined_value()); } return result; } @@ -2472,16 +2463,6 @@ MaybeLocal Module::Evaluate(Local context) { RETURN_ESCAPED(result); } -Local Module::CreateSyntheticModule( - Isolate* v8_isolate, Local module_name, - const std::vector>& export_names, - v8::Module::SyntheticModuleEvaluationSteps evaluation_steps) { - return CreateSyntheticModule( - v8_isolate, module_name, - MemorySpan>(export_names.begin(), export_names.end()), - evaluation_steps); -} - Local Module::CreateSyntheticModule( Isolate* v8_isolate, Local module_name, const MemorySpan>& export_names, @@ -2522,33 +2503,6 @@ Maybe Module::SetSyntheticModuleExport(Isolate* v8_isolate, return Just(true); } -std::vector, Local>> -Module::GetStalledTopLevelAwaitMessage(Isolate* isolate) { - auto i_isolate = reinterpret_cast(isolate); - auto self = Utils::OpenDirectHandle(this); - Utils::ApiCheck(i::IsSourceTextModule(*self), - "v8::Module::GetStalledTopLevelAwaitMessage", - "v8::Module::GetStalledTopLevelAwaitMessage must only be " - "called on a SourceTextModule"); - std::vector< - std::tuple, i::Handle>> - stalled_awaits = i::DirectHandle::cast(self) - ->GetStalledTopLevelAwaitMessages(i_isolate); - - std::vector, Local>> result; - size_t stalled_awaits_count = stalled_awaits.size(); - if (stalled_awaits_count == 0) { - return result; - } - result.reserve(stalled_awaits_count); - for (size_t i = 0; i < stalled_awaits_count; ++i) { - auto [module, message] = stalled_awaits[i]; - result.push_back(std::make_tuple(ToApiHandle(module), - ToApiHandle(message))); - } - return result; -} - std::pair, LocalVector> Module::GetStalledTopLevelAwaitMessages(Isolate* isolate) { auto i_isolate = reinterpret_cast(isolate); @@ -3801,10 +3755,17 @@ bool Value::IsTypedArray() const { i::JSTypedArray::cast(obj)->type() == i::kExternal##Type##Array; \ } -TYPED_ARRAYS(VALUE_IS_TYPED_ARRAY) - +TYPED_ARRAYS_BASE(VALUE_IS_TYPED_ARRAY) #undef VALUE_IS_TYPED_ARRAY +bool Value::IsFloat16Array() const { + Utils::ApiCheck(i::v8_flags.js_float16array, "Value::IsFloat16Array", + "Float16Array is not supported"); + auto obj = *Utils::OpenDirectHandle(this); + return i::IsJSTypedArray(obj) && + i::JSTypedArray::cast(obj)->type() == i::kExternalFloat16Array; +} + bool Value::IsDataView() const { auto obj = *Utils::OpenDirectHandle(this); return IsJSDataView(obj) || IsJSRabGsabDataView(obj); @@ -4331,10 +4292,19 @@ void v8::TypedArray::CheckCast(Value* that) { "v8::" #Type "Array::Cast()", "Value is not a " #Type "Array"); \ } -TYPED_ARRAYS(CHECK_TYPED_ARRAY_CAST) - +TYPED_ARRAYS_BASE(CHECK_TYPED_ARRAY_CAST) #undef CHECK_TYPED_ARRAY_CAST +void v8::Float16Array::CheckCast(Value* that) { + Utils::ApiCheck(i::v8_flags.js_float16array, "v8::Float16Array::Cast", + "Float16Array is not supported"); + auto obj = *Utils::OpenHandle(that); + Utils::ApiCheck( + i::IsJSTypedArray(obj) && + i::JSTypedArray::cast(obj)->type() == i::kExternalFloat16Array, + "v8::Float16Array::Cast()", "Value is not a Float16Array"); +} + void v8::DataView::CheckCast(Value* that) { auto obj = *Utils::OpenDirectHandle(that); Utils::ApiCheck(i::IsJSDataView(obj) || IsJSRabGsabDataView(obj), @@ -4861,6 +4831,7 @@ Local v8::Object::FindInstanceInPrototypeChain( i::PrototypeIterator iter(i_isolate, *self, i::kStartAtReceiver); i::Tagged tmpl_info = *Utils::OpenDirectHandle(*tmpl); + if (!IsJSObject(iter.GetCurrent())) return Local(); while (!tmpl_info->IsTemplateFor(iter.GetCurrent())) { iter.Advance(); if (iter.IsAtEnd()) return 
Local(); @@ -5451,7 +5422,7 @@ MaybeLocal Function::New(Local context, auto templ = FunctionTemplateNew(i_isolate, callback, data, Local(), length, behavior, true, Local(), side_effect_type); - return templ->GetFunction(context); + return Utils::ToLocal(templ)->GetFunction(context); } MaybeLocal Function::NewInstance(Local context, int argc, @@ -5477,15 +5448,12 @@ MaybeLocal Function::NewInstanceWithSideEffectType( if (should_set_has_no_side_effect) { CHECK(IsJSFunction(*self) && i::JSFunction::cast(*self)->shared()->IsApiFunction()); - i::Tagged obj = - i::JSFunction::cast(*self)->shared()->api_func_data()->call_code( - kAcquireLoad); - if (i::IsCallHandlerInfo(obj)) { - i::Tagged handler_info = - i::CallHandlerInfo::cast(obj); - if (handler_info->IsSideEffectCallHandlerInfo()) { + i::Tagged func_data = + i::JSFunction::cast(*self)->shared()->api_func_data(); + if (func_data->has_callback(i_isolate)) { + if (func_data->has_side_effects()) { i_isolate->debug()->IgnoreSideEffectsOnNextCallTo( - handle(handler_info, i_isolate)); + handle(func_data, i_isolate)); } } } @@ -6593,7 +6561,7 @@ struct InvokeBootstrapper { i::MaybeHandle maybe_global_proxy, v8::Local global_proxy_template, v8::ExtensionConfiguration* extensions, size_t context_snapshot_index, - v8::DeserializeInternalFieldsCallback embedder_fields_deserializer, + i::DeserializeEmbedderFieldsCallback embedder_fields_deserializer, v8::MicrotaskQueue* microtask_queue) { return i_isolate->bootstrapper()->CreateEnvironment( maybe_global_proxy, global_proxy_template, extensions, @@ -6608,7 +6576,7 @@ struct InvokeBootstrapper { i::MaybeHandle maybe_global_proxy, v8::Local global_proxy_template, v8::ExtensionConfiguration* extensions, size_t context_snapshot_index, - v8::DeserializeInternalFieldsCallback embedder_fields_deserializer, + i::DeserializeEmbedderFieldsCallback embedder_fields_deserializer, v8::MicrotaskQueue* microtask_queue) { USE(extensions); USE(context_snapshot_index); @@ -6622,7 +6590,7 @@ static i::Handle CreateEnvironment( i::Isolate* i_isolate, v8::ExtensionConfiguration* extensions, v8::MaybeLocal maybe_global_template, v8::MaybeLocal maybe_global_proxy, size_t context_snapshot_index, - v8::DeserializeInternalFieldsCallback embedder_fields_deserializer, + i::DeserializeEmbedderFieldsCallback embedder_fields_deserializer, v8::MicrotaskQueue* microtask_queue) { i::Handle result; @@ -6727,7 +6695,7 @@ Local NewContext( v8::Isolate* external_isolate, v8::ExtensionConfiguration* extensions, v8::MaybeLocal global_template, v8::MaybeLocal global_object, size_t context_snapshot_index, - v8::DeserializeInternalFieldsCallback embedder_fields_deserializer, + i::DeserializeEmbedderFieldsCallback embedder_fields_deserializer, v8::MicrotaskQueue* microtask_queue) { i::Isolate* i_isolate = reinterpret_cast(external_isolate); // TODO(jkummerow): This is for crbug.com/713699. 
Remove it if it doesn't @@ -6751,27 +6719,34 @@ Local v8::Context::New( v8::Isolate* external_isolate, v8::ExtensionConfiguration* extensions, v8::MaybeLocal global_template, v8::MaybeLocal global_object, - DeserializeInternalFieldsCallback internal_fields_deserializer, - v8::MicrotaskQueue* microtask_queue) { - return NewContext(external_isolate, extensions, global_template, - global_object, 0, internal_fields_deserializer, - microtask_queue); + v8::DeserializeInternalFieldsCallback internal_fields_deserializer, + v8::MicrotaskQueue* microtask_queue, + v8::DeserializeContextDataCallback context_callback_deserializer) { + return NewContext( + external_isolate, extensions, global_template, global_object, 0, + i::DeserializeEmbedderFieldsCallback(internal_fields_deserializer, + context_callback_deserializer), + microtask_queue); } MaybeLocal v8::Context::FromSnapshot( v8::Isolate* external_isolate, size_t context_snapshot_index, - v8::DeserializeInternalFieldsCallback embedder_fields_deserializer, + v8::DeserializeInternalFieldsCallback internal_fields_deserializer, v8::ExtensionConfiguration* extensions, MaybeLocal global_object, - v8::MicrotaskQueue* microtask_queue) { + v8::MicrotaskQueue* microtask_queue, + v8::DeserializeContextDataCallback context_callback_deserializer) { size_t index_including_default_context = context_snapshot_index + 1; if (!i::Snapshot::HasContextSnapshot( reinterpret_cast(external_isolate), index_including_default_context)) { return MaybeLocal(); } - return NewContext(external_isolate, extensions, MaybeLocal(), - global_object, index_including_default_context, - embedder_fields_deserializer, microtask_queue); + return NewContext( + external_isolate, extensions, MaybeLocal(), global_object, + index_including_default_context, + i::DeserializeEmbedderFieldsCallback(internal_fields_deserializer, + context_callback_deserializer), + microtask_queue); } MaybeLocal v8::Context::NewRemoteContext( @@ -6794,7 +6769,7 @@ MaybeLocal v8::Context::NewRemoteContext( "Global template needs to have access check handlers"); i::Handle global_proxy = CreateEnvironment( i_isolate, nullptr, global_template, global_object, 0, - DeserializeInternalFieldsCallback(), nullptr); + i::DeserializeEmbedderFieldsCallback(), nullptr); if (global_proxy.is_null()) { if (i_isolate->has_exception()) i_isolate->clear_exception(); return MaybeLocal(); @@ -6850,6 +6825,7 @@ bool IsJSReceiverSafeToFreeze(i::InstanceType obj_type) { /* Function types */ case i::BIGINT64_TYPED_ARRAY_CONSTRUCTOR_TYPE: case i::BIGUINT64_TYPED_ARRAY_CONSTRUCTOR_TYPE: + case i::FLOAT16_TYPED_ARRAY_CONSTRUCTOR_TYPE: case i::FLOAT32_TYPED_ARRAY_CONSTRUCTOR_TYPE: case i::FLOAT64_TYPED_ARRAY_CONSTRUCTOR_TYPE: case i::INT16_TYPED_ARRAY_CONSTRUCTOR_TYPE: @@ -7219,32 +7195,6 @@ void Context::SetAbortScriptExecution( } } -Local Context::GetContinuationPreservedEmbedderData() const { - auto context = Utils::OpenDirectHandle(this); - i::Isolate* i_isolate = context->GetIsolate(); - -#ifdef V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA - return ToApiHandle( - i::direct_handle( - context->native_context()->continuation_preserved_embedder_data(), - i_isolate), - i_isolate); -#else // V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA - return v8::Undefined(reinterpret_cast(i_isolate)); -#endif // V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA -} - -void Context::SetContinuationPreservedEmbedderData(Local data) { -#ifdef V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA - auto context = Utils::OpenDirectHandle(this); - i::Isolate* i_isolate = 
context->GetIsolate(); - if (data.IsEmpty()) - data = v8::Undefined(reinterpret_cast(i_isolate)); - context->native_context()->set_continuation_preserved_embedder_data( - i::HeapObject::cast(*Utils::OpenDirectHandle(*data))); -#endif // V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA -} - void v8::Context::SetPromiseHooks(Local init_hook, Local before_hook, Local after_hook, @@ -9161,9 +9111,48 @@ static_assert(v8::TypedArray::kMaxByteLength == i::JSTypedArray::kMaxByteLength, return Utils::ToLocal##Type##Array(obj); \ } -TYPED_ARRAYS(TYPED_ARRAY_NEW) +TYPED_ARRAYS_BASE(TYPED_ARRAY_NEW) #undef TYPED_ARRAY_NEW +Local Float16Array::New(Local array_buffer, + size_t byte_offset, size_t length) { + Utils::ApiCheck(i::v8_flags.js_float16array, "v8::Float16Array::New", + "Float16Array is not supported"); + i::Isolate* i_isolate = Utils::OpenDirectHandle(*array_buffer)->GetIsolate(); + API_RCS_SCOPE(i_isolate, Float16Array, New); + ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate); + if (!Utils::ApiCheck( + length <= kMaxLength, + "v8::Float16Array::New(Local, size_t, size_t)", + "length exceeds max allowed value")) { + return Local(); + } + auto buffer = Utils::OpenHandle(*array_buffer); + i::Handle obj = i_isolate->factory()->NewJSTypedArray( + i::kExternalFloat16Array, buffer, byte_offset, length); + return Utils::ToLocalFloat16Array(obj); +} +Local Float16Array::New( + Local shared_array_buffer, size_t byte_offset, + size_t length) { + Utils::ApiCheck(i::v8_flags.js_float16array, "v8::Float16Array::New", + "Float16Array is not supported"); + i::Isolate* i_isolate = + Utils::OpenDirectHandle(*shared_array_buffer)->GetIsolate(); + API_RCS_SCOPE(i_isolate, Float16Array, New); + ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate); + if (!Utils::ApiCheck( + length <= kMaxLength, + "v8::Float16Array::New(Local, size_t, size_t)", + "length exceeds max allowed value")) { + return Local(); + } + auto buffer = Utils::OpenHandle(*shared_array_buffer); + i::Handle obj = i_isolate->factory()->NewJSTypedArray( + i::kExternalFloat16Array, buffer, byte_offset, length); + return Utils::ToLocalFloat16Array(obj); +} + // TODO(v8:11111): Support creating length tracking DataViews via the API. Local DataView::New(Local array_buffer, size_t byte_offset, size_t byte_length) { @@ -9619,7 +9608,7 @@ void Isolate::RequestGarbageCollectionForTesting(GarbageCollectionType type, base::Optional stack_scope; if (type == kFullGarbageCollection) { stack_scope.emplace(reinterpret_cast(this)->heap(), - i::EmbedderStackStateScope::kExplicitInvocation, + i::EmbedderStackStateOrigin::kExplicitInvocation, stack_state); } RequestGarbageCollectionForTesting(type); @@ -11678,10 +11667,8 @@ inline void InvokeFunctionCallback( ApiCallbackExitFrame* frame = ApiCallbackExitFrame::cast(it.frame()); Tagged fti = FunctionTemplateInfo::cast(frame->target()); - Tagged call_handler_info = - CallHandlerInfo::cast(fti->call_code(kAcquireLoad)); if (!i_isolate->debug()->PerformSideEffectCheckForCallback( - handle(call_handler_info, i_isolate))) { + handle(fti, i_isolate))) { // Failed side effect check. return; } diff --git a/deps/v8/src/api/api.h b/deps/v8/src/api/api.h index bbc42ef09680bd..40b419db777f96 100644 --- a/deps/v8/src/api/api.h +++ b/deps/v8/src/api/api.h @@ -48,15 +48,6 @@ class Script; class EphemeronTable; } // namespace debug -// Constants used in the implementation of the API. 
The most natural thing -// would usually be to place these with the classes that use them, but -// we want to keep them out of v8.h because it is an externally -// visible file. -class Consts { - public: - enum TemplateType { FUNCTION_TEMPLATE = 0, OBJECT_TEMPLATE = 1 }; -}; - template inline T ToCData(v8::internal::Tagged obj); @@ -160,6 +151,7 @@ class RegisteredExtension { V(Int16Array, JSTypedArray) \ V(Uint32Array, JSTypedArray) \ V(Int32Array, JSTypedArray) \ + V(Float16Array, JSTypedArray) \ V(Float32Array, JSTypedArray) \ V(Float64Array, JSTypedArray) \ V(DataView, JSDataViewOrRabGsabDataView) \ diff --git a/deps/v8/src/asmjs/asm-parser.cc b/deps/v8/src/asmjs/asm-parser.cc index c4d9645fa6c462..c5ff60d671f19d 100644 --- a/deps/v8/src/asmjs/asm-parser.cc +++ b/deps/v8/src/asmjs/asm-parser.cc @@ -369,8 +369,8 @@ void AsmJsParser::ValidateModule() { uint32_t import_index = module_builder_->AddGlobalImport( global_import.import_name, global_import.value_type, false /* mutability */); - start->EmitWithI32V(kExprGlobalGet, import_index); - start->EmitWithI32V(kExprGlobalSet, VarIndex(global_import.var_info)); + start->EmitWithU32V(kExprGlobalGet, import_index); + start->EmitWithU32V(kExprGlobalSet, VarIndex(global_import.var_info)); } start->Emit(kExprEnd); FunctionSig::Builder b(zone(), 0, 0); @@ -961,7 +961,7 @@ void AsmJsParser::ValidateFunctionLocals(size_t param_count, } else { FAIL("Bad local variable definition"); } - current_function_builder_->EmitWithI32V(kExprGlobalGet, + current_function_builder_->EmitWithU32V(kExprGlobalGet, VarIndex(sinfo)); current_function_builder_->EmitSetLocal(info->index); } else if (sinfo->type->IsA(stdlib_fround_)) { @@ -1275,8 +1275,7 @@ void AsmJsParser::BreakStatement() { if (depth < 0) { FAIL("Illegal break"); } - current_function_builder_->Emit(kExprBr); - current_function_builder_->EmitI32V(depth); + current_function_builder_->EmitWithU32V(kExprBr, depth); SkipSemicolon(); } @@ -1292,7 +1291,7 @@ void AsmJsParser::ContinueStatement() { if (depth < 0) { FAIL("Illegal continue"); } - current_function_builder_->EmitWithI32V(kExprBr, depth); + current_function_builder_->EmitWithU32V(kExprBr, depth); SkipSemicolon(); } @@ -1337,9 +1336,9 @@ void AsmJsParser::SwitchStatement() { current_function_builder_->EmitGetLocal(tmp); current_function_builder_->EmitI32Const(c); current_function_builder_->Emit(kExprI32Eq); - current_function_builder_->EmitWithI32V(kExprBrIf, table_pos++); + current_function_builder_->EmitWithU32V(kExprBrIf, table_pos++); } - current_function_builder_->EmitWithI32V(kExprBr, table_pos++); + current_function_builder_->EmitWithU32V(kExprBr, table_pos++); while (!failed_ && Peek(TOK(case))) { current_function_builder_->Emit(kExprEnd); BareEnd(); @@ -1455,7 +1454,7 @@ AsmType* AsmJsParser::Identifier() { if (info->kind != VarKind::kGlobal) { FAILn("Undefined global variable"); } - current_function_builder_->EmitWithI32V(kExprGlobalGet, VarIndex(info)); + current_function_builder_->EmitWithU32V(kExprGlobalGet, VarIndex(info)); return info->type; } UNREACHABLE(); diff --git a/deps/v8/src/ast/scopes.cc b/deps/v8/src/ast/scopes.cc index 581156baf34e6d..39e3a8d5d59acb 100644 --- a/deps/v8/src/ast/scopes.cc +++ b/deps/v8/src/ast/scopes.cc @@ -552,32 +552,35 @@ bool Scope::IsReparsedMemberInitializerScope() const { #endif DeclarationScope* Scope::AsDeclarationScope() { - DCHECK(is_declaration_scope()); + // Here and below: if an attacker corrupts the in-sandbox SFI::unique_id or + fields of a Script object, we can get confused about which
type of scope + // we're operating on. These CHECKs defend against that. + SBXCHECK(is_declaration_scope()); return static_cast(this); } const DeclarationScope* Scope::AsDeclarationScope() const { - DCHECK(is_declaration_scope()); + SBXCHECK(is_declaration_scope()); return static_cast(this); } ModuleScope* Scope::AsModuleScope() { - DCHECK(is_module_scope()); + SBXCHECK(is_module_scope()); return static_cast(this); } const ModuleScope* Scope::AsModuleScope() const { - DCHECK(is_module_scope()); + SBXCHECK(is_module_scope()); return static_cast(this); } ClassScope* Scope::AsClassScope() { - DCHECK(is_class_scope()); + SBXCHECK(is_class_scope()); return static_cast(this); } const ClassScope* Scope::AsClassScope() const { - DCHECK(is_class_scope()); + SBXCHECK(is_class_scope()); return static_cast(this); } diff --git a/deps/v8/src/base/build_config.h b/deps/v8/src/base/build_config.h index f25bd3f9b57c43..9ed4c8f10263f0 100644 --- a/deps/v8/src/base/build_config.h +++ b/deps/v8/src/base/build_config.h @@ -72,10 +72,11 @@ constexpr int kPageSizeBits = 18; // to that size needs to be individually protectable via // {base::OS::SetPermission} and friends. #if (defined(V8_OS_MACOS) && defined(V8_HOST_ARCH_ARM64)) || \ - (defined(V8_OS_ANDROID) && defined(V8_HOST_ARCH_ARM64)) || \ + (defined(V8_OS_ANDROID) && \ + (defined(V8_HOST_ARCH_ARM64) || defined(V8_HOST_ARCH_X64))) || \ defined(V8_HOST_ARCH_LOONG64) || defined(V8_HOST_ARCH_MIPS64) || \ defined(V8_OS_IOS) -// Android on arm64 has experimental support for 16kB pages. +// Android 64 bit has experimental support for 16kB pages. // MacOS & iOS on arm64 uses 16kB pages. // LOONG64 and MIPS64 also use 16kB pages. constexpr int kMinimumOSPageSize = 16 * 1024; diff --git a/deps/v8/src/base/macros.h b/deps/v8/src/base/macros.h index 82c0d04550402a..0254d0dae03f63 100644 --- a/deps/v8/src/base/macros.h +++ b/deps/v8/src/base/macros.h @@ -368,9 +368,9 @@ bool is_inbounds(float_t v) { // Setup for Windows shared library export. #define V8_EXPORT_ENUM #ifdef BUILDING_V8_SHARED_PRIVATE -#define V8_EXPORT_PRIVATE +#define V8_EXPORT_PRIVATE __declspec(dllexport) #elif USING_V8_SHARED_PRIVATE -#define V8_EXPORT_PRIVATE +#define V8_EXPORT_PRIVATE __declspec(dllimport) #else #define V8_EXPORT_PRIVATE #endif // BUILDING_V8_SHARED @@ -380,8 +380,8 @@ bool is_inbounds(float_t v) { // Setup for Linux shared library export. #if V8_HAS_ATTRIBUTE_VISIBILITY #ifdef BUILDING_V8_SHARED_PRIVATE -#define V8_EXPORT_PRIVATE -#define V8_EXPORT_ENUM +#define V8_EXPORT_PRIVATE __attribute__((visibility("default"))) +#define V8_EXPORT_ENUM V8_EXPORT_PRIVATE #else #define V8_EXPORT_PRIVATE #define V8_EXPORT_ENUM @@ -427,7 +427,18 @@ bool is_inbounds(float_t v) { #define IF_TARGET_ARCH_64_BIT(V, ...) EXPAND(V(__VA_ARGS__)) #else #define IF_TARGET_ARCH_64_BIT(V, ...) -#endif +#endif // V8_TARGET_ARCH_64_BIT + +// Defines IF_OFFICIAL_BUILD and IF_NO_OFFICIAL_BUILD, to be used in macro lists +// for elements that should only be there in official / non-official builds. +#ifdef OFFICIAL_BUILD +// EXPAND is needed to work around MSVC's broken __VA_ARGS__ expansion. +#define IF_OFFICIAL_BUILD(V, ...) EXPAND(V(__VA_ARGS__)) +#define IF_NO_OFFICIAL_BUILD(V, ...) +#else +#define IF_OFFICIAL_BUILD(V, ...) +#define IF_NO_OFFICIAL_BUILD(V, ...) EXPAND(V(__VA_ARGS__)) +#endif // OFFICIAL_BUILD #ifdef GOOGLE3 // Disable FRIEND_TEST macro in Google3. 
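A note on the IF_OFFICIAL_BUILD / IF_NO_OFFICIAL_BUILD helpers added to macros.h above: like the neighboring IF_TARGET_ARCH_64_BIT, they are meant to gate individual entries of an X-macro list. Below is a minimal sketch of the intended usage pattern, assuming src/base/macros.h is included; the list name and entries are hypothetical, for illustration only.

    #include "src/base/macros.h"

    // An X-macro list with one entry that exists only in official builds and
    // one that exists only in non-official builds (hypothetical names).
    #define EXAMPLE_COUNTER_LIST(V)            \
      V(parse_time)                            \
      IF_OFFICIAL_BUILD(V, official_only_stat) \
      IF_NO_OFFICIAL_BUILD(V, debug_only_stat)

    // Declares one int member per entry that survives the build-type filter.
    #define DECLARE_COUNTER(name) int name##_ = 0;
    struct ExampleCounters {
      EXAMPLE_COUNTER_LIST(DECLARE_COUNTER)
    };
    #undef DECLARE_COUNTER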
diff --git a/deps/v8/src/base/numbers/fast-dtoa.cc b/deps/v8/src/base/numbers/fast-dtoa.cc index 87b424c5812a7c..4dee33c98e7261 100644 --- a/deps/v8/src/base/numbers/fast-dtoa.cc +++ b/deps/v8/src/base/numbers/fast-dtoa.cc @@ -10,7 +10,6 @@ #include "src/base/numbers/cached-powers.h" #include "src/base/numbers/diy-fp.h" #include "src/base/numbers/double.h" -#include "src/base/v8-fallthrough.h" namespace v8 { namespace base { @@ -265,7 +264,7 @@ static inline void BiggestPowerTen(uint32_t number, int number_bits, *exponent = 9; break; } - V8_FALLTHROUGH; + [[fallthrough]]; case 29: case 28: case 27: @@ -274,7 +273,7 @@ static inline void BiggestPowerTen(uint32_t number, int number_bits, *exponent = 8; break; } - V8_FALLTHROUGH; + [[fallthrough]]; case 26: case 25: case 24: @@ -283,7 +282,7 @@ static inline void BiggestPowerTen(uint32_t number, int number_bits, *exponent = 7; break; } - V8_FALLTHROUGH; + [[fallthrough]]; case 23: case 22: case 21: @@ -293,7 +292,7 @@ static inline void BiggestPowerTen(uint32_t number, int number_bits, *exponent = 6; break; } - V8_FALLTHROUGH; + [[fallthrough]]; case 19: case 18: case 17: @@ -302,7 +301,7 @@ static inline void BiggestPowerTen(uint32_t number, int number_bits, *exponent = 5; break; } - V8_FALLTHROUGH; + [[fallthrough]]; case 16: case 15: case 14: @@ -311,7 +310,7 @@ static inline void BiggestPowerTen(uint32_t number, int number_bits, *exponent = 4; break; } - V8_FALLTHROUGH; + [[fallthrough]]; case 13: case 12: case 11: @@ -321,7 +320,7 @@ static inline void BiggestPowerTen(uint32_t number, int number_bits, *exponent = 3; break; } - V8_FALLTHROUGH; + [[fallthrough]]; case 9: case 8: case 7: @@ -330,7 +329,7 @@ static inline void BiggestPowerTen(uint32_t number, int number_bits, *exponent = 2; break; } - V8_FALLTHROUGH; + [[fallthrough]]; case 6: case 5: case 4: @@ -339,7 +338,7 @@ static inline void BiggestPowerTen(uint32_t number, int number_bits, *exponent = 1; break; } - V8_FALLTHROUGH; + [[fallthrough]]; case 3: case 2: case 1: @@ -348,7 +347,7 @@ static inline void BiggestPowerTen(uint32_t number, int number_bits, *exponent = 0; break; } - V8_FALLTHROUGH; + [[fallthrough]]; case 0: *power = 0; *exponent = -1; diff --git a/deps/v8/src/base/optional.h b/deps/v8/src/base/optional.h index 26ce8384429bf3..6ad1b6f74293b0 100644 --- a/deps/v8/src/base/optional.h +++ b/deps/v8/src/base/optional.h @@ -16,7 +16,7 @@ namespace base { // These aliases are deprecated, use std::optional directly. 
template -using Optional = std::optional; +using Optional [[deprecated]] = std::optional; using std::in_place; using std::make_optional; diff --git a/deps/v8/src/base/platform/platform-linux.cc b/deps/v8/src/base/platform/platform-linux.cc index c0e3743410b394..57ef0b431260b1 100644 --- a/deps/v8/src/base/platform/platform-linux.cc +++ b/deps/v8/src/base/platform/platform-linux.cc @@ -218,7 +218,7 @@ std::unique_ptr> ParseProcSelfMaps( } fclose(fp); - if (!error && result->size()) return result; + if (!error && !result->empty()) return result; return nullptr; } diff --git a/deps/v8/src/base/small-vector.h b/deps/v8/src/base/small-vector.h index b1b5d641985dfe..edaab3a7a6b42f 100644 --- a/deps/v8/src/base/small-vector.h +++ b/deps/v8/src/base/small-vector.h @@ -198,12 +198,12 @@ class SmallVector { end_ = begin_ + new_size; } - void resize_and_init(size_t new_size) { + void resize_and_init(size_t new_size, const T& initial_value = {}) { static_assert(std::is_trivially_destructible_v); if (new_size > capacity()) Grow(new_size); T* new_end = begin_ + new_size; if (new_end > end_) { - std::uninitialized_fill(end_, new_end, T{}); + std::uninitialized_fill(end_, new_end, initial_value); } end_ = new_end; } diff --git a/deps/v8/src/base/v8-fallthrough.h b/deps/v8/src/base/v8-fallthrough.h deleted file mode 100644 index a6dc6972d6d21d..00000000000000 --- a/deps/v8/src/base/v8-fallthrough.h +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2018 the V8 project authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#ifndef V8_BASE_V8_FALLTHROUGH_H_ -#define V8_BASE_V8_FALLTHROUGH_H_ - -// When clang suggests inserting [[clang::fallthrough]], it first checks if -// it knows of a macro expanding to it, and if so suggests inserting the -// macro. This means that this macro must be used only in code internal -// to v8, so that v8's user code doesn't end up getting suggestions -// for V8_FALLTHROUGH instead of the user-specific fallthrough macro. -// So do not include this header in any of v8's public headers -- only -// use it in src/, not in include/. -#if defined(__clang__) -#define V8_FALLTHROUGH [[clang::fallthrough]] -#else -#define V8_FALLTHROUGH -#endif - -#endif // V8_BASE_V8_FALLTHROUGH_H_ diff --git a/deps/v8/src/baseline/baseline-batch-compiler.cc b/deps/v8/src/baseline/baseline-batch-compiler.cc index e97f4be9000c1a..f934a88bbe81c8 100644 --- a/deps/v8/src/baseline/baseline-batch-compiler.cc +++ b/deps/v8/src/baseline/baseline-batch-compiler.cc @@ -99,9 +99,9 @@ class BaselineBatchCompilerJob { handles_ = isolate->NewPersistentHandles(); tasks_.reserve(batch_size); for (int i = 0; i < batch_size; i++) { - MaybeObject maybe_sfi = task_queue->get(i); + Tagged maybe_sfi = task_queue->get(i); // TODO(victorgomes): Do I need to clear the value? - task_queue->set(i, HeapObjectReference::ClearedValue(isolate)); + task_queue->set(i, ClearedValue(isolate)); Tagged obj; // Skip functions where weak reference is no longer valid. 
if (!maybe_sfi.GetHeapObjectIfWeak(&obj)) continue; @@ -283,7 +283,7 @@ void BaselineBatchCompiler::EnqueueSFI(Tagged shared) { void BaselineBatchCompiler::Enqueue(Handle shared) { EnsureQueueCapacity(); - compilation_queue_->set(last_index_++, HeapObjectReference::Weak(*shared)); + compilation_queue_->set(last_index_++, MakeWeak(*shared)); } void BaselineBatchCompiler::InstallBatch() { @@ -315,9 +315,9 @@ void BaselineBatchCompiler::CompileBatch(Handle function) { &is_compiled_scope); } for (int i = 0; i < last_index_; i++) { - MaybeObject maybe_sfi = compilation_queue_->get(i); + Tagged maybe_sfi = compilation_queue_->get(i); MaybeCompileFunction(maybe_sfi); - compilation_queue_->set(i, HeapObjectReference::ClearedValue(isolate_)); + compilation_queue_->set(i, ClearedValue(isolate_)); } ClearBatch(); } @@ -368,7 +368,8 @@ bool BaselineBatchCompiler::ShouldCompileBatch( return false; } -bool BaselineBatchCompiler::MaybeCompileFunction(MaybeObject maybe_sfi) { +bool BaselineBatchCompiler::MaybeCompileFunction( + Tagged maybe_sfi) { Tagged heapobj; // Skip functions where the weak reference is no longer valid. if (!maybe_sfi.GetHeapObjectIfWeak(&heapobj)) return false; diff --git a/deps/v8/src/baseline/baseline-batch-compiler.h b/deps/v8/src/baseline/baseline-batch-compiler.h index adfe9dfe4200b9..94aa7b9b03295f 100644 --- a/deps/v8/src/baseline/baseline-batch-compiler.h +++ b/deps/v8/src/baseline/baseline-batch-compiler.h @@ -57,7 +57,7 @@ class BaselineBatchCompiler { // Tries to compile |maybe_sfi|. Returns false if compilation was not possible // (e.g. bytecode was flushed, weak handle no longer valid, ...). - bool MaybeCompileFunction(MaybeObject maybe_sfi); + bool MaybeCompileFunction(Tagged maybe_sfi); Isolate* isolate_; diff --git a/deps/v8/src/baseline/baseline-compiler.cc b/deps/v8/src/baseline/baseline-compiler.cc index 58ea23043da452..b3b76b60ca0dca 100644 --- a/deps/v8/src/baseline/baseline-compiler.cc +++ b/deps/v8/src/baseline/baseline-compiler.cc @@ -59,11 +59,11 @@ namespace internal { namespace baseline { template -Handle BytecodeOffsetTableBuilder::ToBytecodeOffsetTable( +Handle BytecodeOffsetTableBuilder::ToBytecodeOffsetTable( IsolateT* isolate) { - if (bytes_.empty()) return isolate->factory()->empty_byte_array(); - Handle table = isolate->factory()->NewByteArray( - static_cast(bytes_.size()), AllocationType::kOld); + if (bytes_.empty()) return isolate->factory()->empty_trusted_byte_array(); + Handle table = + isolate->factory()->NewTrustedByteArray(static_cast(bytes_.size())); MemCopy(table->begin(), bytes_.data(), bytes_.size()); return table; } @@ -337,7 +337,7 @@ MaybeHandle BaselineCompiler::Build(LocalIsolate* local_isolate) { __ GetCode(local_isolate, &desc); // Allocate the bytecode offset table.
- Handle bytecode_offset_table = + Handle bytecode_offset_table = bytecode_offset_table_builder_.ToBytecodeOffsetTable(local_isolate); Factory::CodeBuilder code_builder(local_isolate, desc, CodeKind::BASELINE); @@ -934,6 +934,16 @@ void BaselineCompiler::VisitGetKeyedProperty() { IndexAsTagged(1)); // slot } +void BaselineCompiler::VisitGetEnumeratedKeyedProperty() { + DCHECK(v8_flags.enable_enumerated_keyed_access_bytecode); + CallBuiltin( + RegisterOperand(0), // object + kInterpreterAccumulatorRegister, // key + RegisterOperand(1), // enum index + RegisterOperand(2), // cache type + IndexAsTagged(3)); // slot +} + void BaselineCompiler::VisitLdaModuleVariable() { BaselineAssembler::ScratchRegisterScope scratch_scope(&basm_); Register scratch = scratch_scope.AcquireScratch(); diff --git a/deps/v8/src/baseline/baseline-compiler.h b/deps/v8/src/baseline/baseline-compiler.h index c06fdafddf44c9..ecbb5e68ddcfcc 100644 --- a/deps/v8/src/baseline/baseline-compiler.h +++ b/deps/v8/src/baseline/baseline-compiler.h @@ -37,7 +37,7 @@ class BytecodeOffsetTableBuilder { } template - Handle ToBytecodeOffsetTable(IsolateT* isolate); + Handle ToBytecodeOffsetTable(IsolateT* isolate); void Reserve(size_t size) { bytes_.reserve(size); } diff --git a/deps/v8/src/baseline/bytecode-offset-iterator.cc b/deps/v8/src/baseline/bytecode-offset-iterator.cc index f7a4d7ac66cba7..c0bed57ea2e1bc 100644 --- a/deps/v8/src/baseline/bytecode-offset-iterator.cc +++ b/deps/v8/src/baseline/bytecode-offset-iterator.cc @@ -12,8 +12,8 @@ namespace v8 { namespace internal { namespace baseline { -BytecodeOffsetIterator::BytecodeOffsetIterator(Handle mapping_table, - Handle bytecodes) +BytecodeOffsetIterator::BytecodeOffsetIterator( + Handle mapping_table, Handle bytecodes) : mapping_table_(mapping_table), data_start_address_(mapping_table_->begin()), data_length_(mapping_table_->length()), @@ -26,8 +26,8 @@ BytecodeOffsetIterator::BytecodeOffsetIterator(Handle mapping_table, Initialize(); } -BytecodeOffsetIterator::BytecodeOffsetIterator(Tagged mapping_table, - Tagged bytecodes) +BytecodeOffsetIterator::BytecodeOffsetIterator( + Tagged mapping_table, Tagged bytecodes) : data_start_address_(mapping_table->begin()), data_length_(mapping_table->length()), current_index_(0), diff --git a/deps/v8/src/baseline/bytecode-offset-iterator.h b/deps/v8/src/baseline/bytecode-offset-iterator.h index 06d02207ebc4b7..91919f2d6c9d71 100644 --- a/deps/v8/src/baseline/bytecode-offset-iterator.h +++ b/deps/v8/src/baseline/bytecode-offset-iterator.h @@ -19,10 +19,10 @@ namespace baseline { class V8_EXPORT_PRIVATE BytecodeOffsetIterator { public: - explicit BytecodeOffsetIterator(Handle mapping_table, + explicit BytecodeOffsetIterator(Handle mapping_table, Handle bytecodes); // Non-handlified version for use when no GC can happen. 
- explicit BytecodeOffsetIterator(Tagged mapping_table, + explicit BytecodeOffsetIterator(Tagged mapping_table, Tagged bytecodes); ~BytecodeOffsetIterator(); @@ -77,7 +77,7 @@ class V8_EXPORT_PRIVATE BytecodeOffsetIterator { return base::VLQDecodeUnsigned(data_start_address_, ¤t_index_); } - Handle mapping_table_; + Handle mapping_table_; uint8_t* data_start_address_; int data_length_; int current_index_; diff --git a/deps/v8/src/bigint/bigint-internal.cc b/deps/v8/src/bigint/bigint-internal.cc index 35a9e5b3f2377f..ae0b202c65710e 100644 --- a/deps/v8/src/bigint/bigint-internal.cc +++ b/deps/v8/src/bigint/bigint-internal.cc @@ -52,7 +52,12 @@ void ProcessorImpl::Multiply(RWDigits Z, Digits X, Digits Y) { void ProcessorImpl::Divide(RWDigits Q, Digits A, Digits B) { A.Normalize(); B.Normalize(); - DCHECK(B.len() > 0); + // While callers are not required to normalize inputs, they must not + // provide divisors that normalize to zero. + // This must be a Release-mode CHECK because it is load bearing for + // security fuzzing: subsequent operations would perform illegal memory + // accesses if they attempted to work with zero divisors. + CHECK(B.len() > 0); int cmp = Compare(A, B); if (cmp < 0) return Q.Clear(); if (cmp == 0) { @@ -82,7 +87,12 @@ void ProcessorImpl::Divide(RWDigits Q, Digits A, Digits B) { void ProcessorImpl::Modulo(RWDigits R, Digits A, Digits B) { A.Normalize(); B.Normalize(); - DCHECK(B.len() > 0); + // While callers are not required to normalize inputs, they must not + // provide divisors that normalize to zero. + // This must be a Release-mode CHECK because it is load bearing for + // security fuzzing: subsequent operations would perform illegal memory + // accesses if they attempted to work with zero divisors. + CHECK(B.len() > 0); int cmp = Compare(A, B); if (cmp < 0) { for (int i = 0; i < B.len(); i++) R[i] = B[i]; diff --git a/deps/v8/src/builtins/arm/builtins-arm.cc b/deps/v8/src/builtins/arm/builtins-arm.cc index 79124bd196a57f..ce2a0ac3b728a7 100644 --- a/deps/v8/src/builtins/arm/builtins-arm.cc +++ b/deps/v8/src/builtins/arm/builtins-arm.cc @@ -3167,7 +3167,7 @@ class RegisterAllocator { while (it != allocated_registers_.end()) { if (registerIsAvailable(**it)) { **it = no_reg; - allocated_registers_.erase(it); + it = allocated_registers_.erase(it); } else { it++; } @@ -3947,7 +3947,8 @@ void SwitchToTheCentralStackIfNeeded(MacroAssembler* masm, __ PrepareCallCFunction(2); __ Move(kCArgRegs[0], ER::isolate_address(masm->isolate())); __ Move(kCArgRegs[1], kOldSPRegister); - __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2); + __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2, + SetIsolateDataSlots::kNo); __ Move(central_stack_sp, kReturnRegister0); __ Pop(argv_input); __ Pop(target_input); @@ -3980,7 +3981,8 @@ void SwitchFromTheCentralStackIfNeeded(MacroAssembler* masm) { __ Push(kReturnRegister0, kReturnRegister1); __ PrepareCallCFunction(1); __ Move(kCArgRegs[0], ER::isolate_address(masm->isolate())); - __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1); + __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1, + SetIsolateDataSlots::kNo); __ Pop(kReturnRegister0, kReturnRegister1); } @@ -4120,7 +4122,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size, __ mov(r0, Operand(0)); __ mov(r1, Operand(0)); __ Move(r2, ExternalReference::isolate_address(masm->isolate())); - __ CallCFunction(find_handler, 3); + __ CallCFunction(find_handler, 3, SetIsolateDataSlots::kNo); } // Retrieve the handler context, SP and 
FP. @@ -4283,7 +4285,8 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister(); topmost_script_having_context = CallApiCallbackGenericDescriptor:: TopmostScriptHavingContextRegister(); - callback = CallApiCallbackGenericDescriptor::CallHandlerInfoRegister(); + callback = + CallApiCallbackGenericDescriptor::FunctionTemplateInfoRegister(); holder = CallApiCallbackGenericDescriptor::HolderRegister(); break; @@ -4352,7 +4355,9 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, // kData. switch (mode) { case CallApiCallbackMode::kGeneric: - __ ldr(scratch2, FieldMemOperand(callback, CallHandlerInfo::kDataOffset)); + __ ldr( + scratch2, + FieldMemOperand(callback, FunctionTemplateInfo::kCallbackDataOffset)); __ str(scratch2, MemOperand(sp, FCA::kDataIndex * kSystemPointerSize)); break; @@ -4405,13 +4410,11 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, // Target parameter. static_assert(ApiCallbackExitFrameConstants::kTargetOffset == 2 * kSystemPointerSize); - __ ldr(scratch, - FieldMemOperand(callback, CallHandlerInfo::kOwnerTemplateOffset)); - __ str(scratch, MemOperand(sp, 0 * kSystemPointerSize)); + __ str(callback, MemOperand(sp, 0 * kSystemPointerSize)); __ ldr(api_function_address, - FieldMemOperand(callback, - CallHandlerInfo::kMaybeRedirectedCallbackOffset)); + FieldMemOperand( + callback, FunctionTemplateInfo::kMaybeRedirectedCallbackOffset)); __ EnterExitFrame(kApiStackSpace, StackFrame::API_CALLBACK_EXIT); } else { @@ -4521,10 +4524,15 @@ void Builtins::Generate_CallApiGetter(MacroAssembler* masm) { __ RecordComment( "Load address of v8::PropertyAccessorInfo::args_ array and name handle."); - // name_arg = Handle(&name), name value was pushed to GC-ed stack space. +#ifdef V8_ENABLE_DIRECT_LOCAL + // name_arg = Local(name), name value was pushed to GC-ed stack space. + __ mov(name_arg, scratch); +#else + // name_arg = Local(&name), name value was pushed to GC-ed stack space. 
__ mov(name_arg, sp); +#endif // property_callback_info_arg = v8::PCI::args_ (= &ShouldThrow) - __ add(property_callback_info_arg, name_arg, Operand(1 * kPointerSize)); + __ add(property_callback_info_arg, sp, Operand(1 * kPointerSize)); constexpr int kNameOnStackSize = 1; constexpr int kStackUnwindSpace = PCA::kArgsLength + kNameOnStackSize; diff --git a/deps/v8/src/builtins/arm64/builtins-arm64.cc b/deps/v8/src/builtins/arm64/builtins-arm64.cc index 5c607660fb913a..6f874055dfeb29 100644 --- a/deps/v8/src/builtins/arm64/builtins-arm64.cc +++ b/deps/v8/src/builtins/arm64/builtins-arm64.cc @@ -3678,7 +3678,7 @@ class RegisterAllocator { while (it != allocated_registers_.end()) { if (available_.IncludesAliasOf(**it)) { **it = no_reg; - allocated_registers_.erase(it); + it = allocated_registers_.erase(it); } else { it++; } @@ -4441,7 +4441,8 @@ void SwitchToTheCentralStackIfNeeded(MacroAssembler* masm, Register argc_input, __ Push(argc_input, target_input, argv_input, padreg); __ Mov(kCArgRegs[0], ER::isolate_address(masm->isolate())); __ Mov(kCArgRegs[1], kOldSPRegister); - __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2); + __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2, + SetIsolateDataSlots::kNo); __ Mov(central_stack_sp, kReturnRegister0); __ Pop(padreg, argv_input, target_input, argc_input); } @@ -4471,7 +4472,8 @@ void SwitchFromTheCentralStackIfNeeded(MacroAssembler* masm) { { __ Push(kReturnRegister0, kReturnRegister1); __ Mov(kCArgRegs[0], ER::isolate_address(masm->isolate())); - __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1); + __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1, + SetIsolateDataSlots::kNo); __ Pop(kReturnRegister1, kReturnRegister0); } @@ -4622,7 +4624,8 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size, __ Mov(x0, 0); // argc. __ Mov(x1, 0); // argv. __ Mov(x2, ER::isolate_address(masm->isolate())); - __ CallCFunction(ER::Create(Runtime::kUnwindAndFindExceptionHandler), 3); + __ CallCFunction(ER::Create(Runtime::kUnwindAndFindExceptionHandler), 3, + SetIsolateDataSlots::kNo); } // Retrieve the handler context, SP and FP. @@ -4772,7 +4775,8 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister(); topmost_script_having_context = CallApiCallbackGenericDescriptor:: TopmostScriptHavingContextRegister(); - callback = CallApiCallbackGenericDescriptor::CallHandlerInfoRegister(); + callback = + CallApiCallbackGenericDescriptor::FunctionTemplateInfoRegister(); holder = CallApiCallbackGenericDescriptor::HolderRegister(); break; @@ -4843,7 +4847,8 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, switch (mode) { case CallApiCallbackMode::kGeneric: __ LoadTaggedField( - scratch2, FieldMemOperand(callback, CallHandlerInfo::kDataOffset)); + scratch2, + FieldMemOperand(callback, FunctionTemplateInfo::kCallbackDataOffset)); __ Str(scratch2, MemOperand(sp, FCA::kDataIndex * kSystemPointerSize)); break; @@ -4897,16 +4902,13 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, // Target parameter. 
static_assert(ApiCallbackExitFrameConstants::kTargetOffset == 2 * kSystemPointerSize); - __ LoadTaggedField( - scratch, - FieldMemOperand(callback, CallHandlerInfo::kOwnerTemplateOffset)); - __ Str(scratch, MemOperand(sp, 0 * kSystemPointerSize)); + __ Str(callback, MemOperand(sp, 0 * kSystemPointerSize)); __ LoadExternalPointerField( api_function_address, FieldMemOperand(callback, - CallHandlerInfo::kMaybeRedirectedCallbackOffset), - kCallHandlerInfoCallbackTag); + FunctionTemplateInfo::kMaybeRedirectedCallbackOffset), + kFunctionTemplateInfoCallbackTag); __ EnterExitFrame(scratch, kApiStackSpace, StackFrame::API_CALLBACK_EXIT); } else { @@ -5041,8 +5043,14 @@ void Builtins::Generate_CallApiGetter(MacroAssembler* masm) { __ RecordComment( "Load address of v8::PropertyAccessorInfo::args_ array and name handle."); - // name_arg = Handle(&name), name value was pushed to GC-ed stack space. +#ifdef V8_ENABLE_DIRECT_LOCAL + // name_arg = Local(name), name value was pushed to GC-ed stack space. + __ mov(name_arg, name); + USE(kNameStackIndex); +#else + // name_arg = Local(&name), name value was pushed to GC-ed stack space. __ Add(name_arg, sp, Operand(kNameStackIndex * kSystemPointerSize)); +#endif // property_callback_info_arg = v8::PCI::args_ (= &ShouldThrow) __ Add(property_callback_info_arg, sp, Operand(kPCAStackIndex * kSystemPointerSize)); diff --git a/deps/v8/src/builtins/array-join.tq b/deps/v8/src/builtins/array-join.tq index d85dc1ab48319b..5d0edc48e9d328 100644 --- a/deps/v8/src/builtins/array-join.tq +++ b/deps/v8/src/builtins/array-join.tq @@ -443,6 +443,8 @@ transitioning ArrayJoin( if (IsElementsKindGreaterThan(kind, ElementsKind::UINT32_ELEMENTS)) { if (kind == ElementsKind::INT32_ELEMENTS) { loadFn = LoadJoinTypedElement; + } else if (kind == ElementsKind::FLOAT16_ELEMENTS) { + loadFn = LoadJoinTypedElement; } else if (kind == ElementsKind::FLOAT32_ELEMENTS) { loadFn = LoadJoinTypedElement; } else if (kind == ElementsKind::FLOAT64_ELEMENTS) { @@ -465,6 +467,8 @@ transitioning ArrayJoin( loadFn = LoadJoinTypedElement; } else if (kind == ElementsKind::RAB_GSAB_INT32_ELEMENTS) { loadFn = LoadJoinTypedElement; + } else if (kind == ElementsKind::RAB_GSAB_FLOAT16_ELEMENTS) { + loadFn = LoadJoinTypedElement; } else if (kind == ElementsKind::RAB_GSAB_FLOAT32_ELEMENTS) { loadFn = LoadJoinTypedElement; } else if (kind == ElementsKind::RAB_GSAB_FLOAT64_ELEMENTS) { diff --git a/deps/v8/src/builtins/base.tq b/deps/v8/src/builtins/base.tq index 090e2ee31ad20d..02812274b79e58 100644 --- a/deps/v8/src/builtins/base.tq +++ b/deps/v8/src/builtins/base.tq @@ -123,6 +123,7 @@ type int64 generates 'TNode' constexpr 'int64_t'; type uint64 generates 'TNode' constexpr 'uint64_t'; type intptr generates 'TNode' constexpr 'intptr_t'; type uintptr generates 'TNode' constexpr 'uintptr_t'; +type float16 generates 'TNode' constexpr 'uint16_t'; type float32 generates 'TNode' constexpr 'float'; type float64 generates 'TNode' constexpr 'double'; type bool generates 'TNode' constexpr 'bool'; @@ -327,6 +328,7 @@ extern enum ElementsKind extends int32 { INT16_ELEMENTS, UINT32_ELEMENTS, INT32_ELEMENTS, + FLOAT16_ELEMENTS, FLOAT32_ELEMENTS, FLOAT64_ELEMENTS, UINT8_CLAMPED_ELEMENTS, @@ -338,6 +340,7 @@ extern enum ElementsKind extends int32 { RAB_GSAB_INT16_ELEMENTS, RAB_GSAB_UINT32_ELEMENTS, RAB_GSAB_INT32_ELEMENTS, + RAB_GSAB_FLOAT16_ELEMENTS, RAB_GSAB_FLOAT32_ELEMENTS, RAB_GSAB_FLOAT64_ELEMENTS, RAB_GSAB_UINT8_CLAMPED_ELEMENTS, @@ -482,6 +485,7 @@ extern enum MessageTemplate { kFlattenPastSafeLength, 
kStrictReadOnlyProperty, kInvalidUsingInForInLoop, + kIllegalInvocation, ... } @@ -1005,7 +1009,7 @@ macro Float64IsNaN(n: float64): bool { // The type of all tagged values that can safely be compared with TaggedEqual. @if(V8_ENABLE_WEBASSEMBLY) type TaggedWithIdentity = JSReceiver|FixedArrayBase|Oddball|Hole|Map|WeakCell| - Context|EmptyString|Symbol|WasmInternalFunction|WasmNull; + Context|EmptyString|Symbol|WasmFuncRef|WasmNull; @ifnot(V8_ENABLE_WEBASSEMBLY) type TaggedWithIdentity = JSReceiver|FixedArrayBase|Oddball|Hole|Map|WeakCell| Context|EmptyString|Symbol; @@ -1294,9 +1298,9 @@ extern macro IntPtrRoundUpToPowerOfTwo32(intptr): intptr; extern macro ChangeFloat32ToFloat64(float32): float64; extern macro RoundInt32ToFloat32(int32): float32; extern macro ChangeNumberToFloat64(Number): float64; -extern macro ChangeNumberToUint32(Number): uint32; extern macro ChangeTaggedNonSmiToInt32( implicit context: Context)(HeapObject): int32; +extern macro ChangeFloat16ToFloat64(float16): float64; extern macro ChangeFloat32ToTagged(float32): Number; extern macro ChangeTaggedToFloat64(implicit context: Context)(JSAny): float64; extern macro ChangeFloat64ToTagged(float64): Number; @@ -1310,6 +1314,8 @@ extern macro ChangeInt32ToInt64(int32): int64; // Sign-extends. extern macro ChangeUint32ToUint64(uint32): uint64; // Doesn't sign-extend. extern macro LoadNativeContext(Context): NativeContext; extern macro GetContinuationPreservedEmbedderData(): Object; +extern macro TruncateFloat64ToFloat16(float64): float16; +extern macro TruncateFloat32ToFloat16(float32): float16; extern macro TruncateFloat64ToFloat32(float64): float32; extern macro TruncateHeapNumberValueToWord32(HeapNumber): int32; extern macro LoadJSArrayElementsMap( @@ -1350,6 +1356,8 @@ extern macro PointerConstant(constexpr RawPtr): RawPtr; extern macro SingleCharacterStringConstant(constexpr string): String; extern macro Float64SilenceNaN(float64): float64; +extern macro BitcastFloat16ToUint32(float16): uint32; +extern macro BitcastUint32ToFloat16(uint32): float16; extern macro BitcastWordToTaggedSigned(intptr): Smi; extern macro BitcastWordToTaggedSigned(uintptr): Smi; extern macro BitcastWordToTagged(intptr): Object; diff --git a/deps/v8/src/builtins/builtins-api.cc b/deps/v8/src/builtins/builtins-api.cc index 6b01c1716834bd..87c3526d20ace4 100644 --- a/deps/v8/src/builtins/builtins-api.cc +++ b/deps/v8/src/builtins/builtins-api.cc @@ -103,15 +103,11 @@ V8_WARN_UNUSED_RESULT MaybeHandle HandleApiCallHelper( } } - Tagged raw_call_data = fun_data->call_code(kAcquireLoad); - if (!IsUndefined(raw_call_data, isolate)) { - DCHECK(IsCallHandlerInfo(raw_call_data)); - Tagged call_data = CallHandlerInfo::cast(raw_call_data); - Tagged data_obj = call_data->data(); - + if (fun_data->has_callback(isolate)) { + Tagged data_obj = fun_data->callback_data(kAcquireLoad); FunctionCallbackArguments custom(isolate, data_obj, raw_holder, *new_target, argv, argc); - Handle result = custom.Call(call_data); + Handle result = custom.Call(*fun_data); RETURN_EXCEPTION_IF_EXCEPTION(isolate, Object); if (result.is_null()) { @@ -236,16 +232,18 @@ HandleApiCallAsFunctionOrConstructorDelegate(Isolate* isolate, Tagged handler = constructor->shared()->api_func_data()->GetInstanceCallHandler(); DCHECK(!IsUndefined(handler, isolate)); - Tagged call_data = CallHandlerInfo::cast(handler); + Tagged templ = FunctionTemplateInfo::cast(handler); + DCHECK(templ->is_object_template_call_handler()); + DCHECK(templ->has_callback(isolate)); // Get the data for the call and perform 
the callback. Tagged result; { HandleScope scope(isolate); FunctionCallbackArguments custom( - isolate, call_data->data(), obj, new_target, + isolate, templ->callback_data(kAcquireLoad), obj, new_target, args.address_of_first_argument(), args.length() - 1); - Handle result_handle = custom.Call(call_data); + Handle result_handle = custom.Call(templ); if (result_handle.is_null()) { result = ReadOnlyRoots(isolate).undefined_value(); } else { diff --git a/deps/v8/src/builtins/builtins-array-gen.cc b/deps/v8/src/builtins/builtins-array-gen.cc index 2ce9660930c132..3b479510702a3c 100644 --- a/deps/v8/src/builtins/builtins-array-gen.cc +++ b/deps/v8/src/builtins/builtins-array-gen.cc @@ -1406,9 +1406,9 @@ TF_BUILTIN(ArrayIteratorPrototypeNext, CodeStubAssembler) { // Check that the {index} is within range for the {array}. We handle all // kinds of JSArray's here, so we do the computation on Uint32. - TNode index32 = ChangeNumberToUint32(index); + TNode index32 = ChangeNonNegativeNumberToUint32(index); TNode length32 = - ChangeNumberToUint32(LoadJSArrayLength(CAST(array))); + ChangeNonNegativeNumberToUint32(LoadJSArrayLength(CAST(array))); GotoIfNot(Uint32LessThan(index32, length32), &set_done); StoreJSArrayIteratorNextIndex( iterator, ChangeUint32ToTagged(Uint32Add(index32, Uint32Constant(1)))); diff --git a/deps/v8/src/builtins/builtins-atomics-synchronization.cc b/deps/v8/src/builtins/builtins-atomics-synchronization.cc index cd09c218b88911..a28af5409f9b33 100644 --- a/deps/v8/src/builtins/builtins-atomics-synchronization.cc +++ b/deps/v8/src/builtins/builtins-atomics-synchronization.cc @@ -265,7 +265,7 @@ BUILTIN(AtomicsConditionNotify) { Handle js_condition = Handle::cast(js_condition_obj); return *isolate->factory()->NewNumberFromUint( - js_condition->Notify(isolate, count)); + JSAtomicsCondition::Notify(isolate, js_condition, count)); } } // namespace internal diff --git a/deps/v8/src/builtins/builtins-call-gen.cc b/deps/v8/src/builtins/builtins-call-gen.cc index 580a02d3449164..c3bd2c9081f23d 100644 --- a/deps/v8/src/builtins/builtins-call-gen.cc +++ b/deps/v8/src/builtins/builtins-call-gen.cc @@ -717,9 +717,8 @@ void CallOrConstructBuiltinsAssembler::CallFunctionTemplate( GotoIfNot(IsSetWord32( LoadMapBitField(receiver_map)), &receiver_done); - TNode function_template_info_flags = - LoadAndUntagToWord32ObjectField(function_template_info, - FunctionTemplateInfo::kFlagOffset); + TNode function_template_info_flags = LoadObjectField( + function_template_info, FunctionTemplateInfo::kFlagOffset); Branch(IsSetWord32( function_template_info_flags), &receiver_done, &receiver_needs_access_check); @@ -772,39 +771,37 @@ void CallOrConstructBuiltinsAssembler::CallFunctionTemplate( } } - TNode call_code = CAST(LoadObjectField( - function_template_info, FunctionTemplateInfo::kCallCodeOffset)); + TNode callback_data = LoadObjectField( + function_template_info, FunctionTemplateInfo::kCallbackDataOffset); // If the function doesn't have an associated C++ code to execute, just // return the receiver as would an empty function do (see // HandleApiCallHelper). { Label if_continue(this); - GotoIfNot(IsUndefined(call_code), &if_continue); + GotoIfNot(IsTheHole(callback_data), &if_continue); args.PopAndReturn(receiver); Bind(&if_continue); } // Perform the actual API callback invocation via CallApiCallback. 
- TNode call_handler_info = CAST(call_code); switch (mode) { case CallFunctionTemplateMode::kGeneric: TailCallBuiltin(Builtin::kCallApiCallbackGeneric, context, TruncateIntPtrToInt32(args.GetLengthWithoutReceiver()), - topmost_script_having_context, call_handler_info, holder); + topmost_script_having_context, function_template_info, + holder); break; case CallFunctionTemplateMode::kCheckAccess: case CallFunctionTemplateMode::kCheckAccessAndCompatibleReceiver: case CallFunctionTemplateMode::kCheckCompatibleReceiver: { TNode callback_address = - LoadCallHandlerInfoJsCallbackPtr(call_handler_info); - TNode call_data = - LoadObjectField(call_handler_info, CallHandlerInfo::kDataOffset); + LoadFunctionTemplateInfoJsCallbackPtr(function_template_info); TailCallBuiltin(Builtin::kCallApiCallbackOptimized, context, callback_address, TruncateIntPtrToInt32(args.GetLengthWithoutReceiver()), - call_data, holder); + callback_data, holder); break; } } diff --git a/deps/v8/src/builtins/builtins-call-gen.h b/deps/v8/src/builtins/builtins-call-gen.h index 951016ab9d38d1..c618f61bb17b0a 100644 --- a/deps/v8/src/builtins/builtins-call-gen.h +++ b/deps/v8/src/builtins/builtins-call-gen.h @@ -75,7 +75,6 @@ class CallOrConstructBuiltinsAssembler : public CodeStubAssembler { const LazyNode& feedback_vector, TNode slot); - private: TNode GetCompatibleReceiver(TNode receiver, TNode signature, TNode context); diff --git a/deps/v8/src/builtins/builtins-collections-gen.cc b/deps/v8/src/builtins/builtins-collections-gen.cc index 6fea5c37e8c2f2..e5e6026ce61632 100644 --- a/deps/v8/src/builtins/builtins-collections-gen.cc +++ b/deps/v8/src/builtins/builtins-collections-gen.cc @@ -2782,9 +2782,10 @@ TNode WeakCollectionsBuiltinsAssembler::ShouldShrink( TNode WeakCollectionsBuiltinsAssembler::ValueIndexFromKeyIndex( TNode key_index) { - return IntPtrAdd(key_index, - IntPtrConstant(EphemeronHashTable::ShapeT::kEntryValueIndex - - EphemeronHashTable::kEntryKeyIndex)); + return IntPtrAdd( + key_index, + IntPtrConstant(EphemeronHashTable::TodoShape::kEntryValueIndex - + EphemeronHashTable::kEntryKeyIndex)); } TF_BUILTIN(WeakMapConstructor, WeakCollectionsBuiltinsAssembler) { diff --git a/deps/v8/src/builtins/builtins-definitions.h b/deps/v8/src/builtins/builtins-definitions.h index 8cdba0d032eea4..cf1ec0d4ad31f0 100644 --- a/deps/v8/src/builtins/builtins-definitions.h +++ b/deps/v8/src/builtins/builtins-definitions.h @@ -162,9 +162,9 @@ namespace internal { ASM(JSConstructEntry, JSEntry) \ ASM(JSRunMicrotasksEntry, RunMicrotasksEntry) \ /* Call a JSValue. */ \ - ASM(JSEntryTrampoline, JSTrampoline) \ + ASM(JSEntryTrampoline, JSEntry) \ /* Construct a JSValue. 
*/ \ - ASM(JSConstructEntryTrampoline, JSTrampoline) \ + ASM(JSConstructEntryTrampoline, JSEntry) \ ASM(ResumeGeneratorTrampoline, ResumeGenerator) \ \ /* String helpers */ \ @@ -657,9 +657,11 @@ namespace internal { TFH(LoadSuperIC, LoadWithReceiverAndVector) \ TFH(LoadSuperICBaseline, LoadWithReceiverBaseline) \ TFH(KeyedLoadIC, KeyedLoadWithVector) \ + TFH(EnumeratedKeyedLoadIC, EnumeratedKeyedLoad) \ TFH(KeyedLoadIC_Megamorphic, KeyedLoadWithVector) \ TFH(KeyedLoadICTrampoline, KeyedLoad) \ TFH(KeyedLoadICBaseline, KeyedLoadBaseline) \ + TFH(EnumeratedKeyedLoadICBaseline, EnumeratedKeyedLoadBaseline) \ TFH(KeyedLoadICTrampoline_Megamorphic, KeyedLoad) \ TFH(StoreGlobalIC, StoreGlobalWithVector) \ TFH(StoreGlobalICTrampoline, StoreGlobal) \ @@ -1014,8 +1016,8 @@ namespace internal { IF_WASM(TFC, WasmToJsWrapperCSA, WasmToJSWrapper) \ IF_WASM(TFC, WasmToJsWrapperInvalidSig, WasmToJSWrapper) \ IF_WASM(ASM, WasmSuspend, WasmSuspend) \ - IF_WASM(ASM, WasmResume, WasmDummy) \ - IF_WASM(ASM, WasmReject, WasmDummy) \ + IF_WASM(ASM, WasmResume, WasmDummyWithJSLinkage) \ + IF_WASM(ASM, WasmReject, WasmDummyWithJSLinkage) \ IF_WASM(ASM, WasmTrapHandlerLandingPad, WasmDummy) \ IF_WASM(ASM, WasmCompileLazy, WasmDummy) \ IF_WASM(ASM, WasmLiftoffFrameSetup, WasmDummy) \ diff --git a/deps/v8/src/builtins/builtins-handler-gen.cc b/deps/v8/src/builtins/builtins-handler-gen.cc index 107c710bad9b55..ae9594599810be 100644 --- a/deps/v8/src/builtins/builtins-handler-gen.cc +++ b/deps/v8/src/builtins/builtins-handler-gen.cc @@ -212,6 +212,7 @@ TF_BUILTIN(ElementsTransitionAndStore_NoTransitionHandleCOW, V(INT16_ELEMENTS) \ V(UINT32_ELEMENTS) \ V(INT32_ELEMENTS) \ + V(FLOAT16_ELEMENTS) \ V(FLOAT32_ELEMENTS) \ V(FLOAT64_ELEMENTS) \ V(UINT8_CLAMPED_ELEMENTS) \ @@ -223,6 +224,7 @@ TF_BUILTIN(ElementsTransitionAndStore_NoTransitionHandleCOW, V(RAB_GSAB_INT16_ELEMENTS) \ V(RAB_GSAB_UINT32_ELEMENTS) \ V(RAB_GSAB_INT32_ELEMENTS) \ + V(RAB_GSAB_FLOAT16_ELEMENTS) \ V(RAB_GSAB_FLOAT32_ELEMENTS) \ V(RAB_GSAB_FLOAT64_ELEMENTS) \ V(RAB_GSAB_UINT8_CLAMPED_ELEMENTS) \ diff --git a/deps/v8/src/builtins/builtins-ic-gen.cc b/deps/v8/src/builtins/builtins-ic-gen.cc index da645619dc5a74..0bbcbd858887c1 100644 --- a/deps/v8/src/builtins/builtins-ic-gen.cc +++ b/deps/v8/src/builtins/builtins-ic-gen.cc @@ -53,6 +53,16 @@ void Builtins::Generate_KeyedLoadIC(compiler::CodeAssemblerState* state) { AccessorAssembler assembler(state); assembler.GenerateKeyedLoadIC(); } +void Builtins::Generate_EnumeratedKeyedLoadIC( + compiler::CodeAssemblerState* state) { + AccessorAssembler assembler(state); + assembler.GenerateEnumeratedKeyedLoadIC(); +} +void Builtins::Generate_EnumeratedKeyedLoadICBaseline( + compiler::CodeAssemblerState* state) { + AccessorAssembler assembler(state); + assembler.GenerateEnumeratedKeyedLoadICBaseline(); +} void Builtins::Generate_KeyedLoadIC_Megamorphic( compiler::CodeAssemblerState* state) { AccessorAssembler assembler(state); diff --git a/deps/v8/src/builtins/builtins-internal-gen.cc b/deps/v8/src/builtins/builtins-internal-gen.cc index 9b3c69a4dbf5ee..40c6a5b4135235 100644 --- a/deps/v8/src/builtins/builtins-internal-gen.cc +++ b/deps/v8/src/builtins/builtins-internal-gen.cc @@ -11,7 +11,7 @@ #include "src/codegen/macro-assembler-inl.h" #include "src/common/globals.h" #include "src/execution/frame-constants.h" -#include "src/heap/memory-chunk.h" +#include "src/heap/mutable-page.h" #include "src/ic/accessor-assembler.h" #include "src/ic/keyed-store-generic.h" #include "src/logging/counters.h" @@ -137,7 +137,7 @@ 
class WriteBarrierCodeStubAssembler : public CodeStubAssembler { } TNode IsPageFlagSet(TNode object, int mask) { - TNode header = PageHeaderFromAddress(object); + TNode header = MemoryChunkFromAddress(object); TNode flags = UncheckedCast( Load(MachineType::Pointer(), header, IntPtrConstant(MemoryChunkLayout::kFlagsOffset))); @@ -155,7 +155,7 @@ class WriteBarrierCodeStubAssembler : public CodeStubAssembler { void GetMarkBit(TNode object, TNode* cell, TNode* mask) { - TNode page = PageFromAddress(object); + TNode page = PageMetadataFromAddress(object); TNode bitmap = IntPtrAdd( page, IntPtrConstant(MemoryChunkLayout::kMarkingBitmapOffset)); @@ -165,10 +165,10 @@ class WriteBarrierCodeStubAssembler : public CodeStubAssembler { int shift = MarkingBitmap::kBitsPerCellLog2 + kTaggedSizeLog2 - MarkingBitmap::kBytesPerCellLog2; r0 = WordShr(object, IntPtrConstant(shift)); - r0 = WordAnd( - r0, IntPtrConstant( - (MemoryChunkHeader::GetAlignmentMaskForAssembler() >> shift) & - ~(MarkingBitmap::kBytesPerCell - 1))); + r0 = WordAnd(r0, + IntPtrConstant( + (MemoryChunk::GetAlignmentMaskForAssembler() >> shift) & + ~(MarkingBitmap::kBytesPerCell - 1))); *cell = IntPtrAdd(bitmap, Signed(r0)); } { @@ -187,12 +187,12 @@ class WriteBarrierCodeStubAssembler : public CodeStubAssembler { void InsertIntoRememberedSet(TNode object, TNode slot, SaveFPRegsMode fp_mode) { Label slow_path(this), next(this); - TNode page_header = PageHeaderFromAddress(object); - TNode page = PageFromPageHeader(page_header); + TNode chunk = MemoryChunkFromAddress(object); + TNode page = PageMetadataFromMemoryChunk(chunk); // Load address of SlotSet TNode slot_set = LoadSlotSet(page, &slow_path); - TNode slot_offset = IntPtrSub(slot, page_header); + TNode slot_offset = IntPtrSub(slot, chunk); // Load bucket TNode bucket = LoadBucket(slot_set, slot_offset, &slow_path); @@ -208,7 +208,7 @@ class WriteBarrierCodeStubAssembler : public CodeStubAssembler { CallCFunctionWithCallerSavedRegisters( function, MachineTypeOf::value, fp_mode, std::make_pair(MachineTypeOf::value, page), - std::make_pair(MachineTypeOf::value, slot)); + std::make_pair(MachineTypeOf::value, slot_offset)); Goto(&next); } @@ -218,7 +218,7 @@ class WriteBarrierCodeStubAssembler : public CodeStubAssembler { TNode LoadSlotSet(TNode page, Label* slow_path) { TNode slot_set = UncheckedCast( Load(MachineType::Pointer(), page, - IntPtrConstant(MemoryChunk::kOldToNewSlotSetOffset))); + IntPtrConstant(MutablePageMetadata::kOldToNewSlotSetOffset))); GotoIf(WordEqual(slot_set, IntPtrConstant(0)), slow_path); return slot_set; } diff --git a/deps/v8/src/builtins/builtins-microtask-queue-gen.cc b/deps/v8/src/builtins/builtins-microtask-queue-gen.cc index 21f3997fa14d9e..cb0109e75798e8 100644 --- a/deps/v8/src/builtins/builtins-microtask-queue-gen.cc +++ b/deps/v8/src/builtins/builtins-microtask-queue-gen.cc @@ -51,6 +51,10 @@ class MicrotaskQueueBuiltinsAssembler : public CodeStubAssembler { void RunPromiseHook(Runtime::FunctionId id, TNode context, TNode promise_or_capability, TNode promiseHookFlags); +#ifdef V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA + void SetupContinuationPreservedEmbedderData(TNode microtask); + void ClearContinuationPreservedEmbedderData(); +#endif }; TNode MicrotaskQueueBuiltinsAssembler::GetMicrotaskQueue( @@ -115,6 +119,27 @@ void MicrotaskQueueBuiltinsAssembler::PrepareForContext( SetCurrentContext(native_context); } +#ifdef V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA +void MicrotaskQueueBuiltinsAssembler::SetupContinuationPreservedEmbedderData( + 
TNode microtask) { + TNode continuation_preserved_embedder_data = LoadObjectField( + microtask, Microtask::kContinuationPreservedEmbedderDataOffset); + Label continuation_preserved_data_done(this); + // The isolate's continuation preserved embedder data is cleared at the start + // of RunMicrotasks and after each microtask, so it only needs to be set if + // it's not undefined. + GotoIf(IsUndefined(continuation_preserved_embedder_data), + &continuation_preserved_data_done); + SetContinuationPreservedEmbedderData(continuation_preserved_embedder_data); + Goto(&continuation_preserved_data_done); + BIND(&continuation_preserved_data_done); +} + +void MicrotaskQueueBuiltinsAssembler::ClearContinuationPreservedEmbedderData() { + SetContinuationPreservedEmbedderData(UndefinedConstant()); +} +#endif // V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA + void MicrotaskQueueBuiltinsAssembler::RunSingleMicrotask( TNode current_context, TNode microtask) { CSA_DCHECK(this, TaggedIsNotSmi(microtask)); @@ -152,6 +177,9 @@ void MicrotaskQueueBuiltinsAssembler::RunSingleMicrotask( TNode native_context = LoadNativeContext(microtask_context); PrepareForContext(native_context, &done); +#ifdef V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA + SetupContinuationPreservedEmbedderData(microtask); +#endif TNode callable = LoadObjectField(microtask, CallableTask::kCallableOffset); { @@ -160,6 +188,9 @@ void MicrotaskQueueBuiltinsAssembler::RunSingleMicrotask( } RewindEnteredContext(saved_entered_context_count); SetCurrentContext(current_context); +#ifdef V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA + ClearContinuationPreservedEmbedderData(); +#endif Goto(&done); } @@ -169,6 +200,9 @@ void MicrotaskQueueBuiltinsAssembler::RunSingleMicrotask( LoadObjectField(microtask, CallbackTask::kCallbackOffset); const TNode microtask_data = LoadObjectField(microtask, CallbackTask::kDataOffset); +#ifdef V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA + SetupContinuationPreservedEmbedderData(microtask); +#endif // If this turns out to become a bottleneck because of the calls // to C++ via CEntry, we can choose to speed them up using a @@ -185,6 +219,9 @@ void MicrotaskQueueBuiltinsAssembler::RunSingleMicrotask( CallRuntime(Runtime::kRunMicrotaskCallback, current_context, microtask_callback, microtask_data); } +#ifdef V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA + ClearContinuationPreservedEmbedderData(); +#endif Goto(&done); } @@ -202,7 +239,9 @@ void MicrotaskQueueBuiltinsAssembler::RunSingleMicrotask( LoadObjectField(microtask, PromiseResolveThenableJobTask::kThenOffset); const TNode thenable = LoadObjectField( microtask, PromiseResolveThenableJobTask::kThenableOffset); - +#ifdef V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA + SetupContinuationPreservedEmbedderData(microtask); +#endif RunAllPromiseHooks(PromiseHookType::kBefore, microtask_context, CAST(promise_to_resolve)); @@ -217,6 +256,9 @@ void MicrotaskQueueBuiltinsAssembler::RunSingleMicrotask( RewindEnteredContext(saved_entered_context_count); SetCurrentContext(current_context); +#ifdef V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA + ClearContinuationPreservedEmbedderData(); +#endif // V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA Goto(&done); } @@ -236,27 +278,7 @@ void MicrotaskQueueBuiltinsAssembler::RunSingleMicrotask( microtask, PromiseReactionJobTask::kPromiseOrCapabilityOffset)); #ifdef V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA - TNode isolate_preserved_embedder_data = LoadObjectField( - microtask, PromiseReactionJobTask:: - 
kIsolateContinuationPreservedEmbedderDataOffset); - Label isolate_preserved_data_done(this); - GotoIf(IsUndefined(isolate_preserved_embedder_data), - &isolate_preserved_data_done); - SetContinuationPreservedEmbedderData(isolate_preserved_embedder_data); - Goto(&isolate_preserved_data_done); - BIND(&isolate_preserved_data_done); - - TNode context_preserved_embedder_data = LoadObjectField( - microtask, PromiseReactionJobTask:: - kContextContinuationPreservedEmbedderDataOffset); - Label context_preserved_data_done(this); - GotoIf(IsUndefined(context_preserved_embedder_data), - &context_preserved_data_done); - StoreContextElement(native_context, - Context::CONTINUATION_PRESERVED_EMBEDDER_DATA_INDEX, - context_preserved_embedder_data); - Goto(&context_preserved_data_done); - BIND(&context_preserved_data_done); + SetupContinuationPreservedEmbedderData(microtask); #endif // V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA // Run the promise before/debug hook if enabled. @@ -274,21 +296,7 @@ void MicrotaskQueueBuiltinsAssembler::RunSingleMicrotask( promise_or_capability); #ifdef V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA - Label isolate_preserved_data_reset_done(this); - GotoIf(IsUndefined(isolate_preserved_embedder_data), - &isolate_preserved_data_reset_done); - SetContinuationPreservedEmbedderData(UndefinedConstant()); - Goto(&isolate_preserved_data_reset_done); - BIND(&isolate_preserved_data_reset_done); - - Label context_preserved_data_reset_done(this); - GotoIf(IsUndefined(context_preserved_embedder_data), - &context_preserved_data_reset_done); - StoreContextElement(native_context, - Context::CONTINUATION_PRESERVED_EMBEDDER_DATA_INDEX, - UndefinedConstant()); - Goto(&context_preserved_data_reset_done); - BIND(&context_preserved_data_reset_done); + ClearContinuationPreservedEmbedderData(); #endif // V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA RewindEnteredContext(saved_entered_context_count); @@ -312,27 +320,7 @@ void MicrotaskQueueBuiltinsAssembler::RunSingleMicrotask( microtask, PromiseReactionJobTask::kPromiseOrCapabilityOffset)); #ifdef V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA - TNode isolate_preserved_embedder_data = LoadObjectField( - microtask, PromiseReactionJobTask:: - kIsolateContinuationPreservedEmbedderDataOffset); - Label isolate_preserved_data_done(this); - GotoIf(IsUndefined(isolate_preserved_embedder_data), - &isolate_preserved_data_done); - SetContinuationPreservedEmbedderData(isolate_preserved_embedder_data); - Goto(&isolate_preserved_data_done); - BIND(&isolate_preserved_data_done); - - TNode context_preserved_embedder_data = LoadObjectField( - microtask, PromiseReactionJobTask:: - kContextContinuationPreservedEmbedderDataOffset); - Label context_preserved_data_done(this); - GotoIf(IsUndefined(context_preserved_embedder_data), - &context_preserved_data_done); - StoreContextElement(native_context, - Context::CONTINUATION_PRESERVED_EMBEDDER_DATA_INDEX, - context_preserved_embedder_data); - Goto(&context_preserved_data_done); - BIND(&context_preserved_data_done); + SetupContinuationPreservedEmbedderData(microtask); #endif // V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA // Run the promise before/debug hook if enabled. 
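The SetupContinuationPreservedEmbedderData/ClearContinuationPreservedEmbedderData helpers above replace four near-identical inline sequences that previously existed only for promise reaction jobs, and the refactor extends the same treatment to callable tasks, callback tasks, and thenable jobs. The protocol they centralize is: write the slot before running a task only if the task carries data, and reset it unconditionally afterwards so nothing leaks into the next task. A standalone C++ sketch of that protocol (illustrative only; the CSA builtins use explicit calls, not a scope object):

    // Sketch only: models the per-task set/clear protocol with RAII.
    template <typename T>
    class ScopedEmbedderData {
     public:
      // Mirrors SetupContinuationPreservedEmbedderData: the slot is already
      // empty here (cleared at queue start and after every task), so it only
      // needs a write when the task actually carries data.
      ScopedEmbedderData(T* slot, const T& task_data, const T& empty)
          : slot_(slot), empty_(empty) {
        if (!(task_data == empty)) *slot_ = task_data;
      }
      // Mirrors ClearContinuationPreservedEmbedderData: reset unconditionally
      // so one task's data can never bleed into the next task on the queue.
      ~ScopedEmbedderData() { *slot_ = empty_; }

     private:
      T* slot_;
      T empty_;
    };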
@@ -350,21 +338,7 @@ void MicrotaskQueueBuiltinsAssembler::RunSingleMicrotask( promise_or_capability); #ifdef V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA - Label isolate_preserved_data_reset_done(this); - GotoIf(IsUndefined(isolate_preserved_embedder_data), - &isolate_preserved_data_reset_done); - SetContinuationPreservedEmbedderData(UndefinedConstant()); - Goto(&isolate_preserved_data_reset_done); - BIND(&isolate_preserved_data_reset_done); - - Label context_preserved_data_reset_done(this); - GotoIf(IsUndefined(context_preserved_embedder_data), - &context_preserved_data_reset_done); - StoreContextElement(native_context, - Context::CONTINUATION_PRESERVED_EMBEDDER_DATA_INDEX, - UndefinedConstant()); - Goto(&context_preserved_data_reset_done); - BIND(&context_preserved_data_reset_done); + ClearContinuationPreservedEmbedderData(); #endif // V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA RewindEnteredContext(saved_entered_context_count); diff --git a/deps/v8/src/builtins/builtins-regexp-gen.cc b/deps/v8/src/builtins/builtins-regexp-gen.cc index e3befac11c7f92..7f54ca5aee7184 100644 --- a/deps/v8/src/builtins/builtins-regexp-gen.cc +++ b/deps/v8/src/builtins/builtins-regexp-gen.cc @@ -322,13 +322,11 @@ TNode RegExpBuiltinsAssembler::ConstructNewResultFromMatchInfo( // - Receiver has no interceptors Label add_dictionary_property_slow(this, Label::kDeferred); TVARIABLE(IntPtrT, var_name_index); - Label add_name_entry_find_index(this), - add_name_entry_known_index(this, &var_name_index), + Label add_name_entry(this, &var_name_index), duplicate_name(this, &var_name_index), next(this); NameDictionaryLookup( CAST(properties), name, &duplicate_name, &var_name_index, - &add_name_entry_find_index, kFindExisting, - &add_name_entry_known_index); + &add_name_entry, kFindExistingOrInsertionIndex); BIND(&duplicate_name); GotoIf(IsUndefined(capture), &next); CSA_DCHECK(this, @@ -339,12 +337,7 @@ TNode RegExpBuiltinsAssembler::ConstructNewResultFromMatchInfo( var_name_index.value(), capture); Goto(&next); - BIND(&add_name_entry_find_index); - FindInsertionEntry(CAST(properties), name, - &var_name_index); - Goto(&add_name_entry_known_index); - - BIND(&add_name_entry_known_index); + BIND(&add_name_entry); AddToDictionary(CAST(properties), name, capture, &add_dictionary_property_slow, var_name_index.value()); diff --git a/deps/v8/src/builtins/builtins-typed-array-gen.cc b/deps/v8/src/builtins/builtins-typed-array-gen.cc index 42e594a0ce9252..1c0fca4db2c8e7 100644 --- a/deps/v8/src/builtins/builtins-typed-array-gen.cc +++ b/deps/v8/src/builtins/builtins-typed-array-gen.cc @@ -488,6 +488,10 @@ void TypedArrayBuiltinsAssembler::StoreJSTypedArrayElementFromNumeric( StoreElement(data_ptr, elements_kind, index, TruncateTaggedToWord32(context, value)); break; + case FLOAT16_ELEMENTS: + StoreElement(data_ptr, elements_kind, index, + TruncateFloat64ToFloat16(LoadHeapNumberValue(CAST(value)))); + break; case FLOAT32_ELEMENTS: StoreElement(data_ptr, elements_kind, index, TruncateFloat64ToFloat32(LoadHeapNumberValue(CAST(value)))); @@ -511,12 +515,13 @@ void TypedArrayBuiltinsAssembler::StoreJSTypedArrayElementFromPreparedValue( TNode context, TNode typed_array, TNode index, TNode prepared_value, ElementsKind elements_kind, Label* if_detached_or_out_of_bounds) { - static_assert( - std::is_same::value || - std::is_same::value || - std::is_same::value || - std::is_same::value, - "Only Word32T, Float32T, Float64T or BigInt values are allowed"); + static_assert(std::is_same::value || + std::is_same::value || + 
std::is_same::value || + std::is_same::value || + std::is_same::value, + "Only Word32T, Float16T, Float32T, Float64T or BigInt values " + "are allowed"); // ToNumber/ToBigInt (or other functions called by the upper level) may // execute JavaScript code, which could detach the TypedArray's buffer or make // the TypedArray out of bounds. @@ -548,6 +553,14 @@ void TypedArrayBuiltinsAssembler::StoreJSTypedArrayElementFromTagged( if_detached_or_out_of_bounds); break; } + case FLOAT16_ELEMENTS: { + auto prepared_value = PrepareValueForWriteToTypedArray( + value, elements_kind, context); + StoreJSTypedArrayElementFromPreparedValue(context, typed_array, index, + prepared_value, elements_kind, + if_detached_or_out_of_bounds); + break; + } case FLOAT32_ELEMENTS: { auto prepared_value = PrepareValueForWriteToTypedArray( value, elements_kind, context); diff --git a/deps/v8/src/builtins/builtins-wasm-gen.cc b/deps/v8/src/builtins/builtins-wasm-gen.cc index d3a804e97e5af2..77e9ec1aa0788c 100644 --- a/deps/v8/src/builtins/builtins-wasm-gen.cc +++ b/deps/v8/src/builtins/builtins-wasm-gen.cc @@ -74,10 +74,10 @@ TNode WasmBuiltinsAssembler::LoadTablesFromInstanceData( WasmTrustedInstanceData::kTablesOffset); } -TNode WasmBuiltinsAssembler::LoadInternalFunctionsFromInstanceData( +TNode WasmBuiltinsAssembler::LoadFuncRefsFromInstanceData( TNode trusted_data) { - return LoadObjectField( - trusted_data, WasmTrustedInstanceData::kWasmInternalFunctionsOffset); + return LoadObjectField(trusted_data, + WasmTrustedInstanceData::kFuncRefsOffset); } TNode WasmBuiltinsAssembler::LoadManagedObjectMapsFromInstanceData( diff --git a/deps/v8/src/builtins/builtins-wasm-gen.h b/deps/v8/src/builtins/builtins-wasm-gen.h index 7dffb8f400af68..9d404787cc39e2 100644 --- a/deps/v8/src/builtins/builtins-wasm-gen.h +++ b/deps/v8/src/builtins/builtins-wasm-gen.h @@ -27,7 +27,7 @@ class WasmBuiltinsAssembler : public CodeStubAssembler { TNode LoadTablesFromInstanceData(TNode); - TNode LoadInternalFunctionsFromInstanceData( + TNode LoadFuncRefsFromInstanceData( TNode); TNode LoadManagedObjectMapsFromInstanceData( diff --git a/deps/v8/src/builtins/builtins.cc b/deps/v8/src/builtins/builtins.cc index 6a909d15ee7128..88e989513ff594 100644 --- a/deps/v8/src/builtins/builtins.cc +++ b/deps/v8/src/builtins/builtins.cc @@ -216,6 +216,8 @@ const char* Builtins::NameForStackTrace(Isolate* isolate, Builtin builtin) { return "DataView.prototype.getBigInt64"; case Builtin::kDataViewPrototypeGetBigUint64: return "DataView.prototype.getBigUint64"; + case Builtin::kDataViewPrototypeGetFloat16: + return "DataView.prototype.getFloat16"; case Builtin::kDataViewPrototypeGetFloat32: return "DataView.prototype.getFloat32"; case Builtin::kDataViewPrototypeGetFloat64: @@ -236,6 +238,8 @@ const char* Builtins::NameForStackTrace(Isolate* isolate, Builtin builtin) { return "DataView.prototype.setBigInt64"; case Builtin::kDataViewPrototypeSetBigUint64: return "DataView.prototype.setBigUint64"; + case Builtin::kDataViewPrototypeSetFloat16: + return "DataView.prototype.setFloat16"; case Builtin::kDataViewPrototypeSetFloat32: return "DataView.prototype.setFloat32"; case Builtin::kDataViewPrototypeSetFloat64: @@ -471,17 +475,18 @@ CodeEntrypointTag Builtins::EntrypointTagFor(Builtin builtin) { Kind kind = Builtins::KindOf(builtin); switch (kind) { + case CPP: + case TFJ: + return kJSEntrypointTag; case BCH: return kBytecodeHandlerEntrypointTag; + case TFC: + case TFS: case TFH: - return kICHandlerEntrypointTag; case ASM: - // TODO(saelo) consider using this approach 
for the other kinds as well. return CallInterfaceDescriptorFor(builtin).tag(); - default: - // TODO(saelo): use more fine-grained tags here. - return kDefaultCodeEntrypointTag; } + UNREACHABLE(); } // static diff --git a/deps/v8/src/builtins/convert.tq b/deps/v8/src/builtins/convert.tq index a79e6816e6f5c3..34e7934963e017 100644 --- a/deps/v8/src/builtins/convert.tq +++ b/deps/v8/src/builtins/convert.tq @@ -355,6 +355,13 @@ Convert(f: float64): float32 { Convert(n: Number): float32 { return Convert(ChangeNumberToFloat64(n)); } +Convert(n: Number): float16 { + return TruncateFloat64ToFloat16(ChangeNumberToFloat64(n)); +} + +Convert(n: float16): float64 { + return ChangeFloat16ToFloat64(n); +} Convert(n: int32): float32 { return RoundInt32ToFloat32(n); } diff --git a/deps/v8/src/builtins/data-view.tq b/deps/v8/src/builtins/data-view.tq index 5358fad048b083..0dce66dfaae037 100644 --- a/deps/v8/src/builtins/data-view.tq +++ b/deps/v8/src/builtins/data-view.tq @@ -24,6 +24,8 @@ macro MakeDataViewGetterNameString(kind: constexpr ElementsKind): String { return 'DataView.prototype.getUint32'; } else if constexpr (kind == ElementsKind::INT32_ELEMENTS) { return 'DataView.prototype.getInt32'; + } else if constexpr (kind == ElementsKind::FLOAT16_ELEMENTS) { + return 'DataView.prototype.getFloat16'; } else if constexpr (kind == ElementsKind::FLOAT32_ELEMENTS) { return 'DataView.prototype.getFloat32'; } else if constexpr (kind == ElementsKind::FLOAT64_ELEMENTS) { @@ -50,6 +52,8 @@ macro MakeDataViewSetterNameString(kind: constexpr ElementsKind): String { return 'DataView.prototype.setUint32'; } else if constexpr (kind == ElementsKind::INT32_ELEMENTS) { return 'DataView.prototype.setInt32'; + } else if constexpr (kind == ElementsKind::FLOAT16_ELEMENTS) { + return 'DataView.prototype.setFloat16'; } else if constexpr (kind == ElementsKind::FLOAT32_ELEMENTS) { return 'DataView.prototype.setFloat32'; } else if constexpr (kind == ElementsKind::FLOAT64_ELEMENTS) { @@ -205,6 +209,23 @@ macro LoadDataView32( unreachable; } } +macro LoadDataViewFloat16( + buffer: JSArrayBuffer, offset: uintptr, + requestedLittleEndian: bool): Number { + const dataPointer: RawPtr = buffer.backing_store_ptr; + const b0: uint32 = LoadUint8(dataPointer, offset); + const b1: uint32 = LoadUint8(dataPointer, offset + 1); + let result: uint32; + + if (requestedLittleEndian) { + result = (b1 << 8) | b0; + } else { + result = (b0 << 8) | b1; + } + + const floatRes: float64 = Convert(BitcastUint32ToFloat16(result)); + return Convert(floatRes); +} macro LoadDataViewFloat64( buffer: JSArrayBuffer, offset: uintptr, @@ -449,6 +470,8 @@ transitioning macro DataViewGet( return LoadDataView32(buffer, bufferIndex, littleEndian, kind); } else if constexpr (kind == ElementsKind::INT32_ELEMENTS) { return LoadDataView32(buffer, bufferIndex, littleEndian, kind); + } else if constexpr (kind == ElementsKind::FLOAT16_ELEMENTS) { + return LoadDataViewFloat16(buffer, bufferIndex, littleEndian); } else if constexpr (kind == ElementsKind::FLOAT32_ELEMENTS) { return LoadDataView32(buffer, bufferIndex, littleEndian, kind); } else if constexpr (kind == ElementsKind::FLOAT64_ELEMENTS) { @@ -511,6 +534,15 @@ transitioning javascript builtin DataViewPrototypeGetInt32( context, receiver, offset, isLittleEndian, ElementsKind::INT32_ELEMENTS); } +transitioning javascript builtin DataViewPrototypeGetFloat16( + js-implicit context: NativeContext, receiver: JSAny)(...arguments): JSAny { + const offset: JSAny = arguments[0]; + const isLittleEndian: JSAny = arguments[1]; + 
return DataViewGet( + context, receiver, offset, isLittleEndian, + ElementsKind::FLOAT16_ELEMENTS); +} + transitioning javascript builtin DataViewPrototypeGetFloat32( js-implicit context: NativeContext, receiver: JSAny)(...arguments): JSAny { const offset: JSAny = arguments[0]; @@ -777,6 +809,11 @@ transitioning macro DataViewSet( StoreDataView16( buffer, bufferIndex, TruncateFloat64ToWord32(doubleValue), littleEndian); + } else if constexpr (kind == ElementsKind::FLOAT16_ELEMENTS) { + const floatValue: float16 = TruncateFloat64ToFloat16(doubleValue); + StoreDataView16( + buffer, bufferIndex, BitcastFloat16ToUint32(floatValue), + littleEndian); } else if constexpr ( kind == ElementsKind::UINT32_ELEMENTS || kind == ElementsKind::INT32_ELEMENTS) { @@ -857,6 +894,16 @@ transitioning javascript builtin DataViewPrototypeSetInt32( ElementsKind::INT32_ELEMENTS); } +transitioning javascript builtin DataViewPrototypeSetFloat16( + js-implicit context: NativeContext, receiver: JSAny)(...arguments): JSAny { + const offset: JSAny = arguments[0]; + const value: JSAny = arguments[1]; + const isLittleEndian: JSAny = arguments[2]; + return DataViewSet( + context, receiver, offset, value, isLittleEndian, + ElementsKind::FLOAT16_ELEMENTS); +} + transitioning javascript builtin DataViewPrototypeSetFloat32( js-implicit context: NativeContext, receiver: JSAny)(...arguments): JSAny { const offset: JSAny = arguments[0]; diff --git a/deps/v8/src/builtins/ia32/builtins-ia32.cc b/deps/v8/src/builtins/ia32/builtins-ia32.cc index 7e29337e1bc8a8..1e6ddbaef78611 100644 --- a/deps/v8/src/builtins/ia32/builtins-ia32.cc +++ b/deps/v8/src/builtins/ia32/builtins-ia32.cc @@ -4085,7 +4085,8 @@ void SwitchToTheCentralStackIfNeeded(MacroAssembler* masm, int edi_slot_index) { Immediate(ER::isolate_address(masm->isolate()))); __ mov(Operand(esp, 1 * kSystemPointerSize), kOldSPRegister); - __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2); + __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2, + SetIsolateDataSlots::kNo); __ mov(central_stack_sp, kReturnRegister0); __ pop(kRuntimeCallFunctionRegister); @@ -4131,7 +4132,8 @@ void SwitchFromTheCentralStackIfNeeded(MacroAssembler* masm) { __ PrepareCallCFunction(1, ecx); __ Move(Operand(esp, 0 * kSystemPointerSize), Immediate(ER::isolate_address(masm->isolate()))); - __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1); + __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1, + SetIsolateDataSlots::kNo); __ pop(kReturnRegister1); __ pop(kReturnRegister0); @@ -4277,7 +4279,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size, __ mov(Operand(esp, 1 * kSystemPointerSize), Immediate(0)); // argv. __ Move(esi, Immediate(ER::isolate_address(masm->isolate()))); __ mov(Operand(esp, 2 * kSystemPointerSize), esi); - __ CallCFunction(find_handler, 3); + __ CallCFunction(find_handler, 3, SetIsolateDataSlots::kNo); } // Retrieve the handler context, SP and FP. 
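The DataView float16 accessors above need only two byte loads plus a binary16-to-double widening: LoadDataViewFloat16 assembles the bytes according to the requested endianness and then goes through ChangeFloat16ToFloat64. The widening is plain IEEE 754 arithmetic; a self-contained C++ sketch of the same computation (illustrative, not V8's third_party/fp16 implementation):

    #include <cmath>
    #include <cstddef>
    #include <cstdint>

    // Decode an IEEE 754 binary16 value stored at p[offset], p[offset + 1].
    double LoadFloat16(const uint8_t* p, size_t offset, bool little_endian) {
      const uint32_t b0 = p[offset], b1 = p[offset + 1];
      const uint16_t bits = static_cast<uint16_t>(
          little_endian ? (b1 << 8) | b0 : (b0 << 8) | b1);

      const uint32_t sign = bits >> 15;
      const uint32_t exp = (bits >> 10) & 0x1F;  // 5 exponent bits, bias 15
      const uint32_t frac = bits & 0x3FF;        // 10 fraction bits

      double mag;
      if (exp == 0) {
        mag = std::ldexp(frac, -24);  // subnormal: frac * 2^-24
      } else if (exp == 31) {
        mag = frac != 0 ? NAN : INFINITY;  // NaN or +/- infinity
      } else {
        // normal: (1 + frac/1024) * 2^(exp - 15) == (1024 + frac) * 2^(exp - 25)
        mag = std::ldexp(1024 + frac, static_cast<int>(exp) - 25);
      }
      return sign != 0 ? -mag : mag;
    }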
@@ -4434,7 +4436,8 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister(); topmost_script_having_context = CallApiCallbackGenericDescriptor:: TopmostScriptHavingContextRegister(); - callback = CallApiCallbackGenericDescriptor::CallHandlerInfoRegister(); + callback = + CallApiCallbackGenericDescriptor::FunctionTemplateInfoRegister(); holder = CallApiCallbackGenericDescriptor::HolderRegister(); break; @@ -4494,7 +4497,8 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, __ PushRoot(RootIndex::kUndefinedValue); // kNewTarget switch (mode) { case CallApiCallbackMode::kGeneric: - __ push(FieldOperand(callback, CallHandlerInfo::kDataOffset)); + __ push( + FieldOperand(callback, FunctionTemplateInfo::kCallbackDataOffset)); break; case CallApiCallbackMode::kOptimizedNoProfiling: @@ -4548,13 +4552,13 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, // Target parameter. static_assert(ApiCallbackExitFrameConstants::kTargetOffset == 2 * kSystemPointerSize); - __ push(FieldOperand(callback, CallHandlerInfo::kOwnerTemplateOffset)); + __ push(callback); __ PushReturnAddressFrom(argc); __ mov(api_function_address, FieldOperand(callback, - CallHandlerInfo::kMaybeRedirectedCallbackOffset)); + FunctionTemplateInfo::kMaybeRedirectedCallbackOffset)); __ EnterExitFrame(kApiArgc + kApiStackSpace, StackFrame::API_CALLBACK_EXIT, api_function_address); @@ -4692,7 +4696,7 @@ void Builtins::Generate_CallApiGetter(MacroAssembler* masm) { static constexpr int kNameOnStackSize = 1; static constexpr int kStackUnwindSpace = PCA::kArgsLength + kNameOnStackSize; - // The API function takes a name handle and v8::PropertyCallbackInfo + // The API function takes a name local handle and v8::PropertyCallbackInfo // reference, allocate them in non-GCed space of the exit frame. 
static constexpr int kApiArgc = 2; static constexpr int kApiArg0Offset = 0 * kSystemPointerSize; @@ -4714,8 +4718,12 @@ void Builtins::Generate_CallApiGetter(MacroAssembler* masm) { Operand info_object = ExitFrameStackSlotOperand(kApiArgsSize); __ mov(info_object, args_array); - __ RecordComment("Handle"); + __ RecordComment("Local"); +#ifdef V8_ENABLE_DIRECT_LOCAL + __ mov(args_array, Operand(args_array, -kSystemPointerSize)); +#else __ sub(args_array, Immediate(kSystemPointerSize)); +#endif __ mov(ExitFrameStackSlotOperand(kApiArg0Offset), args_array); args_array = no_reg; __ RecordComment("&v8::PropertyCallbackInfo::args_"); diff --git a/deps/v8/src/builtins/js-to-js.tq b/deps/v8/src/builtins/js-to-js.tq index 691271e9caf0c5..5c78db71f9af9b 100644 --- a/deps/v8/src/builtins/js-to-js.tq +++ b/deps/v8/src/builtins/js-to-js.tq @@ -4,8 +4,6 @@ namespace runtime { extern runtime IsWasmExternalFunction(NoContext, JSAny): Boolean; -extern runtime TierUpJSToJSWrapper( - NoContext, WasmApiFunctionRef, WasmFunctionData): JSAny; } // namespace runtime namespace wasm { @@ -77,12 +75,6 @@ transitioning javascript builtin JSToJSWrapper( UnsafeCast(target.shared_function_info.function_data); const ref = UnsafeCast(functionData.internal.ref); - dcheck(ref.wrapper_budget > 0); - ref.wrapper_budget = ref.wrapper_budget - 1; - if (ref.wrapper_budget == 0) { - runtime::TierUpJSToJSWrapper(kNoContext, ref, functionData); - } - const signaturePod = &ref.sig.bytes; const serializedSig = torque_internal::unsafe::NewConstSlice( signaturePod.object, signaturePod.offset, diff --git a/deps/v8/src/builtins/loong64/builtins-loong64.cc b/deps/v8/src/builtins/loong64/builtins-loong64.cc index 7ac1b6d083deec..ed3b109840f352 100644 --- a/deps/v8/src/builtins/loong64/builtins-loong64.cc +++ b/deps/v8/src/builtins/loong64/builtins-loong64.cc @@ -346,9 +346,8 @@ static void GetSharedFunctionInfoBytecodeOrBaseline(MacroAssembler* masm, #endif // !V8_JITLESS __ Branch(&done, ne, scratch1, Operand(INTERPRETER_DATA_TYPE)); - __ LoadTrustedPointerField( - bytecode, FieldMemOperand(data, InterpreterData::kBytecodeArrayOffset), - kBytecodeArrayIndirectPointerTag); + __ LoadProtectedPointerField( + bytecode, FieldMemOperand(data, InterpreterData::kBytecodeArrayOffset)); __ bind(&done); } @@ -1334,7 +1333,7 @@ void Builtins::Generate_InterpreterEntryTrampoline( __ Move(a2, kInterpreterBytecodeArrayRegister); static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch"); __ ReplaceClosureCodeWithOptimizedCode(a2, closure); - __ JumpCodeObject(a2); + __ JumpCodeObject(a2, kJSEntrypointTag); __ bind(&install_baseline_code); __ GenerateTailCallToReturnedCode(Runtime::kInstallBaselineCode); @@ -1712,9 +1711,9 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) { __ JumpIfObjectType(&builtin_trampoline, ne, t0, INTERPRETER_DATA_TYPE, kInterpreterDispatchTableRegister); - __ LoadTaggedField( + __ LoadProtectedPointerField( t0, FieldMemOperand(t0, InterpreterData::kInterpreterTrampolineOffset)); - __ LoadCodeInstructionStart(t0, t0); + __ LoadCodeInstructionStart(t0, t0, kJSEntrypointTag); __ Branch(&trampoline_loaded); __ bind(&builtin_trampoline); @@ -1971,7 +1970,7 @@ void OnStackReplacement(MacroAssembler* masm, OsrSourceTier source, // Load deoptimization data from the code object. 
// = [#deoptimization_data_offset] - __ LoadTaggedField( + __ LoadProtectedPointerField( a1, MemOperand(maybe_target_code, Code::kDeoptimizationDataOrInterpreterDataOffset - kHeapObjectTag)); @@ -1979,11 +1978,12 @@ void OnStackReplacement(MacroAssembler* masm, OsrSourceTier source, // Load the OSR entrypoint offset from the deoptimization data. // = [#header_size + #osr_pc_offset] __ SmiUntagField(a1, - MemOperand(a1, FixedArray::OffsetOfElementAt( + MemOperand(a1, TrustedFixedArray::OffsetOfElementAt( DeoptimizationData::kOsrPcOffsetIndex) - kHeapObjectTag)); - __ LoadCodeInstructionStart(maybe_target_code, maybe_target_code); + __ LoadCodeInstructionStart(maybe_target_code, maybe_target_code, + kJSEntrypointTag); // Compute the target address = code_entry + osr_offset // = + @@ -3152,7 +3152,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size, __ mov(a0, zero_reg); __ mov(a1, zero_reg); __ li(a2, ExternalReference::isolate_address(masm->isolate())); - __ CallCFunction(find_handler, 3); + __ CallCFunction(find_handler, 3, SetIsolateDataSlots::kNo); } // Retrieve the handler context, SP and FP. @@ -3286,7 +3286,8 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister(); topmost_script_having_context = CallApiCallbackGenericDescriptor:: TopmostScriptHavingContextRegister(); - callback = CallApiCallbackGenericDescriptor::CallHandlerInfoRegister(); + callback = + CallApiCallbackGenericDescriptor::FunctionTemplateInfoRegister(); holder = CallApiCallbackGenericDescriptor::HolderRegister(); break; @@ -3359,7 +3360,8 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, switch (mode) { case CallApiCallbackMode::kGeneric: __ LoadTaggedField( - scratch2, FieldMemOperand(callback, CallHandlerInfo::kDataOffset)); + scratch2, + FieldMemOperand(callback, FunctionTemplateInfo::kCallbackDataOffset)); __ St_d(scratch2, MemOperand(sp, FCA::kDataIndex * kSystemPointerSize)); break; @@ -3412,16 +3414,13 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, // Target parameter. static_assert(ApiCallbackExitFrameConstants::kTargetOffset == 2 * kSystemPointerSize); - __ LoadTaggedField( - scratch, - FieldMemOperand(callback, CallHandlerInfo::kOwnerTemplateOffset)); - __ St_d(scratch, MemOperand(sp, 0 * kSystemPointerSize)); + __ St_d(callback, MemOperand(sp, 0 * kSystemPointerSize)); __ LoadExternalPointerField( api_function_address, FieldMemOperand(callback, - CallHandlerInfo::kMaybeRedirectedCallbackOffset), - kCallHandlerInfoCallbackTag); + FunctionTemplateInfo::kMaybeRedirectedCallbackOffset), + kFunctionTemplateInfoCallbackTag); __ EnterExitFrame(kApiStackSpace, StackFrame::API_CALLBACK_EXIT); } else { @@ -3556,8 +3555,14 @@ void Builtins::Generate_CallApiGetter(MacroAssembler* masm) { __ RecordComment( "Load address of v8::PropertyAccessorInfo::args_ array and name handle."); - // name_arg = Handle(&name), name value was pushed to GC-ed stack space. +#ifdef V8_ENABLE_DIRECT_LOCAL + // name_arg = Local(name), name value was pushed to GC-ed stack space. + __ mov(name_arg, scratch); + USE(kNameStackIndex); +#else + // name_arg = Local(&name), name value was pushed to GC-ed stack space. 
__ Add_d(name_arg, sp, Operand(kNameStackIndex * kSystemPointerSize)); +#endif // property_callback_info_arg = v8::PCI::args_ (= &ShouldThrow) __ Add_d(property_callback_info_arg, sp, Operand(kPCAStackIndex * kSystemPointerSize)); @@ -3951,7 +3956,7 @@ void Generate_BaselineOrInterpreterEntry(MacroAssembler* masm, __ PrepareCallCFunction(3, 0, a4); __ CallCFunction(get_baseline_pc, 3, 0); } - __ LoadCodeInstructionStart(code_obj, code_obj); + __ LoadCodeInstructionStart(code_obj, code_obj, kJSEntrypointTag); __ Add_d(code_obj, code_obj, kReturnRegister0); __ Pop(kInterpreterAccumulatorRegister); diff --git a/deps/v8/src/builtins/math.tq b/deps/v8/src/builtins/math.tq index 5a64d533517ee3..edffab76c74490 100644 --- a/deps/v8/src/builtins/math.tq +++ b/deps/v8/src/builtins/math.tq @@ -282,6 +282,14 @@ transitioning javascript builtin MathFround( return Convert(x64); } +// ES6 #sec-math.f16round +transitioning javascript builtin MathF16round( + js-implicit context: NativeContext)(x: JSAny): Number { + const x16 = Convert(ToNumber_Inline(x)); + const x64 = Convert(x16); + return Convert(x64); +} + // ES6 #sec-math.imul transitioning javascript builtin MathImul( js-implicit context: NativeContext)(x: JSAny, y: JSAny): Number { diff --git a/deps/v8/src/builtins/mips64/builtins-mips64.cc b/deps/v8/src/builtins/mips64/builtins-mips64.cc index 311b0ddffa5217..10d6990961bfee 100644 --- a/deps/v8/src/builtins/mips64/builtins-mips64.cc +++ b/deps/v8/src/builtins/mips64/builtins-mips64.cc @@ -1277,7 +1277,7 @@ void Builtins::Generate_InterpreterEntryTrampoline( __ Move(a2, kInterpreterBytecodeArrayRegister); static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch"); __ ReplaceClosureCodeWithOptimizedCode(a2, closure, t0, t1); - __ JumpCodeObject(a2); + __ JumpCodeObject(a2, kJSEntrypointTag); __ bind(&install_baseline_code); __ GenerateTailCallToReturnedCode(Runtime::kInstallBaselineCode); @@ -1653,7 +1653,7 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) { Operand(INTERPRETER_DATA_TYPE)); __ Ld(t0, FieldMemOperand(t0, InterpreterData::kInterpreterTrampolineOffset)); - __ LoadCodeInstructionStart(t0, t0); + __ LoadCodeInstructionStart(t0, t0, kJSEntrypointTag); __ Branch(&trampoline_loaded); __ bind(&builtin_trampoline); @@ -1916,7 +1916,8 @@ void OnStackReplacement(MacroAssembler* masm, OsrSourceTier source, DeoptimizationData::kOsrPcOffsetIndex) - kHeapObjectTag)); - __ LoadCodeInstructionStart(maybe_target_code, maybe_target_code); + __ LoadCodeInstructionStart(maybe_target_code, maybe_target_code, + kJSEntrypointTag); // Compute the target address = code_entry + osr_offset // = + @@ -3268,7 +3269,8 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister(); topmost_script_having_context = CallApiCallbackGenericDescriptor:: TopmostScriptHavingContextRegister(); - callback = CallApiCallbackGenericDescriptor::CallHandlerInfoRegister(); + callback = + CallApiCallbackGenericDescriptor::FunctionTemplateInfoRegister(); holder = CallApiCallbackGenericDescriptor::HolderRegister(); break; @@ -3340,7 +3342,8 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, // kData. 
switch (mode) { case CallApiCallbackMode::kGeneric: - __ Ld(scratch2, FieldMemOperand(callback, CallHandlerInfo::kDataOffset)); + __ Ld(scratch2, FieldMemOperand( + callback, FunctionTemplateInfo::kCallbackDataOffset)); __ Sd(scratch2, MemOperand(sp, FCA::kDataIndex * kSystemPointerSize)); break; @@ -3393,13 +3396,11 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, // Target parameter. static_assert(ApiCallbackExitFrameConstants::kTargetOffset == 2 * kSystemPointerSize); - __ Ld(scratch, - FieldMemOperand(callback, CallHandlerInfo::kOwnerTemplateOffset)); - __ Sd(scratch, MemOperand(sp, 0 * kSystemPointerSize)); + __ Sd(callback, MemOperand(sp, 0 * kSystemPointerSize)); __ Ld(api_function_address, - FieldMemOperand(callback, - CallHandlerInfo::kMaybeRedirectedCallbackOffset)); + FieldMemOperand( + callback, FunctionTemplateInfo::kMaybeRedirectedCallbackOffset)); __ EnterExitFrame(kApiStackSpace, StackFrame::API_CALLBACK_EXIT); } else { @@ -3533,8 +3534,14 @@ void Builtins::Generate_CallApiGetter(MacroAssembler* masm) { __ RecordComment( "Load address of v8::PropertyAccessorInfo::args_ array and name handle."); - // name_arg = Handle(&name), name value was pushed to GC-ed stack space. +#ifdef V8_ENABLE_DIRECT_LOCAL + // name_arg = Local(name), name value was pushed to GC-ed stack space. + __ mov(name_arg, scratch); + USE(kNameStackIndex); +#else + // name_arg = Local(&name), name value was pushed to GC-ed stack space. __ Daddu(name_arg, sp, Operand(kNameStackIndex * kSystemPointerSize)); +#endif // property_callback_info_arg = v8::PCI::args_ (= &ShouldThrow) __ Daddu(property_callback_info_arg, sp, Operand(kPCAStackIndex * kSystemPointerSize)); @@ -3932,7 +3939,7 @@ void Generate_BaselineOrInterpreterEntry(MacroAssembler* masm, __ PrepareCallCFunction(3, 0, a4); __ CallCFunction(get_baseline_pc, 3, 0); } - __ LoadCodeInstructionStart(code_obj, code_obj); + __ LoadCodeInstructionStart(code_obj, code_obj, kJSEntrypointTag); __ Daddu(code_obj, code_obj, kReturnRegister0); __ Pop(kInterpreterAccumulatorRegister); diff --git a/deps/v8/src/builtins/ppc/builtins-ppc.cc b/deps/v8/src/builtins/ppc/builtins-ppc.cc index 62f7b35518b682..5d138cf817a131 100644 --- a/deps/v8/src/builtins/ppc/builtins-ppc.cc +++ b/deps/v8/src/builtins/ppc/builtins-ppc.cc @@ -3382,7 +3382,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size, __ li(r3, Operand::Zero()); __ li(r4, Operand::Zero()); __ Move(r5, ExternalReference::isolate_address(masm->isolate())); - __ CallCFunction(find_handler, 3); + __ CallCFunction(find_handler, 3, SetIsolateDataSlots::kNo); } // Retrieve the handler context, SP and FP. 
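(An aside on the recurring CallCFunction change in this and the surrounding hunks: the new SetIsolateDataSlots::kNo argument opts a call out of the isolate's fast-C-call bookkeeping. Below is a minimal C++ sketch of the control flow, under a deliberately simplified CallCFunction signature; only the enum name and its kYes/kNo values come from the diff, the body is illustrative.)

enum class SetIsolateDataSlots { kNo, kYes };

void CallCFunction(void (*fn)(), int num_arguments,
                   SetIsolateDataSlots slots = SetIsolateDataSlots::kYes) {
  if (slots == SetIsolateDataSlots::kYes) {
    // Record the caller's FP and PC in IsolateData so the stack stays
    // iterable while the C++ callee runs (the macro-assembler hunks further
    // down show the actual stores of fp and a PC scratch register).
  }
  fn();  // argument marshalling for num_arguments elided in this sketch
}

(Call sites like the exception-handler lookup above then read, in sketch form, CallCFunction(find_handler, 3, SetIsolateDataSlots::kNo), skipping the stores.)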
@@ -3574,7 +3574,8 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister(); topmost_script_having_context = CallApiCallbackGenericDescriptor:: TopmostScriptHavingContextRegister(); - callback = CallApiCallbackGenericDescriptor::CallHandlerInfoRegister(); + callback = + CallApiCallbackGenericDescriptor::FunctionTemplateInfoRegister(); holder = CallApiCallbackGenericDescriptor::HolderRegister(); break; @@ -3646,7 +3647,8 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, switch (mode) { case CallApiCallbackMode::kGeneric: __ LoadTaggedField( - scratch2, FieldMemOperand(callback, CallHandlerInfo::kDataOffset), + scratch2, + FieldMemOperand(callback, FunctionTemplateInfo::kCallbackDataOffset), r0); __ StoreU64(scratch2, MemOperand(sp, FCA::kDataIndex * kSystemPointerSize)); @@ -3707,16 +3709,13 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, // Target parameter. static_assert(ApiCallbackExitFrameConstants::kTargetOffset == 2 * kSystemPointerSize); - __ LoadTaggedField( - scratch, - FieldMemOperand(callback, CallHandlerInfo::kOwnerTemplateOffset), r0); - __ StoreU64(scratch, MemOperand(sp, 0 * kSystemPointerSize)); + __ StoreU64(callback, MemOperand(sp, 0 * kSystemPointerSize)); __ LoadExternalPointerField( api_function_address, FieldMemOperand(callback, - CallHandlerInfo::kMaybeRedirectedCallbackOffset), - kCallHandlerInfoCallbackTag, no_reg, scratch); + FunctionTemplateInfo::kMaybeRedirectedCallbackOffset), + kFunctionTemplateInfoCallbackTag, no_reg, scratch); __ EnterExitFrame(kApiStackSpace, StackFrame::API_CALLBACK_EXIT); } else { @@ -3834,8 +3833,14 @@ void Builtins::Generate_CallApiGetter(MacroAssembler* masm) { "Load address of v8::PropertyAccessorInfo::args_ array and name handle."); // Load address of v8::PropertyAccessorInfo::args_ array and name handle. - // name_arg = Handle(&name), name value was pushed to GC-ed stack space. +#ifdef V8_ENABLE_DIRECT_LOCAL + // name_arg = Local(name), name value was pushed to GC-ed stack space. + __ mr(name_arg, scratch); +#else + // name_arg = Local(&name), name value was pushed to GC-ed stack space. 
__ mr(name_arg, sp); +#endif + // property_callback_info_arg = v8::PCI::args_ (= &ShouldThrow) __ addi(property_callback_info_arg, name_arg, Operand(1 * kSystemPointerSize)); diff --git a/deps/v8/src/builtins/promise-abstract-operations.tq b/deps/v8/src/builtins/promise-abstract-operations.tq index fdbc6faa9776e0..834007399ffdec 100644 --- a/deps/v8/src/builtins/promise-abstract-operations.tq +++ b/deps/v8/src/builtins/promise-abstract-operations.tq @@ -128,6 +128,10 @@ transitioning macro MorphAndEnqueuePromiseReaction( static_assert( kPromiseReactionPromiseOrCapabilityOffset == kPromiseReactionJobTaskPromiseOrCapabilityOffset); + @if(V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA) + static_assert( + kPromiseReactionContinuationPreservedEmbedderDataOffset == + kPromiseReactionJobTaskContinuationPreservedEmbedderDataOffset); } else { static_assert(reactionType == kPromiseReactionReject); *UnsafeConstCast(&promiseReaction.map) = @@ -141,6 +145,10 @@ transitioning macro MorphAndEnqueuePromiseReaction( static_assert( kPromiseReactionPromiseOrCapabilityOffset == kPromiseReactionJobTaskPromiseOrCapabilityOffset); + @if(V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA) + static_assert( + kPromiseReactionContinuationPreservedEmbedderDataOffset == + kPromiseReactionJobTaskContinuationPreservedEmbedderDataOffset); } } @@ -450,13 +458,11 @@ transitioning macro PerformPromiseThenImpl( // PromiseReaction holding both the onFulfilled and onRejected callbacks. // Once the {promise} is resolved we decide on the concrete handler to // push onto the microtask queue. - const handlerContext = ExtractHandlerContext(onFulfilled, onRejected); const promiseReactions = UnsafeCast<(Zero | PromiseReaction)>(promise.reactions_or_result); const reaction = NewPromiseReaction( - handlerContext, promiseReactions, resultPromiseOrCapability, - onFulfilled, onRejected); + promiseReactions, resultPromiseOrCapability, onFulfilled, onRejected); promise.reactions_or_result = reaction; } else { const reactionsOrResult = promise.reactions_or_result; diff --git a/deps/v8/src/builtins/promise-misc.tq b/deps/v8/src/builtins/promise-misc.tq index 6b167839f107b9..b769435d6386bc 100644 --- a/deps/v8/src/builtins/promise-misc.tq +++ b/deps/v8/src/builtins/promise-misc.tq @@ -77,19 +77,14 @@ macro NewPromiseFulfillReactionJobTask( promiseOrCapability: JSPromise|PromiseCapability| Undefined): PromiseFulfillReactionJobTask { @if(V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA) { - const isolateContinuationData = GetContinuationPreservedEmbedderData(); - const nativeContext = LoadNativeContext(handlerContext); return new PromiseFulfillReactionJobTask{ map: PromiseFulfillReactionJobTaskMapConstant(), + continuation_preserved_embedder_data: + GetContinuationPreservedEmbedderData(), argument, context: handlerContext, handler, - promise_or_capability: promiseOrCapability, - isolate_continuation_preserved_embedder_data: isolateContinuationData, - context_continuation_preserved_embedder_data: - *ContextSlot( - nativeContext, - ContextSlot::CONTINUATION_PRESERVED_EMBEDDER_DATA_INDEX) + promise_or_capability: promiseOrCapability }; } @@ -110,19 +105,14 @@ macro NewPromiseRejectReactionJobTask( promiseOrCapability: JSPromise|PromiseCapability| Undefined): PromiseRejectReactionJobTask { @if(V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA) { - const isolateContinuationData = GetContinuationPreservedEmbedderData(); - const nativeContext = LoadNativeContext(handlerContext); return new PromiseRejectReactionJobTask{ map: 
PromiseRejectReactionJobTaskMapConstant(), + continuation_preserved_embedder_data: + GetContinuationPreservedEmbedderData(), argument, context: handlerContext, handler, - promise_or_capability: promiseOrCapability, - isolate_continuation_preserved_embedder_data: isolateContinuationData, - context_continuation_preserved_embedder_data: - *ContextSlot( - nativeContext, - ContextSlot::CONTINUATION_PRESERVED_EMBEDDER_DATA_INDEX) + promise_or_capability: promiseOrCapability }; } @@ -305,30 +295,23 @@ transitioning macro NewJSPromise( } macro NewPromiseReaction( - implicit context: Context)(handlerContext: Context, - next: Zero|PromiseReaction, + implicit context: Context)(next: Zero|PromiseReaction, promiseOrCapability: JSPromise|PromiseCapability|Undefined, fulfillHandler: Callable|Undefined, rejectHandler: Callable|Undefined): PromiseReaction { @if(V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA) { - const isolateContinuationData = GetContinuationPreservedEmbedderData(); - const nativeContext = LoadNativeContext(handlerContext); return new PromiseReaction{ map: PromiseReactionMapConstant(), + continuation_preserved_embedder_data: + GetContinuationPreservedEmbedderData(), next: next, reject_handler: rejectHandler, fulfill_handler: fulfillHandler, - promise_or_capability: promiseOrCapability, - isolate_continuation_preserved_embedder_data: isolateContinuationData, - context_continuation_preserved_embedder_data: - *ContextSlot( - nativeContext, - ContextSlot::CONTINUATION_PRESERVED_EMBEDDER_DATA_INDEX) + promise_or_capability: promiseOrCapability }; } @ifnot(V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA) { - dcheck(IsContext(handlerContext)); return new PromiseReaction{ map: PromiseReactionMapConstant(), next: next, @@ -360,13 +343,27 @@ macro NewPromiseResolveThenableJobTask( // 1. Let job be a new Job abstract closure with no parameters that // captures promiseToResolve, thenable, and then... // 5. Return { [[Job]]: job, [[Realm]]: thenRealm }. 
- return new PromiseResolveThenableJobTask{ - map: PromiseResolveThenableJobTaskMapConstant(), - context: nativeContext, - promise_to_resolve: promiseToResolve, - thenable, - then - }; + @if(V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA) { + return new PromiseResolveThenableJobTask{ + map: PromiseResolveThenableJobTaskMapConstant(), + continuation_preserved_embedder_data: + GetContinuationPreservedEmbedderData(), + context: nativeContext, + promise_to_resolve: promiseToResolve, + thenable, + then + }; + } + + @ifnot(V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA) { + return new PromiseResolveThenableJobTask{ + map: PromiseResolveThenableJobTaskMapConstant(), + context: nativeContext, + promise_to_resolve: promiseToResolve, + thenable, + then + }; + } } struct InvokeThenOneArgFunctor { diff --git a/deps/v8/src/builtins/riscv/builtins-riscv.cc b/deps/v8/src/builtins/riscv/builtins-riscv.cc index 94593f08920567..9ef4d5f82df266 100644 --- a/deps/v8/src/builtins/riscv/builtins-riscv.cc +++ b/deps/v8/src/builtins/riscv/builtins-riscv.cc @@ -50,6 +50,7 @@ enum class ArgumentsElementType { void Generate_PushArguments(MacroAssembler* masm, Register array, Register argc, Register scratch, Register scratch2, ArgumentsElementType element_type) { + ASM_CODE_COMMENT(masm); DCHECK(!AreAliased(array, argc, scratch)); Label loop, entry; __ SubWord(scratch, argc, Operand(kJSArgcReceiverSlots)); @@ -314,22 +315,50 @@ static void AssertCodeIsBaseline(MacroAssembler* masm, Register code, __ Assert(eq, AbortReason::kExpectedBaselineData, scratch, Operand(static_cast&lt;int64_t&gt;(CodeKind::BASELINE))); } + +// Equivalent of SharedFunctionInfo::GetData +static void GetSharedFunctionInfoData(MacroAssembler* masm, Register data, + Register sfi, Register scratch) { + ASM_CODE_COMMENT(masm); +#ifdef V8_ENABLE_SANDBOX + DCHECK(!AreAliased(data, scratch)); + DCHECK(!AreAliased(sfi, scratch)); + // Use trusted_function_data if non-empty, otherwise the regular function_data. + Label use_tagged_field, done; + __ Lwu(scratch, + FieldMemOperand(sfi, SharedFunctionInfo::kTrustedFunctionDataOffset)); + __ Branch(&use_tagged_field, eq, scratch, Operand(zero_reg)); + __ ResolveIndirectPointerHandle(data, scratch, kUnknownIndirectPointerTag); + __ Branch(&done); + __ bind(&use_tagged_field); + __ LoadTaggedField( + data, FieldMemOperand(sfi, SharedFunctionInfo::kFunctionDataOffset)); + __ bind(&done); +#else + __ LoadTaggedField( + data, FieldMemOperand(sfi, SharedFunctionInfo::kFunctionDataOffset)); +#endif // V8_ENABLE_SANDBOX +} // TODO(v8:11429): Add a path for "not_compiled" and unify the two uses under // the more general dispatch.
static void GetSharedFunctionInfoBytecodeOrBaseline(MacroAssembler* masm, - Register sfi_data, + Register sfi, + Register bytecode, Register scratch1, Label* is_baseline) { + DCHECK(!AreAliased(bytecode, scratch1)); ASM_CODE_COMMENT(masm); Label done; - __ GetObjectType(sfi_data, scratch1, scratch1); + Register data = bytecode; + GetSharedFunctionInfoData(masm, data, sfi, scratch1); + __ GetObjectType(data, scratch1, scratch1); #ifndef V8_JITLESS if (v8_flags.debug_code) { Label not_baseline; __ Branch(¬_baseline, ne, scratch1, Operand(CODE_TYPE)); - AssertCodeIsBaseline(masm, sfi_data, scratch1); + AssertCodeIsBaseline(masm, data, scratch1); __ Branch(is_baseline); __ bind(¬_baseline); } else { @@ -337,11 +366,9 @@ static void GetSharedFunctionInfoBytecodeOrBaseline(MacroAssembler* masm, } #endif // !V8_JITLESS - __ Branch(&done, ne, scratch1, Operand(INTERPRETER_DATA_TYPE), - Label::Distance::kNear); - __ LoadTaggedField( - sfi_data, - FieldMemOperand(sfi_data, InterpreterData::kBytecodeArrayOffset)); + __ Branch(&done, ne, scratch1, Operand(INTERPRETER_DATA_TYPE)); + __ LoadProtectedPointerField( + bytecode, FieldMemOperand(data, InterpreterData::kBytecodeArrayOffset)); __ bind(&done); } @@ -429,13 +456,14 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) { // Underlying function needs to have bytecode available. if (v8_flags.debug_code) { Label is_baseline; + Register sfi = a3; + Register bytecode = a3; __ LoadTaggedField( - a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset)); - __ LoadTaggedField( - a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset)); - GetSharedFunctionInfoBytecodeOrBaseline(masm, a3, a0, &is_baseline); - __ GetObjectType(a3, a3, a3); - __ Assert(eq, AbortReason::kMissingBytecodeArray, a3, + sfi, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset)); + GetSharedFunctionInfoBytecodeOrBaseline(masm, sfi, bytecode, t5, + &is_baseline); + __ GetObjectType(a3, a3, bytecode); + __ Assert(eq, AbortReason::kMissingBytecodeArray, bytecode, Operand(BYTECODE_ARRAY_TYPE)); __ bind(&is_baseline); } @@ -1112,16 +1140,14 @@ void Builtins::Generate_InterpreterEntryTrampoline( Register closure = a1; // Get the bytecode array from the function object and load it into // kInterpreterBytecodeArrayRegister. + Register sfi = a4; __ LoadTaggedField( - kScratchReg, - FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset)); - ResetSharedFunctionInfoAge(masm, kScratchReg); - __ LoadTaggedField( - kInterpreterBytecodeArrayRegister, - FieldMemOperand(kScratchReg, SharedFunctionInfo::kFunctionDataOffset)); + sfi, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset)); + ResetSharedFunctionInfoAge(masm, sfi); + Label is_baseline; GetSharedFunctionInfoBytecodeOrBaseline( - masm, kInterpreterBytecodeArrayRegister, kScratchReg, &is_baseline); + masm, sfi, kInterpreterBytecodeArrayRegister, kScratchReg, &is_baseline); // The bytecode array could have been flushed from the shared function info, // if so, call into CompileLazy. 
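(To make the new GetSharedFunctionInfoData helper above easier to follow, here is a C++ sketch of the dispatch it implements, using simplified stand-in types; only the "trusted slot if non-empty, otherwise the tagged function_data" logic mirrors the diff.)

#include <cstdint>

struct SharedFunctionInfoLayout {
  uint32_t trusted_function_data;  // indirect-pointer handle; 0 when empty
  void* function_data;             // regular tagged field
};

// resolve_handle stands in for ResolveIndirectPointerHandle.
void* GetData(const SharedFunctionInfoLayout& sfi,
              void* (*resolve_handle)(uint32_t)) {
  if (sfi.trusted_function_data != 0) {  // sandbox build: prefer trusted slot
    return resolve_handle(sfi.trusted_function_data);
  }
  return sfi.function_data;              // fall back to the tagged slot
}

(The riscv assembly above is the register-level version of this branch: an Lwu of the trusted handle, a branch on zero, and a tagged load on the fallback path.)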
@@ -1328,7 +1354,7 @@ void Builtins::Generate_InterpreterEntryTrampoline( __ Move(a2, kInterpreterBytecodeArrayRegister); static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch"); __ ReplaceClosureCodeWithOptimizedCode(a2, closure); - __ JumpCodeObject(a2); + __ JumpCodeObject(a2, kJSEntrypointTag); __ bind(&install_baseline_code); __ GenerateTailCallToReturnedCode(Runtime::kInstallBaselineCode); @@ -1699,16 +1725,15 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) { __ LoadWord(t0, MemOperand(fp, StandardFrameConstants::kFunctionOffset)); __ LoadTaggedField( t0, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset)); - __ LoadTaggedField( - t0, FieldMemOperand(t0, SharedFunctionInfo::kFunctionDataOffset)); + GetSharedFunctionInfoData(masm, t0, t0, t1); __ GetObjectType(t0, kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister); __ Branch(&builtin_trampoline, ne, kInterpreterDispatchTableRegister, Operand(INTERPRETER_DATA_TYPE), Label::Distance::kNear); - __ LoadTaggedField( + __ LoadProtectedPointerField( t0, FieldMemOperand(t0, InterpreterData::kInterpreterTrampolineOffset)); - __ LoadCodeInstructionStart(t0, t0); + __ LoadCodeInstructionStart(t0, t0, kJSEntrypointTag); __ BranchShort(&trampoline_loaded); __ bind(&builtin_trampoline); @@ -1967,18 +1992,17 @@ void OnStackReplacement(MacroAssembler* masm, OsrSourceTier source, // Load deoptimization data from the code object. // = [#deoptimization_data_offset] - __ LoadTaggedField( - a1, MemOperand(a0, Code::kDeoptimizationDataOrInterpreterDataOffset - - kHeapObjectTag)); + __ LoadProtectedPointerField( + a1, FieldMemOperand(maybe_target_code, + Code::kDeoptimizationDataOrInterpreterDataOffset)); // Load the OSR entrypoint offset from the deoptimization data. // = [#header_size + #osr_pc_offset] - __ SmiUntagField(a1, - MemOperand(a1, FixedArray::OffsetOfElementAt( - DeoptimizationData::kOsrPcOffsetIndex) - - kHeapObjectTag)); + __ SmiUntagField( + a1, FieldMemOperand(a1, TrustedFixedArray::OffsetOfElementAt( + DeoptimizationData::kOsrPcOffsetIndex))); - __ LoadCodeInstructionStart(a0, a0); + __ LoadCodeInstructionStart(a0, a0, kJSEntrypointTag); // Compute the target address = code_entry + osr_offset // = + @@ -3085,7 +3109,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size, __ Move(a0, zero_reg); __ Move(a1, zero_reg); __ li(a2, ExternalReference::isolate_address(masm->isolate())); - __ CallCFunction(find_handler, 3); + __ CallCFunction(find_handler, 3, SetIsolateDataSlots::kNo); } // Retrieve the handler context, SP and FP. @@ -3270,7 +3294,27 @@ void Builtins::Generate_WasmToJsWrapperAsm(MacroAssembler* masm) { } void Builtins::Generate_WasmTrapHandlerLandingPad(MacroAssembler* masm) { - __ Trap(); + // This builtin gets called from the WebAssembly trap handler when an + // out-of-bounds memory access happened or when a null reference gets + // dereferenced. This builtin then fakes a call from the instruction that + // triggered the signal to the runtime. This is done by setting a return + // address and then jumping to a builtin which will call further to the + // runtime. + // As the return address we use the fault address + 1. Using the fault address + // itself would cause problems with safepoints and source positions. + // + // The problem with safepoints is that a safepoint has to be registered at the + // return address, and that at most one safepoint should be registered at a + // location. 
However, there could already be a safepoint registered at the + // fault address if the fault address is the return address of a call. + // + // The problem with source positions is that the stack trace code looks for + // the source position of a call before the return address. The source + // position of the faulty memory access, however, is recorded at the fault + // address. Therefore the stack trace code would not find the source position + // if we used the fault address as the return address. + __ AddWord(ra, kWasmTrapHandlerFaultAddressRegister, 1); + __ TailCallBuiltin(Builtin::kWasmTrapHandlerThrowTrap); } void Builtins::Generate_WasmSuspend(MacroAssembler* masm) { @@ -3330,7 +3374,8 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, topmost_script_having_context = CallApiCallbackGenericDescriptor:: TopmostScriptHavingContextRegister(); argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister(); - callback = CallApiCallbackGenericDescriptor::CallHandlerInfoRegister(); + callback = + CallApiCallbackGenericDescriptor::FunctionTemplateInfoRegister(); holder = CallApiCallbackGenericDescriptor::HolderRegister(); break; @@ -3402,7 +3447,8 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, switch (mode) { case CallApiCallbackMode::kGeneric: __ LoadTaggedField( - scratch2, FieldMemOperand(callback, CallHandlerInfo::kDataOffset)); + scratch2, + FieldMemOperand(callback, FunctionTemplateInfo::kCallbackDataOffset)); __ StoreWord(scratch2, MemOperand(sp, FCA::kDataIndex * kSystemPointerSize)); break; @@ -3458,16 +3504,13 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, // Target parameter. static_assert(ApiCallbackExitFrameConstants::kTargetOffset == 2 * kSystemPointerSize); - __ LoadTaggedField( - scratch, - FieldMemOperand(callback, CallHandlerInfo::kOwnerTemplateOffset)); - __ StoreWord(scratch, MemOperand(sp, 0 * kSystemPointerSize)); + __ StoreWord(callback, MemOperand(sp, 0 * kSystemPointerSize)); __ LoadExternalPointerField( api_function_address, FieldMemOperand(callback, - CallHandlerInfo::kMaybeRedirectedCallbackOffset), - kCallHandlerInfoCallbackTag); + FunctionTemplateInfo::kMaybeRedirectedCallbackOffset), + kFunctionTemplateInfoCallbackTag); __ EnterExitFrame(kApiStackSpace, StackFrame::API_CALLBACK_EXIT); } else { @@ -3608,8 +3651,13 @@ void Builtins::Generate_CallApiGetter(MacroAssembler* masm) { __ RecordComment( "Load address of v8::PropertyAccessorInfo::args_ array and name handle."); - // name_arg = Handle(&name), name value was pushed to GC-ed stack space. +#ifdef V8_ENABLE_DIRECT_LOCAL + // name_arg = Local(name), name value was pushed to GC-ed stack space. + __ Move(name_arg, scratch); +#else + // name_arg = Local(&name), name value was pushed to GC-ed stack space. __ Move(name_arg, sp); +#endif // property_callback_info_arg = v8::PCI::args_ (= &ShouldThrow) __ AddWord(property_callback_info_arg, name_arg, Operand(1 * kSystemPointerSize)); @@ -3913,9 +3961,7 @@ void Generate_BaselineOrInterpreterEntry(MacroAssembler* masm, ResetSharedFunctionInfoAge(masm, code_obj); } - __ LoadTaggedField( - code_obj, - FieldMemOperand(code_obj, SharedFunctionInfo::kFunctionDataOffset)); + GetSharedFunctionInfoData(masm, code_obj, code_obj, t2); // Check if we have baseline code. For OSR entry it is safe to assume we // always have baseline code. @@ -3957,10 +4003,12 @@ void Generate_BaselineOrInterpreterEntry(MacroAssembler* masm, Label install_baseline_code; // Check if feedback vector is valid. 
If not, call prepare for baseline to // allocate it. - UseScratchRegisterScope temps(masm); - Register type = temps.Acquire(); - __ GetObjectType(feedback_vector, type, type); - __ Branch(&install_baseline_code, ne, type, Operand(FEEDBACK_VECTOR_TYPE)); + { + UseScratchRegisterScope temps(masm); + Register type = temps.Acquire(); + __ GetObjectType(feedback_vector, type, type); + __ Branch(&install_baseline_code, ne, type, Operand(FEEDBACK_VECTOR_TYPE)); + } // Save BytecodeOffset from the stack frame. __ SmiUntag(kInterpreterBytecodeOffsetRegister, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp)); @@ -4020,7 +4068,7 @@ void Generate_BaselineOrInterpreterEntry(MacroAssembler* masm, __ PrepareCallCFunction(3, 0, a4); __ CallCFunction(get_baseline_pc, 3, 0); } - __ LoadCodeInstructionStart(code_obj, code_obj); + __ LoadCodeInstructionStart(code_obj, code_obj, kJSEntrypointTag); __ AddWord(code_obj, code_obj, kReturnRegister0); __ Pop(kInterpreterAccumulatorRegister); diff --git a/deps/v8/src/builtins/s390/builtins-s390.cc b/deps/v8/src/builtins/s390/builtins-s390.cc index d6b19ab4c471de..b6d38b35cc65b5 100644 --- a/deps/v8/src/builtins/s390/builtins-s390.cc +++ b/deps/v8/src/builtins/s390/builtins-s390.cc @@ -3297,7 +3297,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size, __ mov(r2, Operand::Zero()); __ mov(r3, Operand::Zero()); __ Move(r4, ExternalReference::isolate_address(masm->isolate())); - __ CallCFunction(find_handler, 3); + __ CallCFunction(find_handler, 3, SetIsolateDataSlots::kNo); } // Retrieve the handler context, SP and FP. @@ -3479,7 +3479,8 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister(); topmost_script_having_context = CallApiCallbackGenericDescriptor:: TopmostScriptHavingContextRegister(); - callback = CallApiCallbackGenericDescriptor::CallHandlerInfoRegister(); + callback = + CallApiCallbackGenericDescriptor::FunctionTemplateInfoRegister(); holder = CallApiCallbackGenericDescriptor::HolderRegister(); break; @@ -3551,7 +3552,8 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, switch (mode) { case CallApiCallbackMode::kGeneric: __ LoadTaggedField( - scratch2, FieldMemOperand(callback, CallHandlerInfo::kDataOffset)); + scratch2, + FieldMemOperand(callback, FunctionTemplateInfo::kCallbackDataOffset)); __ StoreU64(scratch2, MemOperand(sp, FCA::kDataIndex * kSystemPointerSize)); break; @@ -3612,14 +3614,12 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, // Target parameter. static_assert(ApiCallbackExitFrameConstants::kTargetOffset == 2 * kSystemPointerSize); - __ LoadTaggedField( - scratch, - FieldMemOperand(callback, CallHandlerInfo::kOwnerTemplateOffset)); - __ StoreU64(scratch, MemOperand(sp, 0 * kSystemPointerSize)); + __ StoreU64(callback, MemOperand(sp, 0 * kSystemPointerSize)); - __ LoadU64(api_function_address, - FieldMemOperand( - callback, CallHandlerInfo::kMaybeRedirectedCallbackOffset)); + __ LoadU64( + api_function_address, + FieldMemOperand(callback, + FunctionTemplateInfo::kMaybeRedirectedCallbackOffset)); __ EnterExitFrame(kApiStackSpace, StackFrame::API_CALLBACK_EXIT); } else { @@ -3733,8 +3733,13 @@ void Builtins::Generate_CallApiGetter(MacroAssembler* masm) { __ Push(smi_zero, scratch); __ RecordComment( "Load address of v8::PropertyAccessorInfo::args_ array and name handle."); - // name_arg = Handle(&name), name value was pushed to GC-ed stack space. 
+#ifdef V8_ENABLE_DIRECT_LOCAL + // name_arg = Local(name), name value was pushed to GC-ed stack space. + __ mov(name_arg, scratch); +#else + // name_arg = Local(&name), name value was pushed to GC-ed stack space. __ mov(name_arg, sp); +#endif // property_callback_info_arg = v8::PCI::args_ (= &ShouldThrow) __ AddS64(property_callback_info_arg, name_arg, Operand(1 * kSystemPointerSize)); diff --git a/deps/v8/src/builtins/typed-array.tq b/deps/v8/src/builtins/typed-array.tq index bcb293eb542437..4f49b737371dc7 100644 --- a/deps/v8/src/builtins/typed-array.tq +++ b/deps/v8/src/builtins/typed-array.tq @@ -14,6 +14,7 @@ type Uint16Elements extends ElementsKind; type Int16Elements extends ElementsKind; type Uint32Elements extends ElementsKind; type Int32Elements extends ElementsKind; +type Float16Elements extends ElementsKind; type Float32Elements extends ElementsKind; type Float64Elements extends ElementsKind; type Uint8ClampedElements extends ElementsKind; @@ -146,6 +147,8 @@ macro GetTypedArrayAccessor(elementsKindParam: ElementsKind): if (IsElementsKindGreaterThan(elementsKind, ElementsKind::UINT32_ELEMENTS)) { if (elementsKind == ElementsKind::INT32_ELEMENTS) { return GetTypedArrayAccessor(); + } else if (elementsKind == ElementsKind::FLOAT16_ELEMENTS) { + return GetTypedArrayAccessor(); } else if (elementsKind == ElementsKind::FLOAT32_ELEMENTS) { return GetTypedArrayAccessor(); } else if (elementsKind == ElementsKind::FLOAT64_ELEMENTS) { @@ -266,6 +269,9 @@ KindForArrayType(): constexpr ElementsKind { KindForArrayType(): constexpr ElementsKind { return ElementsKind::INT32_ELEMENTS; } +KindForArrayType(): constexpr ElementsKind { + return ElementsKind::FLOAT16_ELEMENTS; +} KindForArrayType(): constexpr ElementsKind { return ElementsKind::FLOAT32_ELEMENTS; } diff --git a/deps/v8/src/builtins/wasm-strings.tq b/deps/v8/src/builtins/wasm-strings.tq index fb103155db439f..7d0e88381db699 100644 --- a/deps/v8/src/builtins/wasm-strings.tq +++ b/deps/v8/src/builtins/wasm-strings.tq @@ -30,8 +30,8 @@ transitioning javascript builtin WebAssemblyStringFromWtf16Array( js-implicit context: Context)(...arguments): JSAny { const array = WasmCastToSpecialPrimitiveArray(context, arguments[0], SmiConstant(16)); - const start = ChangeNumberToUint32(ToInteger_Inline(arguments[1])); - const end = ChangeNumberToUint32(ToInteger_Inline(arguments[2])); + const start = NumberToUint32(ToNumber_Inline(arguments[1])); + const end = NumberToUint32(ToNumber_Inline(arguments[2])); return wasm::WasmStringNewWtf16Array(array, start, end); } @@ -42,8 +42,8 @@ transitioning javascript builtin WebAssemblyStringFromUtf8Array( js-implicit context: Context)(...arguments): JSAny { const array = WasmCastToSpecialPrimitiveArray(context, arguments[0], SmiConstant(8)); - const start = ChangeNumberToUint32(ToInteger_Inline(arguments[1])); - const end = ChangeNumberToUint32(ToInteger_Inline(arguments[2])); + const start = NumberToUint32(ToNumber_Inline(arguments[1])); + const end = NumberToUint32(ToNumber_Inline(arguments[2])); return wasm::WasmStringNewWtf8Array( start, end, array, SmiConstant(kLossyUtf8)); } @@ -54,7 +54,7 @@ transitioning javascript builtin WebAssemblyStringIntoUtf8Array( const string = Cast(arguments[0]) otherwise goto IllegalCast; const array = WasmCastToSpecialPrimitiveArray(context, arguments[1], SmiConstant(8)); - const start = ChangeNumberToUint32(ToInteger_Inline(arguments[2])); + const start = NumberToUint32(ToNumber_Inline(arguments[2])); return runtime::WasmStringEncodeWtf8Array( context, 
SmiConstant(kLossyUtf8), string, array, ChangeUint32ToTagged(start)); @@ -79,7 +79,7 @@ transitioning javascript builtin WebAssemblyStringToWtf16Array( const string = Cast(arguments[0]) otherwise goto IllegalCast; const array = WasmCastToSpecialPrimitiveArray(context, arguments[1], SmiConstant(16)); - const start = ChangeNumberToUint32(ToInteger_Inline(arguments[2])); + const start = NumberToUint32(ToNumber_Inline(arguments[2])); const written = wasm::WasmStringEncodeWtf16Array(string, array, start); return Convert(written); } label IllegalCast deferred { @@ -89,14 +89,14 @@ transitioning javascript builtin WebAssemblyStringToWtf16Array( transitioning javascript builtin WebAssemblyStringFromCharCode( js-implicit context: Context)(...arguments): JSAny { - const code = ChangeNumberToUint32(ToInteger_Inline(arguments[0])); + const code = NumberToUint32(ToNumber_Inline(arguments[0])); return StringFromSingleCharCode(%RawDownCast(code & 0xFFFF)); } transitioning javascript builtin WebAssemblyStringFromCodePoint( js-implicit context: Context)(...arguments): JSAny { - const code = ToInteger_Inline(arguments[0]); - const codeUint = ChangeNumberToUint32(code); + const code = ToNumber_Inline(arguments[0]); + const codeUint = NumberToUint32(code); if (codeUint <= 0xFFFF) { return StringFromSingleCharCode(%RawDownCast(codeUint)); } @@ -107,7 +107,7 @@ transitioning javascript builtin WebAssemblyStringCodePointAt( js-implicit context: Context)(...arguments): JSAny { try { const string = Cast(arguments[0]) otherwise goto IllegalCast; - const index = ChangeNumberToUint32(ToInteger_Inline(arguments[1])); + const index = NumberToUint32(ToNumber_Inline(arguments[1])); if (index >= Unsigned(string.length)) goto OOB; const code: int32 = string::LoadSurrogatePairAt( string, string.length_intptr, Signed(Convert(index)), @@ -124,7 +124,7 @@ transitioning javascript builtin WebAssemblyStringCharCodeAt( js-implicit context: Context)(...arguments): JSAny { try { const string = Cast(arguments[0]) otherwise goto IllegalCast; - const index = ChangeNumberToUint32(ToInteger_Inline(arguments[1])); + const index = NumberToUint32(ToNumber_Inline(arguments[1])); if (index >= Unsigned(string.length)) goto OOB; const code: char16 = StringCharCodeAt(string, Convert(index)); return SmiTag(code); @@ -171,8 +171,8 @@ transitioning javascript builtin WebAssemblyStringSubstring( js-implicit context: Context)(...arguments): JSAny { try { const string = Cast(arguments[0]) otherwise goto IllegalCast; - const start = ChangeNumberToUint32(ToInteger_Inline(arguments[1])); - const end = ChangeNumberToUint32(ToInteger_Inline(arguments[2])); + const start = NumberToUint32(ToNumber_Inline(arguments[1])); + const end = NumberToUint32(ToNumber_Inline(arguments[2])); return wasm::WasmStringViewWtf16Slice(string, start, end); } label IllegalCast deferred { Trap(context, MessageTemplate::kWasmTrapIllegalCast); diff --git a/deps/v8/src/builtins/wasm-to-js.tq b/deps/v8/src/builtins/wasm-to-js.tq index 87e43f6f7835c3..bb228e34f124d3 100644 --- a/deps/v8/src/builtins/wasm-to-js.tq +++ b/deps/v8/src/builtins/wasm-to-js.tq @@ -56,6 +56,7 @@ macro HandleF32Returns( @export transitioning macro WasmToJSWrapper(ref: WasmApiFunctionRef): WasmToJSResult { + dcheck(Is(ref)); // Spill the signature on the stack so that it can be read by the GC. This is // done in the very beginning before a GC could be triggered. // Caller FP + return address. 
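(The wasm-strings.tq changes above replace ToInteger_Inline + ChangeNumberToUint32 with ToNumber_Inline + NumberToUint32. For reference, a standalone C++ sketch of the standard ECMAScript ToUint32 reduction these helpers are standing in for -- truncation toward zero followed by reduction modulo 2^32; the function name here is mine, not V8's.)

#include <cmath>
#include <cstdint>

uint32_t ToUint32(double number) {
  if (!std::isfinite(number)) return 0;      // NaN, +inf and -inf map to 0
  double truncated = std::trunc(number);     // drop the fractional part
  double wrapped = std::fmod(truncated, 4294967296.0);  // reduce mod 2^32
  if (wrapped < 0) wrapped += 4294967296.0;  // fmod keeps the dividend's sign
  return static_cast<uint32_t>(wrapped);
}

(For example, ToUint32(-1) yields 4294967295, matching JS `-1 >>> 0`.)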
diff --git a/deps/v8/src/builtins/wasm.tq b/deps/v8/src/builtins/wasm.tq index 4179bcb7fd474e..f1dfe08960e9ff 100644 --- a/deps/v8/src/builtins/wasm.tq +++ b/deps/v8/src/builtins/wasm.tq @@ -3,6 +3,7 @@ // found in the LICENSE file. #include 'src/builtins/builtins-wasm-gen.h' +#include 'src/builtins/builtins-call-gen.h' namespace runtime { extern runtime WasmMemoryGrow(Context, WasmTrustedInstanceData, Smi, Smi): Smi; @@ -45,7 +46,7 @@ extern runtime WasmArrayCopy(Context, WasmArray, Smi, WasmArray, Smi, Smi): extern runtime WasmArrayNewSegment( Context, WasmTrustedInstanceData, Smi, Smi, Smi, Map): Object; extern runtime WasmStringNewSegmentWtf8( - Context, WasmTrustedInstanceData, Smi, Smi, Smi): String; + Context, WasmTrustedInstanceData, Smi, Smi, Smi, Smi): String; extern runtime WasmArrayInitSegment( Context, WasmTrustedInstanceData, Smi, WasmArray, Smi, Smi, Smi): JSAny; extern runtime WasmStringNewWtf8( @@ -82,6 +83,13 @@ extern macro Allocate(intptr): HeapObject; extern macro Allocate(intptr, constexpr AllocationFlag): HeapObject; } +macro NumberToInt32(input: Number): int32 { + return Convert(input); +} +macro NumberToUint32(input: Number): uint32 { + return Unsigned(Convert(input)); +} + namespace wasm { const kAnyType: constexpr int31 generates 'wasm::kWasmAnyRef.raw_bit_field()'; @@ -100,7 +108,7 @@ extern macro WasmBuiltinsAssembler::LoadContextFromInstanceData( WasmTrustedInstanceData): NativeContext; extern macro WasmBuiltinsAssembler::LoadTablesFromInstanceData( WasmTrustedInstanceData): FixedArray; -extern macro WasmBuiltinsAssembler::LoadInternalFunctionsFromInstanceData( +extern macro WasmBuiltinsAssembler::LoadFuncRefsFromInstanceData( WasmTrustedInstanceData): FixedArray; extern macro WasmBuiltinsAssembler::LoadManagedObjectMapsFromInstanceData( WasmTrustedInstanceData): FixedArray; @@ -118,18 +126,18 @@ builtin WasmInt32ToHeapNumber(val: int32): HeapNumber { } builtin WasmFuncRefToJS( - implicit context: Context)(val: WasmInternalFunction|WasmNull): JSFunction - |Null { + implicit context: Context)(val: WasmFuncRef|WasmNull): JSFunction|Null { typeswitch (val) { case (WasmNull): { return Null; } - case (func: WasmInternalFunction): { - const maybeExternal: Object = func.external; + case (func: WasmFuncRef): { + const internal: WasmInternalFunction = func.internal; + const maybeExternal: Object = internal.external; if (maybeExternal != Undefined) { return %RawDownCast(maybeExternal); } - tail runtime::WasmInternalFunctionCreateExternal(context, func); + tail runtime::WasmInternalFunctionCreateExternal(context, internal); } } } @@ -261,6 +269,7 @@ builtin WasmTableSet(tableIndex: intptr, index: uint32, value: Object): } } +// Returns WasmFuncRef or WasmNull, or throws an exception. 
builtin WasmTableGetFuncRef(tableIndex: intptr, index: uint32): Object { const trustedData: WasmTrustedInstanceData = LoadInstanceDataFromFrame(); const entryIndex: intptr = Signed(ChangeUint32ToWord(index)); @@ -274,16 +283,14 @@ builtin WasmTableGetFuncRef(tableIndex: intptr, index: uint32): Object { if (index >= entriesCount) goto IndexOutOfRange; const entries: FixedArray = table.entries; - const entry: Object = LoadFixedArrayElement(entries, entryIndex); + const entry: HeapObject = + UnsafeCast(LoadFixedArrayElement(entries, entryIndex)); - try { - const entryObject: HeapObject = - TaggedToHeapObject(entry) otherwise ReturnEntry; - if (IsTuple2Map(entryObject.map)) goto CallRuntime; - goto ReturnEntry; - } label ReturnEntry { - return entry; - } + dcheck(Is(entry) || Is(entry) || Is(entry)); + if (IsTuple2Map(entry.map)) goto CallRuntime; + if (Is(entry)) return entry; + dcheck(Is(entry)); + return entry; } label CallRuntime deferred { tail runtime::WasmFunctionTableGet( LoadContextFromInstanceData(trustedData), trustedData, @@ -302,8 +309,9 @@ builtin WasmFunctionTableGet(tableIndex: intptr, index: int32): Object { SmiFromIntPtr(tableIndex), SmiFromInt32(index)); } -builtin WasmTableSetFuncRef(tableIndex: intptr, index: uint32, value: Object): - Object { +builtin WasmTableSetFuncRef( + tableIndex: intptr, index: uint32, value: WasmFuncRef): Object { + dcheck(Is(value) || Is(value)); const trustedData: WasmTrustedInstanceData = LoadInstanceDataFromFrame(); const entryIndex: intptr = Signed(ChangeUint32ToWord(index)); try { @@ -327,14 +335,13 @@ builtin WasmTableSetFuncRef(tableIndex: intptr, index: uint32, value: Object): builtin WasmRefFunc(index: uint32): Object { const trustedData: WasmTrustedInstanceData = LoadInstanceDataFromFrame(); try { - const table: FixedArray = - LoadInternalFunctionsFromInstanceData(trustedData); - const functionIndex: intptr = Signed(ChangeUint32ToWord(index)); - const result: Object = LoadFixedArrayElement(table, functionIndex); - // {result} is either a funcref or nullptr. A Smi check is the fastest - // way to distinguish these two cases. - if (TaggedIsSmi(result)) goto CallRuntime; - return result; + const funcRefs: FixedArray = LoadFuncRefsFromInstanceData(trustedData); + const funcref: Object = funcRefs.objects[index]; + // {funcref} is either a WasmFuncRef or Smi::zero(). A Smi check is the + // fastest way to distinguish these two cases. + if (TaggedIsSmi(funcref)) goto CallRuntime; + dcheck(Is(funcref)); + return funcref; } label CallRuntime deferred { tail runtime::WasmRefFunc( LoadContextFromInstanceData(trustedData), trustedData, @@ -571,8 +578,8 @@ extern macro LoadWasmInternalFunctionInstructionStart(WasmInternalFunction): // // TODO(rstz): The counter might overflow if it exceeds the range of a Smi. // This can lead to incorrect inlining decisions. -macro UpdateIC( - vector: FixedArray, index: intptr, funcref: WasmInternalFunction): void { +macro UpdateIC(vector: FixedArray, index: intptr, funcref: WasmFuncRef): + void { const value = vector.objects[index]; if (value == funcref) { // Monomorphic hit. Check for this case first to maximize its performance. @@ -616,7 +623,7 @@ macro UpdateIC( newEntries.objects[newIndex + 1] = SmiConstant(1); vector.objects[index] = newEntries; } - } else if (Is(value)) { + } else if (Is(value)) { // Monomorphic miss. 
const newEntries = UnsafeCast(AllocateFixedArray( ElementsKind::PACKED_ELEMENTS, 4, AllocationFlag::kNone)); @@ -638,19 +645,20 @@ macro UpdateIC( // Liftoff uses the two returned values directly. struct TargetAndRef { target: RawPtr; - ref: WasmInstanceObject|WasmApiFunctionRef; + ref: WasmTrustedInstanceData|WasmApiFunctionRef; } -builtin CallRefIC( - vector: FixedArray, index: intptr, - funcref: WasmInternalFunction): TargetAndRef { +builtin CallRefIC(vector: FixedArray, index: intptr, funcref: WasmFuncRef): + TargetAndRef { + dcheck(Is(funcref)); UpdateIC(vector, index, funcref); - let target = funcref.call_target_ptr; + const internal = funcref.internal; + let target = internal.call_target_ptr; if (Signed(target) == IntPtrConstant(0)) { - target = LoadWasmInternalFunctionInstructionStart(funcref); + target = LoadWasmInternalFunctionInstructionStart(internal); } - return TargetAndRef{target: target, ref: funcref.ref}; + return TargetAndRef{target: target, ref: internal.ref}; } extern macro TryHasOwnProperty(HeapObject, Map, InstanceType, Name): never @@ -935,7 +943,7 @@ builtin WasmStringNewWtf16Array(array: WasmArray, start: uint32, end: uint32): // Torque's type checker for tail calls. builtin WasmStringFromDataSegment( segmentLength: uint32, arrayStart: uint32, arrayEnd: uint32, - segmentIndex: Smi, segmentOffset: Smi): JSAny { + segmentIndex: Smi, segmentOffset: Smi, variant: Smi): JSAny { const trustedData = LoadInstanceDataFromFrame(); try { const segmentOffsetU: uint32 = Unsigned(SmiToInt32(segmentOffset)); @@ -951,7 +959,7 @@ builtin WasmStringFromDataSegment( const smiLength = Convert(arrayLength) otherwise SegmentOOB; tail runtime::WasmStringNewSegmentWtf8( LoadContextFromInstanceData(trustedData), trustedData, segmentIndex, - smiOffset, smiLength); + smiOffset, smiLength, variant); } label SegmentOOB deferred { tail ThrowWasmTrapElementSegmentOutOfBounds(); } label ArrayOutOfBounds deferred { @@ -974,11 +982,11 @@ builtin WasmStringConst(index: uint32): String { } builtin WasmStringMeasureUtf8(string: String): int32 { const result = runtime::WasmStringMeasureUtf8(LoadContextFromFrame(), string); - return Signed(ChangeNumberToUint32(result)); + return NumberToInt32(result); } builtin WasmStringMeasureWtf8(string: String): int32 { const result = runtime::WasmStringMeasureWtf8(LoadContextFromFrame(), string); - return Signed(ChangeNumberToUint32(result)); + return NumberToInt32(result); } builtin WasmStringEncodeWtf8( string: String, offset: uint32, memory: Smi, utf8Variant: Smi): uint32 { @@ -986,7 +994,7 @@ builtin WasmStringEncodeWtf8( const result = runtime::WasmStringEncodeWtf8( LoadContextFromInstanceData(trustedData), trustedData, memory, utf8Variant, string, WasmUint32ToNumber(offset)); - return ChangeNumberToUint32(result); + return NumberToUint32(result); } builtin WasmStringEncodeWtf8Array( string: String, array: WasmArray, start: uint32, utf8Variant: Smi): uint32 { @@ -994,7 +1002,7 @@ builtin WasmStringEncodeWtf8Array( const result = runtime::WasmStringEncodeWtf8Array( LoadContextFromInstanceData(trustedData), utf8Variant, string, array, WasmUint32ToNumber(start)); - return ChangeNumberToUint32(result); + return NumberToUint32(result); } builtin WasmStringToUtf8Array(string: String): WasmArray { return runtime::WasmStringToUtf8Array(LoadContextFromFrame(), string); @@ -1092,7 +1100,7 @@ builtin WasmStringEqual(a: String, b: String): int32 { builtin WasmStringIsUSVSequence(str: String): int32 { if (IsOneByteStringInstanceType(str.instanceType)) return 1; const 
length = runtime::WasmStringMeasureUtf8(LoadContextFromFrame(), str); - if (Signed(ChangeNumberToUint32(length)) < 0) return 0; + if (NumberToInt32(length) < 0) return 0; return 1; } @@ -1422,4 +1430,24 @@ builtin WasmAnyConvertExtern(externObject: JSAny): JSAny { context, externObject, SmiConstant(kAnyType)); } +extern macro CallOrConstructBuiltinsAssembler::GetCompatibleReceiver( + JSReceiver, HeapObject, Context): JSReceiver; + +builtin WasmFastApiCallTypeCheckAndUpdateIC( + implicit context: Context)(data: WasmFastApiCallData, + receiver: JSAny): Smi { + try { + const rec = Cast(receiver) otherwise goto IllegalCast; + ModifyThreadInWasmFlag(0); + // We don't care about the actual compatible receiver; we just rely + // on this helper throwing an exception when there isn't one. + GetCompatibleReceiver(rec, data.signature, context); + ModifyThreadInWasmFlag(1); + data.cached_map = StrongToWeak(rec.map); + return 1; + } label IllegalCast { + const error = MessageTemplate::kIllegalInvocation; + runtime::WasmThrowTypeError(context, SmiConstant(error), Convert(0)); + } +} } // namespace wasm diff --git a/deps/v8/src/builtins/x64/builtins-x64.cc b/deps/v8/src/builtins/x64/builtins-x64.cc index 68d1f2c6641cef..b1b3a8bf4f1795 100644 --- a/deps/v8/src/builtins/x64/builtins-x64.cc +++ b/deps/v8/src/builtins/x64/builtins-x64.cc @@ -3996,7 +3996,8 @@ void SwitchToTheCentralStackIfNeeded(MacroAssembler* masm, __ Move(kCArgRegs[0], ER::isolate_address(masm->isolate())); __ Move(kCArgRegs[1], kOldSPRegister); __ PrepareCallCFunction(2); - __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2); + __ CallCFunction(ER::wasm_switch_to_the_central_stack(), 2, + SetIsolateDataSlots::kNo); __ movq(central_stack_sp, kReturnRegister0); __ popq(argc_input); @@ -4039,7 +4040,8 @@ void SwitchFromTheCentralStackIfNeeded(MacroAssembler* masm, __ Move(kCArgRegs[0], ER::isolate_address(masm->isolate())); __ PrepareCallCFunction(1); - __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1); + __ CallCFunction(ER::wasm_switch_from_the_central_stack(), 1, + SetIsolateDataSlots::kNo); __ popq(kReturnRegister1); __ popq(kReturnRegister0); @@ -4215,7 +4217,7 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size, __ Move(kCArgRegs[1], 0); // argv. __ Move(kCArgRegs[2], ER::isolate_address(masm->isolate())); __ PrepareCallCFunction(3); - __ CallCFunction(find_handler, 3); + __ CallCFunction(find_handler, 3, SetIsolateDataSlots::kNo); } #ifdef V8_ENABLE_CET_SHADOW_STACK @@ -4366,7 +4368,8 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, argc = CallApiCallbackGenericDescriptor::ActualArgumentsCountRegister(); topmost_script_having_context = CallApiCallbackGenericDescriptor:: TopmostScriptHavingContextRegister(); - callback = CallApiCallbackGenericDescriptor::CallHandlerInfoRegister(); + callback = + CallApiCallbackGenericDescriptor::FunctionTemplateInfoRegister(); holder = CallApiCallbackGenericDescriptor::HolderRegister(); break; @@ -4425,8 +4428,9 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, __ Push(kScratchRegister); // kNewTarget switch (mode) { case CallApiCallbackMode::kGeneric: - __ PushTaggedField(FieldOperand(callback, CallHandlerInfo::kDataOffset), - scratch2); + __ PushTaggedField( + FieldOperand(callback, FunctionTemplateInfo::kCallbackDataOffset), + scratch2); break; case CallApiCallbackMode::kOptimizedNoProfiling: @@ -4472,16 +4476,15 @@ void Builtins::Generate_CallApiCallbackImpl(MacroAssembler* masm, // Target parameter. 
static_assert(ApiCallbackExitFrameConstants::kTargetOffset == 2 * kSystemPointerSize); - __ PushTaggedField( - FieldOperand(callback, CallHandlerInfo::kOwnerTemplateOffset), - scratch2); + __ Push(callback); __ PushReturnAddressFrom(scratch); __ LoadExternalPointerField( api_function_address, - FieldOperand(callback, CallHandlerInfo::kMaybeRedirectedCallbackOffset), - kCallHandlerInfoCallbackTag, kScratchRegister); + FieldOperand(callback, + FunctionTemplateInfo::kMaybeRedirectedCallbackOffset), + kFunctionTemplateInfoCallbackTag, kScratchRegister); __ EnterExitFrame(kApiStackSpace, StackFrame::API_CALLBACK_EXIT, api_function_address); @@ -4622,8 +4625,13 @@ void Builtins::Generate_CallApiGetter(MacroAssembler* masm) { Operand info_object = ExitFrameStackSlotOperand(0); __ movq(info_object, args_array); - // name_arg = Handle(&name), name value was pushed to GC-ed stack space. +#ifdef V8_ENABLE_DIRECT_LOCAL + // name_arg = Local(name), name value was pushed to GC-ed stack space. + __ movq(name_arg, Operand(args_array, -kSystemPointerSize)); +#else + // name_arg = Local(&name), name value was pushed to GC-ed stack space. __ leaq(name_arg, Operand(args_array, -kSystemPointerSize)); +#endif // The context register (rsi) might overlap with property_callback_info_arg // but the context value has been saved in EnterExitFrame and thus it could // be used to pass arguments. @@ -4723,7 +4731,7 @@ void Generate_DeoptimizationEntry(MacroAssembler* masm, __ LoadAddress(arg5, ExternalReference::isolate_address(isolate)); __ movq(Operand(rsp, 4 * kSystemPointerSize), arg5); #else - // r8 is kCArgRegs[4] on Linux + // r8 is kCArgRegs[4] on Linux. __ LoadAddress(r8, ExternalReference::isolate_address(isolate)); #endif diff --git a/deps/v8/src/codegen/OWNERS b/deps/v8/src/codegen/OWNERS index d476577f06dffa..5b17b9e31b4b14 100644 --- a/deps/v8/src/codegen/OWNERS +++ b/deps/v8/src/codegen/OWNERS @@ -5,9 +5,8 @@ ishell@chromium.org jgruber@chromium.org jkummerow@chromium.org leszeks@chromium.org -mslekova@chromium.org nicohartmann@chromium.org -tebbi@chromium.org victorgomes@chromium.org +dmercadier@chromium.org per-file compiler.*=marja@chromium.org diff --git a/deps/v8/src/codegen/arm/interface-descriptors-arm-inl.h b/deps/v8/src/codegen/arm/interface-descriptors-arm-inl.h index 2142ca9119c4c3..11f90f093f7132 100644 --- a/deps/v8/src/codegen/arm/interface-descriptors-arm-inl.h +++ b/deps/v8/src/codegen/arm/interface-descriptors-arm-inl.h @@ -92,6 +92,21 @@ constexpr Register KeyedLoadWithVectorDescriptor::VectorRegister() { return r3; } +// static +constexpr Register EnumeratedKeyedLoadBaselineDescriptor::EnumIndexRegister() { + return r4; +} + +// static +constexpr Register EnumeratedKeyedLoadBaselineDescriptor::CacheTypeRegister() { + return r5; +} + +// static +constexpr Register EnumeratedKeyedLoadBaselineDescriptor::SlotRegister() { + return r2; +} + // static constexpr Register KeyedHasICBaselineDescriptor::ReceiverRegister() { return kInterpreterAccumulatorRegister; @@ -347,7 +362,8 @@ CallApiCallbackGenericDescriptor::TopmostScriptHavingContextRegister() { return r1; } // static -constexpr Register CallApiCallbackGenericDescriptor::CallHandlerInfoRegister() { +constexpr Register +CallApiCallbackGenericDescriptor::FunctionTemplateInfoRegister() { return r3; } // static diff --git a/deps/v8/src/codegen/arm/macro-assembler-arm.cc b/deps/v8/src/codegen/arm/macro-assembler-arm.cc index 2310cd9e75b413..92215bd0fa6a0c 100644 --- a/deps/v8/src/codegen/arm/macro-assembler-arm.cc +++ 
b/deps/v8/src/codegen/arm/macro-assembler-arm.cc @@ -22,7 +22,7 @@ #include "src/debug/debug.h" #include "src/deoptimizer/deoptimizer.h" #include "src/execution/frames-inl.h" -#include "src/heap/memory-chunk.h" +#include "src/heap/mutable-page.h" #include "src/init/bootstrapper.h" #include "src/logging/counters.h" #include "src/objects/objects-inl.h" @@ -441,8 +441,7 @@ void MacroAssembler::TestCodeIsMarkedForDeoptimization(Register code, } Operand MacroAssembler::ClearedValue() const { - return Operand( - static_cast(HeapObjectReference::ClearedValue(isolate()).ptr())); + return Operand(static_cast(i::ClearedValue(isolate()).ptr())); } void MacroAssembler::Call(Label* target) { bl(target); } @@ -2783,6 +2782,13 @@ int MacroAssembler::CallCFunction(Register function, int num_reg_arguments, IsolateData::fast_c_call_caller_pc_offset())); str(fp, MemOperand(kRootRegister, IsolateData::fast_c_call_caller_fp_offset())); +#if DEBUG + // Reset Isolate::context field right before the fast C call such that the + // GC can visit this field unconditionally. This is necessary because + // CEntry sets it to kInvalidContext in debug build only. + mov(pc_scratch, Operand(Context::kNoContext)); + StoreRootRelative(IsolateData::context_offset(), pc_scratch); +#endif } else { DCHECK_NOT_NULL(isolate()); Register addr_scratch = r4; @@ -2794,7 +2800,15 @@ int MacroAssembler::CallCFunction(Register function, int num_reg_arguments, Move(addr_scratch, ExternalReference::fast_c_call_caller_fp_address(isolate())); str(fp, MemOperand(addr_scratch)); - +#if DEBUG + // Reset Isolate::context field right before the fast C call such that the + // GC can visit this field unconditionally. This is necessary because + // CEntry sets it to kInvalidContext in debug build only. + mov(pc_scratch, Operand(Context::kNoContext)); + str(pc_scratch, + ExternalReferenceAsOperand( + ExternalReference::context_address(isolate()), addr_scratch)); +#endif Pop(addr_scratch); } diff --git a/deps/v8/src/codegen/arm64/assembler-arm64.cc b/deps/v8/src/codegen/arm64/assembler-arm64.cc index 4c76a7af2f1a23..712978f1ec69d0 100644 --- a/deps/v8/src/codegen/arm64/assembler-arm64.cc +++ b/deps/v8/src/codegen/arm64/assembler-arm64.cc @@ -523,8 +523,7 @@ void Assembler::RemoveBranchFromLabelLinkChain(Instruction* branch, // currently referring to this label. label->Unuse(); } else { - label->link_to(static_cast(reinterpret_cast(next_link) - - buffer_start_)); + label->link_to(static_cast(InstructionOffset(next_link))); } } else if (branch == next_link) { @@ -545,6 +544,26 @@ void Assembler::RemoveBranchFromLabelLinkChain(Instruction* branch, next_link = link->ImmPCOffsetTarget(); end_of_chain = (link == next_link); link->SetImmPCOffsetTarget(options(), label_veneer); + // {link} is now resolved; remove it from {unresolved_branches_} so + // we won't later try to process it again, which would fail because + // by walking the chain of its label's unresolved branch instructions, + // we won't find it: {prev_link} is now the end of that chain after + // its update above. + if (link->IsCondBranchImm() || link->IsCompareBranch()) { + static_assert(Instruction::ImmBranchRange(CondBranchType) == + Instruction::ImmBranchRange(CompareBranchType)); + int max_reachable_pc = static_cast(InstructionOffset(link)) + + Instruction::ImmBranchRange(CondBranchType); + unresolved_branches_.erase(max_reachable_pc); + } else if (link->IsTestBranch()) { + // Add 1 to account for branch type tag bit. 
+ int max_reachable_pc = static_cast(InstructionOffset(link)) + + Instruction::ImmBranchRange(TestBranchType) + + 1; + unresolved_branches_.erase(max_reachable_pc); + } else { + // Other branch types are not handled by veneers. + } link = next_link; } } else { @@ -4713,14 +4732,6 @@ void Assembler::EmitVeneers(bool force_emit, bool need_protection, } } - // Update next_veneer_pool_check_ (tightly coupled with unresolved_branches_). - if (unresolved_branches_.empty()) { - next_veneer_pool_check_ = kMaxInt; - } else { - next_veneer_pool_check_ = - unresolved_branches_first_limit() - kVeneerDistanceCheckMargin; - } - // Reminder: We iterate in reverse order to avoid duplicate linked-list // iteration in RemoveBranchFromLabelLinkChain (which starts at the target // label, and iterates backwards through linked branch instructions). @@ -4733,6 +4744,16 @@ void Assembler::EmitVeneers(bool force_emit, bool need_protection, RemoveBranchFromLabelLinkChain(branch, tasks[i].label_, veneer); } + // Update next_veneer_pool_check_ (tightly coupled with unresolved_branches_). + // This must happen after the calls to {RemoveBranchFromLabelLinkChain}, + // because that function can resolve additional branches. + if (unresolved_branches_.empty()) { + next_veneer_pool_check_ = kMaxInt; + } else { + next_veneer_pool_check_ = + unresolved_branches_first_limit() - kVeneerDistanceCheckMargin; + } + // Now emit the actual veneer and patch up the incoming branch. for (const FarBranchInfo& info : tasks) { diff --git a/deps/v8/src/codegen/arm64/decoder-arm64-inl.h b/deps/v8/src/codegen/arm64/decoder-arm64-inl.h index aed10853571c46..4b82e94b210561 100644 --- a/deps/v8/src/codegen/arm64/decoder-arm64-inl.h +++ b/deps/v8/src/codegen/arm64/decoder-arm64-inl.h @@ -5,7 +5,6 @@ #ifndef V8_CODEGEN_ARM64_DECODER_ARM64_INL_H_ #define V8_CODEGEN_ARM64_DECODER_ARM64_INL_H_ -#include "src/base/v8-fallthrough.h" #include "src/codegen/arm64/decoder-arm64.h" namespace v8 { @@ -475,7 +474,7 @@ void Decoder::DecodeDataProcessing(Instruction* instr) { } break; } - V8_FALLTHROUGH; + [[fallthrough]]; } case 1: case 3: diff --git a/deps/v8/src/codegen/arm64/interface-descriptors-arm64-inl.h b/deps/v8/src/codegen/arm64/interface-descriptors-arm64-inl.h index 0502dc16737acf..e8a51ae64c33f9 100644 --- a/deps/v8/src/codegen/arm64/interface-descriptors-arm64-inl.h +++ b/deps/v8/src/codegen/arm64/interface-descriptors-arm64-inl.h @@ -85,6 +85,21 @@ constexpr Register KeyedLoadWithVectorDescriptor::VectorRegister() { return x3; } +// static +constexpr Register EnumeratedKeyedLoadBaselineDescriptor::EnumIndexRegister() { + return x4; +} + +// static +constexpr Register EnumeratedKeyedLoadBaselineDescriptor::CacheTypeRegister() { + return x5; +} + +// static +constexpr Register EnumeratedKeyedLoadBaselineDescriptor::SlotRegister() { + return x2; +} + // static constexpr Register KeyedHasICBaselineDescriptor::ReceiverRegister() { return kInterpreterAccumulatorRegister; @@ -346,7 +361,8 @@ CallApiCallbackGenericDescriptor::TopmostScriptHavingContextRegister() { return x1; } // static -constexpr Register CallApiCallbackGenericDescriptor::CallHandlerInfoRegister() { +constexpr Register +CallApiCallbackGenericDescriptor::FunctionTemplateInfoRegister() { return x3; } // static diff --git a/deps/v8/src/codegen/arm64/macro-assembler-arm64-inl.h b/deps/v8/src/codegen/arm64/macro-assembler-arm64-inl.h index 0eda579b9d12fb..29229dc6f60049 100644 --- a/deps/v8/src/codegen/arm64/macro-assembler-arm64-inl.h +++ 
b/deps/v8/src/codegen/arm64/macro-assembler-arm64-inl.h @@ -1136,6 +1136,8 @@ void MacroAssembler::Uxtw(const Register& rd, const Register& rn) { void MacroAssembler::InitializeRootRegister() { ExternalReference isolate_root = ExternalReference::isolate_root(isolate()); Mov(kRootRegister, Operand(isolate_root)); + Fmov(fp_zero, 0.0); + #ifdef V8_COMPRESS_POINTERS LoadRootRelative(kPtrComprCageBaseRegister, IsolateData::cage_base_offset()); #endif diff --git a/deps/v8/src/codegen/arm64/macro-assembler-arm64.cc b/deps/v8/src/codegen/arm64/macro-assembler-arm64.cc index 636dcdc874382c..9553771259fbe2 100644 --- a/deps/v8/src/codegen/arm64/macro-assembler-arm64.cc +++ b/deps/v8/src/codegen/arm64/macro-assembler-arm64.cc @@ -19,7 +19,7 @@ #include "src/deoptimizer/deoptimizer.h" #include "src/execution/frame-constants.h" #include "src/execution/frames-inl.h" -#include "src/heap/memory-chunk.h" +#include "src/heap/mutable-page.h" #include "src/init/bootstrapper.h" #include "src/logging/counters.h" #include "src/runtime/runtime.h" @@ -2091,13 +2091,20 @@ int MacroAssembler::CallCFunction(Register function, int num_of_reg_args, DCHECK(has_frame()); Label get_pc; + UseScratchRegisterScope temps(this); + // We're doing a C call, which means non-parameter caller-saved registers + // (x8-x17) will be clobbered and so are available to use as scratches. + // In the worst-case scenario, we'll need 2 scratch registers. We pick 3 + // registers minus the `function` register, in case `function` aliases with + // any of the registers. + temps.Include(CPURegList(64, {x8, x9, x10, function})); + temps.Exclude(function); if (set_isolate_data_slots == SetIsolateDataSlots::kYes) { // Save the frame pointer and PC so that the stack layout remains iterable, // even without an ExitFrame which normally exists between JS and C frames. - Register pc_scratch = x4; - Register addr_scratch = x5; - Push(pc_scratch, addr_scratch); + UseScratchRegisterScope temps(this); + Register pc_scratch = temps.AcquireX(); Adr(pc_scratch, &get_pc); @@ -2107,7 +2114,16 @@ int MacroAssembler::CallCFunction(Register function, int num_of_reg_args, static_assert(IsolateData::fast_c_call_caller_pc_offset() == fp_offset + 8); Stp(fp, pc_scratch, MemOperand(kRootRegister, fp_offset)); + +#if DEBUG + // Reset Isolate::context field right before the fast C call such that the + // GC can visit this field unconditionally. This is necessary because + // CEntry sets it to kInvalidContext in debug build only. + static_assert(Context::kNoContext == 0); + StoreRootRelative(IsolateData::context_offset(), xzr); +#endif } else { + Register addr_scratch = temps.AcquireX(); DCHECK_NOT_NULL(isolate()); Mov(addr_scratch, ExternalReference::fast_c_call_caller_pc_address(isolate())); @@ -2115,9 +2131,16 @@ int MacroAssembler::CallCFunction(Register function, int num_of_reg_args, Mov(addr_scratch, ExternalReference::fast_c_call_caller_fp_address(isolate())); Str(fp, MemOperand(addr_scratch)); +#if DEBUG + // Reset Isolate::context field right before the fast C call such that the + // GC can visit this field unconditionally. This is necessary because + // CEntry sets it to kInvalidContext in debug build only. + static_assert(Context::kNoContext == 0); + Str(xzr, + ExternalReferenceAsOperand( + ExternalReference::context_address(isolate()), addr_scratch)); +#endif } - - Pop(addr_scratch, pc_scratch); } // Call directly. 
The function called cannot cause a GC, or allow preemption, @@ -2134,12 +2157,11 @@ int MacroAssembler::CallCFunction(Register function, int num_of_reg_args, IsolateData::fast_c_call_caller_fp_offset())); } else { DCHECK_NOT_NULL(isolate()); - Register addr_scratch = x5; - Push(addr_scratch, xzr); + UseScratchRegisterScope temps(this); + Register addr_scratch = temps.AcquireX(); Mov(addr_scratch, ExternalReference::fast_c_call_caller_fp_address(isolate())); Str(xzr, MemOperand(addr_scratch)); - Pop(xzr, addr_scratch); } } @@ -2841,8 +2863,7 @@ void MacroAssembler::JumpIfCodeIsTurbofanned(Register code, Register scratch, } Operand MacroAssembler::ClearedValue() const { - return Operand( - static_cast<int32_t>(HeapObjectReference::ClearedValue(isolate()).ptr())); + return Operand(static_cast<int32_t>(i::ClearedValue(isolate()).ptr())); } Operand MacroAssembler::ReceiverOperand() { return Operand(0); } @@ -3493,7 +3514,7 @@ void MacroAssembler::CheckPageFlag(const Register& object, int mask, ASM_CODE_COMMENT(this); UseScratchRegisterScope temps(this); Register scratch = temps.AcquireX(); - And(scratch, object, ~MemoryChunkHeader::GetAlignmentMaskForAssembler()); + And(scratch, object, ~MemoryChunk::GetAlignmentMaskForAssembler()); Ldr(scratch, MemOperand(scratch, MemoryChunkLayout::kFlagsOffset)); if (cc == ne) { TestAndBranchIfAnySet(scratch, mask, condition_met); @@ -3698,9 +3719,10 @@ void MacroAssembler::ResolveTrustedPointerHandle(Register destination, Mov(handle, Operand(handle, LSR, kTrustedPointerHandleShift)); Ldr(destination, MemOperand(table, handle, LSL, kTrustedPointerTableEntrySizeLog2)); - // The LSB is used as marking bit by the trusted pointer table, so here we - // have to set it using a bitwise OR as it may or may not be set. - Orr(destination, destination, Immediate(kHeapObjectTag)); + // Untag the pointer and remove the marking bit in one operation. + Register tag_reg = handle; + Mov(tag_reg, Immediate(~(tag | kTrustedPointerTableMarkBit))); + And(destination, destination, tag_reg); } void MacroAssembler::ResolveCodePointerHandle(Register destination, diff --git a/deps/v8/src/codegen/bailout-reason.cc b/deps/v8/src/codegen/bailout-reason.cc index f4573fbe9c17a5..9a26f3c112bda6 100644 --- a/deps/v8/src/codegen/bailout-reason.cc +++ b/deps/v8/src/codegen/bailout-reason.cc @@ -11,7 +11,11 @@ namespace internal { #define ERROR_MESSAGES_TEXTS(C, T) T, const char* GetBailoutReason(BailoutReason reason) { - DCHECK_LT(reason, BailoutReason::kLastErrorMessage); + // Currently, the BailoutReason is read from the SharedFunctionInfo object + // inside the sandbox and must therefore be considered untrusted. As such, it + // needs to be validated here. + static_assert(std::is_unsigned_v<std::underlying_type_t<BailoutReason>>); + SBXCHECK_LT(reason, BailoutReason::kLastErrorMessage); DCHECK_GE(reason, BailoutReason::kNoReason); static const char* error_messages_[] = { BAILOUT_MESSAGES_LIST(ERROR_MESSAGES_TEXTS)};
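The DCHECK-to-SBXCHECK change above is the standard hardening pattern for values read from inside the sandbox: a debug-only assertion is not a security boundary, so the range check has to survive into release builds before the value is used to index memory outside the sandbox. A minimal sketch of the idea with hypothetical names, not V8's actual SBXCHECK machinery:

#include <cstdint>
#include <cstdlib>
#include <iterator>

enum class Reason : uint8_t { kNoReason, kStackOverflow };

const char* GetMessage(Reason reason) {
  static const char* kMessages[] = {"no reason", "stack overflow"};
  // `reason` came from in-sandbox memory and may have been corrupted by an
  // attacker, so validate it with a check that also runs in release mode,
  // unlike a DCHECK.
  if (static_cast<uint8_t>(reason) >= std::size(kMessages)) std::abort();
  return kMessages[static_cast<uint8_t>(reason)];
}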
+#include "src/heap/mutable-page.h" #include "src/logging/counters.h" #include "src/numbers/integer-literal-inl.h" #include "src/objects/api-callbacks.h" @@ -37,6 +37,7 @@ #include "src/objects/property-descriptor-object.h" #include "src/objects/tagged-field.h" #include "src/roots/roots.h" +#include "third_party/v8/codegen/fp16-inl.h" namespace v8 { namespace internal { @@ -213,7 +214,7 @@ void CodeStubAssembler::FailAssert( } } std::string files_and_lines_text = stream.str(); - if (files_and_lines_text.size() != 0) { + if (!files_and_lines_text.empty()) { SNPrintF(chars, "%s%s", message, files_and_lines_text.c_str()); message = chars.begin(); } @@ -1313,6 +1314,10 @@ TNode CodeStubAssembler::TruncateIntPtrToInt32(TNode value) { return ReinterpretCast(value); } +TNode CodeStubAssembler::TruncateWord64ToWord32(TNode value) { + return TruncateInt64ToInt32(ReinterpretCast(value)); +} + TNode CodeStubAssembler::TaggedIsSmi(TNode a) { static_assert(kSmiTagMask < kMaxUInt32); return Word32Equal( @@ -1915,10 +1920,9 @@ TNode CodeStubAssembler::ResolveTrustedPointerHandle( TNode offset = ChangeUint32ToWord(Word32Shl( index, UniqueUint32Constant(kTrustedPointerTableEntrySizeLog2))); TNode value = Load(table, offset); - // The LSB is used as marking bit by the code pointer table, so here we have - // to set it using a bitwise OR as it may or may not be set. - value = - UncheckedCast(WordOr(value, UintPtrConstant(kHeapObjectTag))); + // Untag the pointer and remove the marking bit in one operation. + value = UncheckedCast( + WordAnd(value, UintPtrConstant(~(tag | kTrustedPointerTableMarkBit)))); return UncheckedCast(BitcastWordToTagged(value)); } @@ -1950,20 +1954,6 @@ TNode CodeStubAssembler::LoadCodeEntrypointViaCodePointerField( } #endif // V8_ENABLE_SANDBOX -TNode CodeStubAssembler::LoadProtectedPointerFromObject( - TNode object, int offset) { -#ifdef V8_ENABLE_SANDBOX - TNode trusted_cage_base = LoadPointerFromRootRegister( - IntPtrConstant(IsolateData::trusted_cage_base_offset())); - TNode offset_from_cage_base = - ChangeUint32ToWord(LoadObjectField(object, offset)); - TNode pointer = - UncheckedCast(WordOr(trusted_cage_base, offset_from_cage_base)); - return UncheckedCast(BitcastWordToTagged(pointer)); -#else - return LoadObjectField(object, offset); -#endif -} TNode CodeStubAssembler::LoadFromParentFrame(int offset) { TNode frame_pointer = LoadParentFramePointer(); @@ -2667,10 +2657,12 @@ TNode CodeStubAssembler::LoadArrayElement(TNode array, TNode index_node, int additional_offset) { // TODO(v8:9708): Do we want to keep both IntPtrT and UintPtrT variants? - static_assert(std::is_same::value || - std::is_same::value || - std::is_same::value, - "Only Smi, UintPtrT or IntPtrT indices are allowed"); + static_assert( + std::is_same::value || + std::is_same::value || + std::is_same::value || + std::is_same::value, + "Only Smi, UintPtrT, IntPtrT or TaggedIndex indices are allowed"); CSA_DCHECK(this, IntPtrGreaterThanOrEqual(ParameterToIntPtr(index_node), IntPtrConstant(0))); DCHECK(IsAligned(additional_offset, kTaggedSize)); @@ -2703,10 +2695,12 @@ TNode CodeStubAssembler::LoadFixedArrayElement( TNode object, TNode index, int additional_offset, CheckBounds check_bounds) { // TODO(v8:9708): Do we want to keep both IntPtrT and UintPtrT variants? 
@@ -2667,10 +2657,12 @@ TNode<TValue> CodeStubAssembler::LoadArrayElement(TNode<Array> array, TNode<TIndex> index_node, int additional_offset) { // TODO(v8:9708): Do we want to keep both IntPtrT and UintPtrT variants? - static_assert(std::is_same<TIndex, Smi>::value || - std::is_same<TIndex, UintPtrT>::value || - std::is_same<TIndex, IntPtrT>::value, - "Only Smi, UintPtrT or IntPtrT indices are allowed"); + static_assert( + std::is_same<TIndex, Smi>::value || + std::is_same<TIndex, UintPtrT>::value || + std::is_same<TIndex, IntPtrT>::value || + std::is_same<TIndex, TaggedIndex>::value, + "Only Smi, UintPtrT, IntPtrT or TaggedIndex indices are allowed"); CSA_DCHECK(this, IntPtrGreaterThanOrEqual(ParameterToIntPtr(index_node), IntPtrConstant(0))); DCHECK(IsAligned(additional_offset, kTaggedSize)); @@ -2703,10 +2695,12 @@ TNode<Object> CodeStubAssembler::LoadFixedArrayElement( TNode<FixedArray> object, TNode<TIndex> index, int additional_offset, CheckBounds check_bounds) { // TODO(v8:9708): Do we want to keep both IntPtrT and UintPtrT variants? - static_assert(std::is_same<TIndex, Smi>::value || - std::is_same<TIndex, UintPtrT>::value || - std::is_same<TIndex, IntPtrT>::value, - "Only Smi, UintPtrT or IntPtrT indexes are allowed"); + static_assert( + std::is_same<TIndex, Smi>::value || + std::is_same<TIndex, UintPtrT>::value || + std::is_same<TIndex, IntPtrT>::value || + std::is_same<TIndex, TaggedIndex>::value, + "Only Smi, UintPtrT, IntPtrT or TaggedIndex indexes are allowed"); CSA_DCHECK(this, IsFixedArraySubclass(object)); CSA_DCHECK(this, IsNotWeakFixedArraySubclass(object)); @@ -2722,6 +2716,10 @@ template V8_EXPORT_PRIVATE TNode<Object> CodeStubAssembler::LoadFixedArrayElement<UintPtrT>(TNode<FixedArray>, TNode<UintPtrT>, int, CheckBounds); template V8_EXPORT_PRIVATE TNode<Object> +CodeStubAssembler::LoadFixedArrayElement<TaggedIndex>(TNode<FixedArray>, + TNode<TaggedIndex>, int, + CheckBounds); +template V8_EXPORT_PRIVATE TNode<Object> CodeStubAssembler::LoadFixedArrayElement<IntPtrT>(TNode<FixedArray>, TNode<IntPtrT>, int, CheckBounds); @@ -2767,6 +2765,19 @@ TNode<Object> CodeStubAssembler::LoadPropertyArrayElement( additional_offset)); } +void CodeStubAssembler::FixedArrayBoundsCheck(TNode<FixedArrayBase> array, + TNode<TaggedIndex> index, + int additional_offset) { + if (!v8_flags.fixed_array_bounds_checks) return; + DCHECK(IsAligned(additional_offset, kTaggedSize)); + // IntPtrAdd does constant-folding automatically. + TNode<IntPtrT> effective_index = + IntPtrAdd(TaggedIndexToIntPtr(index), + IntPtrConstant(additional_offset / kTaggedSize)); + CSA_CHECK(this, UintPtrLessThan(effective_index, + LoadAndUntagFixedArrayBaseLength(array))); +} + TNode<IntPtrT> CodeStubAssembler::LoadPropertyArrayLength( TNode<PropertyArray> object) { TNode<Int32T> value = LoadAndUntagToWord32ObjectField( @@ -3005,6 +3016,9 @@ TNode<Numeric> CodeStubAssembler::LoadFixedTypedArrayElementAsTagged( return ChangeUint32ToTagged(Load<Uint32T>(data_pointer, offset)); case INT32_ELEMENTS: return ChangeInt32ToTagged(Load<Int32T>(data_pointer, offset)); + case FLOAT16_ELEMENTS: + return AllocateHeapNumberWithValue( + ChangeFloat16ToFloat64(Load<Float16T>(data_pointer, offset))); case FLOAT32_ELEMENTS: return AllocateHeapNumberWithValue( ChangeFloat32ToFloat64(Load<Float32T>(data_pointer, offset))); @@ -3492,8 +3506,8 @@ TNode<BytecodeArray> CodeStubAssembler::LoadSharedFunctionInfoBytecodeArray( this, Word32Equal(DecodeWord32<Code::KindField>(code_flags), Int32Constant(static_cast<int32_t>(CodeKind::BASELINE)))); #endif // DEBUG - TNode<HeapObject> baseline_data = LoadProtectedPointerFromObject( - code, Code::kDeoptimizationDataOrInterpreterDataOffset); + TNode<HeapObject> baseline_data = CAST(LoadProtectedPointerField( + code, Code::kDeoptimizationDataOrInterpreterDataOffset)); var_result = baseline_data; } Goto(&check_for_interpreter_data); @@ -3501,7 +3515,7 @@ BIND(&check_for_interpreter_data); GotoIfNot(HasInstanceType(var_result.value(), INTERPRETER_DATA_TYPE), &done); - TNode<BytecodeArray> bytecode_array = CAST(LoadProtectedPointerFromObject( + TNode<BytecodeArray> bytecode_array = CAST(LoadProtectedPointerField( CAST(var_result.value()), InterpreterData::kBytecodeArrayOffset)); var_result = bytecode_array; Goto(&done); @@ -3595,12 +3609,11 @@ void CodeStubAssembler::UnsafeStoreObjectFieldNoWriteBarrier( void CodeStubAssembler::StoreSharedObjectField(TNode<HeapObject> object, TNode<IntPtrT> offset, TNode<Object> value) { - CSA_DCHECK( - this, - WordNotEqual( - WordAnd(LoadBasicMemoryChunkFlags(object), - IntPtrConstant(BasicMemoryChunk::IN_WRITABLE_SHARED_SPACE)), - IntPtrConstant(0))); + CSA_DCHECK(this, + WordNotEqual( + WordAnd(LoadBasicMemoryChunkFlags(object), + IntPtrConstant(MemoryChunk::IN_WRITABLE_SHARED_SPACE)), + IntPtrConstant(0))); int const_offset; if (TryToInt32Constant(offset, &const_offset)) { StoreObjectField(object, const_offset, value); @@ -5060,7 +5073,7 @@ TNode<FixedArrayBase> CodeStubAssembler::ExtractToFixedArray( #ifndef V8_ENABLE_SINGLE_GENERATION
#ifdef DEBUG TNode<IntPtrT> object_word = BitcastTaggedToWord(to_elements); - TNode<IntPtrT> object_page_header = PageHeaderFromAddress(object_word); + TNode<IntPtrT> object_page_header = MemoryChunkFromAddress(object_word); TNode<IntPtrT> page_flags = Load<IntPtrT>( object_page_header, IntPtrConstant(MemoryChunkLayout::kFlagsOffset)); CSA_DCHECK( @@ -5461,7 +5474,7 @@ void CodeStubAssembler::JumpIfPointersFromHereAreInteresting( TNode<Object> object, Label* interesting) { Label finished(this); TNode<IntPtrT> object_word = BitcastTaggedToWord(object); - TNode<IntPtrT> object_page_header = PageHeaderFromAddress(object_word); + TNode<IntPtrT> object_page_header = MemoryChunkFromAddress(object_word); TNode<IntPtrT> page_flags = UncheckedCast<IntPtrT>( Load(MachineType::IntPtr(), object_page_header, IntPtrConstant(MemoryChunkLayout::kFlagsOffset))); @@ -6380,7 +6393,6 @@ TNode<Smi> CodeStubAssembler::TryFloat64ToSmi(TNode<Float64T> value, TNode<Float64T> value64 = ChangeInt32ToFloat64(value32); Label if_int32(this); - GotoIfNot(Float64Equal(value, value64), not_smi); GotoIfNot(Word32Equal(value32, Int32Constant(0)), &if_int32); Branch(Int32LessThan(UncheckedCast<Int32T>(Float64ExtractHighWord32(value)), @@ -6400,6 +6412,203 @@ TNode<Smi> CodeStubAssembler::TryFloat64ToSmi(TNode<Float64T> value, } } +TNode<Float16T> CodeStubAssembler::TruncateFloat64ToFloat16( + TNode<Float64T> value) { + // This is a verbatim CSA implementation of DoubleToFloat16. + // + // The 64-bit and 32-bit paths are implemented separately, but the algorithm + // is the same in both cases. The 32-bit version requires manual pairwise + // operations. + + if (Is64()) { + TVARIABLE(Uint16T, out); + TNode<Int64T> signed_in = BitcastFloat64ToInt64(value); + + // Take the absolute value of the input. + TNode<Word64T> sign = Word64And(signed_in, Uint64Constant(kFP64SignMask)); + TNode<Word64T> in = Word64Xor(signed_in, sign); + + Label if_infinity_or_nan(this), if_finite(this), done(this); + Branch(Uint64GreaterThanOrEqual(in, + Uint64Constant(kFP16InfinityAndNaNInfimum)), + &if_infinity_or_nan, &if_finite); + + BIND(&if_infinity_or_nan); + { + // Result is infinity or NaN. + out = Select<Uint16T>( + Uint64GreaterThan(in, Uint64Constant(kFP64Infinity)), + [=] { return Uint16Constant(kFP16qNaN); }, // NaN->qNaN + [=] { return Uint16Constant(kFP16Infinity); }); // Inf->Inf + Goto(&done); + } + + BIND(&if_finite); + { + // Result is a (de)normalized number or zero. + + Label if_denormal(this), not_denormal(this); + Branch(Uint64LessThan(in, Uint64Constant(kFP16DenormalThreshold)), + &if_denormal, &not_denormal); + + BIND(&if_denormal); + { + // Result is a denormal or zero. Use the magic value and FP addition to + // align 10 mantissa bits at the bottom of the float. Depends on FP + // addition being round-to-nearest-even. + TNode<Float64T> temp = Float64Add( + BitcastInt64ToFloat64(ReinterpretCast<Int64T>(in)), + Float64Constant(base::bit_cast<double>(kFP64To16DenormalMagic))); + out = ReinterpretCast<Uint16T>(TruncateWord64ToWord32( + Uint64Sub(ReinterpretCast<Uint64T>(BitcastFloat64ToInt64(temp)), + Uint64Constant(kFP64To16DenormalMagic)))); + Goto(&done); + } + + BIND(&not_denormal); + { + // Result is not a denormal. + + // Remember if the result mantissa will be odd before rounding. + TNode<Uint64T> mant_odd = ReinterpretCast<Uint64T>(Word64And( + Word64Shr(in, Int64Constant(kFP64MantissaBits - kFP16MantissaBits)), + Uint64Constant(1))); + + // Update the exponent and round to nearest even. + // + // Rounding to nearest even is handled in two parts. First, adding + // kFP64To16RebiasExponentAndRound has the effect of rebiasing the + // exponent and that if any of the lower 41 bits of the mantissa are + // set, the 11th mantissa bit from the front becomes set.
Second, adding + // mant_odd ensures ties are rounded to even. + TNode<Uint64T> temp1 = + Uint64Add(ReinterpretCast<Uint64T>(in), + Uint64Constant(kFP64To16RebiasExponentAndRound)); + TNode<Uint64T> temp2 = Uint64Add(temp1, mant_odd); + + out = ReinterpretCast<Uint16T>(TruncateWord64ToWord32(Word64Shr( + temp2, Int64Constant(kFP64MantissaBits - kFP16MantissaBits)))); + + Goto(&done); + } + } + + BIND(&done); + return ReinterpretCast<Float16T>( + Word32Or(TruncateWord64ToWord32(Word64Shr(sign, Int64Constant(48))), + out.value())); + } else { + TVARIABLE(Uint16T, out); + TNode<Uint32T> signed_in_hi_word = Float64ExtractHighWord32(value); + TNode<Uint32T> in_lo_word = Float64ExtractLowWord32(value); + + // Take the absolute value of the input. + TNode<Uint32T> sign = Word32And( + signed_in_hi_word, Uint64HighWordConstantNoLowWord(kFP64SignMask)); + TNode<Uint32T> in_hi_word = Word32Xor(signed_in_hi_word, sign); + + Label if_infinity_or_nan(this), if_finite(this), done(this); + Branch(Uint32GreaterThanOrEqual( + in_hi_word, + Uint64HighWordConstantNoLowWord(kFP16InfinityAndNaNInfimum)), + &if_infinity_or_nan, &if_finite); + + BIND(&if_infinity_or_nan); + { + // Result is infinity or NaN. + out = Select<Uint16T>( + Uint32GreaterThan(in_hi_word, + Uint64HighWordConstantNoLowWord(kFP64Infinity)), + [=] { return Uint16Constant(kFP16qNaN); }, // NaN->qNaN + [=] { return Uint16Constant(kFP16Infinity); }); // Inf->Inf + Goto(&done); + } + + BIND(&if_finite); + { + // Result is a (de)normalized number or zero. + + Label if_denormal(this), not_denormal(this); + Branch(Uint32LessThan(in_hi_word, Uint64HighWordConstantNoLowWord( + kFP16DenormalThreshold)), + &if_denormal, &not_denormal); + + BIND(&if_denormal); + { + // Result is a denormal or zero. Use the magic value and FP addition to + // align 10 mantissa bits at the bottom of the float. Depends on FP + // addition being round-to-nearest-even. + TNode<Float64T> double_in = Float64InsertHighWord32( + Float64InsertLowWord32(Float64Constant(0), in_lo_word), in_hi_word); + TNode<Float64T> temp = Float64Add( + double_in, + Float64Constant(base::bit_cast<double>(kFP64To16DenormalMagic))); + out = ReinterpretCast<Uint16T>(Projection<0>(Int32PairSub( + Float64ExtractLowWord32(temp), Float64ExtractHighWord32(temp), + Uint64LowWordConstant(kFP64To16DenormalMagic), + Uint64HighWordConstant(kFP64To16DenormalMagic)))); + + Goto(&done); + } + + BIND(&not_denormal); + { + // Result is not a denormal. + + // Remember if the result mantissa will be odd before rounding. + TNode<Uint32T> mant_odd = ReinterpretCast<Uint32T>(Word32And( + Word32Shr(in_hi_word, Int32Constant(kFP64MantissaBits - + kFP16MantissaBits - 32)), + Uint32Constant(1))); + + // Update the exponent and round to nearest even. + // + // Rounding to nearest even is handled in two parts. First, adding + // kFP64To16RebiasExponentAndRound has the effect of rebiasing the + // exponent and that if any of the lower 41 bits of the mantissa are + // set, the 11th mantissa bit from the front becomes set. Second, adding + // mant_odd ensures ties are rounded to even. + TNode<PairT<Word32T, Word32T>> temp1 = Int32PairAdd( + in_lo_word, in_hi_word, + Uint64LowWordConstant(kFP64To16RebiasExponentAndRound), + Uint64HighWordConstant(kFP64To16RebiasExponentAndRound)); + TNode<PairT<Word32T, Word32T>> temp2 = + Int32PairAdd(Projection<0>(temp1), Projection<1>(temp1), mant_odd, + Int32Constant(0)); + + out = ReinterpretCast<Uint16T>((Word32Shr( + Projection<1>(temp2), + Int32Constant(kFP64MantissaBits - kFP16MantissaBits - 32)))); + + Goto(&done); + } + } + + BIND(&done); + return ReinterpretCast<Float16T>( + Word32Or(Word32Shr(sign, Int32Constant(16)), out.value())); + } +}
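For reference, here is the scalar shape of the algorithm both CSA paths transcribe, close to the DoubleToFloat16 routine the comments above describe. The kFP64To16* constants are written out under assumed values that match their conventional fp16 definitions, so treat this as a sketch rather than V8's canonical source:

#include <bit>
#include <cstdint>

constexpr int kFP64MantissaBits = 52;
constexpr int kFP16MantissaBits = 10;
constexpr uint64_t kFP64SignMask = 0x8000000000000000ull;
constexpr uint64_t kFP64Infinity = 0x7ff0000000000000ull;
// 2^16 as a double: the smallest value that maps to fp16 Inf/NaN.
constexpr uint64_t kFP16InfinityAndNaNInfimum = uint64_t{1023 + 16}
                                                << kFP64MantissaBits;
// 2^-14 as a double: the smallest fp16 normal.
constexpr uint64_t kFP16DenormalThreshold = uint64_t{1023 - 14}
                                            << kFP64MantissaBits;
constexpr uint16_t kFP16qNaN = 0x7e00;
constexpr uint16_t kFP16Infinity = 0x7c00;
// Assumed: rescales a double so the 10 result mantissa bits line up at the
// bottom (the "denormal magic" in the comments above).
constexpr uint64_t kFP64To16DenormalMagic =
    uint64_t{1023 - 15 + kFP64MantissaBits - kFP16MantissaBits + 1}
    << kFP64MantissaBits;
// Assumed: rebias 1023 -> 15 (mod 2^64) plus 41 low rounding bits.
constexpr uint64_t kFP64To16RebiasExponentAndRound =
    static_cast<uint64_t>(-(int64_t{1023 - 15} << kFP64MantissaBits)) +
    ((uint64_t{1} << (kFP64MantissaBits - kFP16MantissaBits - 1)) - 1);

uint16_t DoubleToFloat16(double value) {
  uint64_t in = std::bit_cast<uint64_t>(value);
  uint64_t sign = in & kFP64SignMask;
  in ^= sign;  // take the absolute value
  uint16_t out;
  if (in >= kFP16InfinityAndNaNInfimum) {
    out = (in > kFP64Infinity) ? kFP16qNaN : kFP16Infinity;  // NaN or Inf
  } else if (in < kFP16DenormalThreshold) {
    // Denormal or zero: the FP addition aligns and rounds the mantissa.
    double temp = std::bit_cast<double>(in) +
                  std::bit_cast<double>(kFP64To16DenormalMagic);
    out = static_cast<uint16_t>(std::bit_cast<uint64_t>(temp) -
                                kFP64To16DenormalMagic);
  } else {
    // Normal: rebias the exponent, then round to nearest even by adding
    // the pre-rounding oddness of the result mantissa.
    uint64_t mant_odd = (in >> (kFP64MantissaBits - kFP16MantissaBits)) & 1;
    in += kFP64To16RebiasExponentAndRound;
    in += mant_odd;
    out = static_cast<uint16_t>(in >> (kFP64MantissaBits - kFP16MantissaBits));
  }
  return static_cast<uint16_t>((sign >> 48) | out);
}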
+TNode<Uint32T> CodeStubAssembler::BitcastFloat16ToUint32( + TNode<Float16T> value) { + return ReinterpretCast<Uint32T>(value); +} + +TNode<Float16T> CodeStubAssembler::BitcastUint32ToFloat16( + TNode<Uint32T> value) { + return ReinterpretCast<Float16T>(value); +} + +TNode<Float16T> CodeStubAssembler::RoundInt32ToFloat16(TNode<Int32T> value) { + return TruncateFloat32ToFloat16(RoundInt32ToFloat32(value)); +} + +TNode<Float64T> CodeStubAssembler::ChangeFloat16ToFloat64( + TNode<Float16T> value) { + return ChangeFloat32ToFloat64(ChangeFloat16ToFloat32(value)); +} + TNode<Number> CodeStubAssembler::ChangeFloat32ToTagged(TNode<Float32T> value) { Label not_smi(this), done(this); TVARIABLE(Number, var_result); @@ -6574,7 +6783,9 @@ TNode<String> CodeStubAssembler::ToThisString(TNode<Context> context, return CAST(var_value.value()); } -TNode<Uint32T> CodeStubAssembler::ChangeNumberToUint32(TNode<Number> value) { +// This has platform-specific and ill-defined behavior for negative inputs. +TNode<Uint32T> CodeStubAssembler::ChangeNonNegativeNumberToUint32( + TNode<Number> value) { TVARIABLE(Uint32T, var_result); Label if_smi(this), if_heapnumber(this, Label::kDeferred), done(this); Branch(TaggedIsSmi(value), &if_smi, &if_heapnumber); @@ -7533,6 +7744,10 @@ TNode<BoolT> CodeStubAssembler::IsString(TNode<HeapObject> object) { #endif } +TNode<BoolT> CodeStubAssembler::IsStringWrapper(TNode<HeapObject> object) { + return IsStringWrapperElementsKind(LoadMap(object)); +} + TNode<BoolT> CodeStubAssembler::IsSeqOneByteString(TNode<HeapObject> object) { return IsSeqOneByteStringInstanceType(LoadInstanceType(object)); } @@ -7867,7 +8082,7 @@ TNode<BoolT> CodeStubAssembler::IsNumberArrayIndex(TNode<Number> number) { TNode<IntPtrT> CodeStubAssembler::LoadBasicMemoryChunkFlags( TNode<HeapObject> object) { TNode<IntPtrT> object_word = BitcastTaggedToWord(object); - TNode<IntPtrT> page_header = PageHeaderFromAddress(object_word); + TNode<IntPtrT> page_header = MemoryChunkFromAddress(object_word); return UncheckedCast<IntPtrT>( Load(MachineType::Pointer(), page_header, IntPtrConstant(MemoryChunkLayout::kFlagsOffset))); @@ -9454,8 +9669,7 @@ TNode<IntPtrT> CodeStubAssembler::NameToIndexHashTableLookup( template <typename Dictionary> void CodeStubAssembler::NameDictionaryLookup( TNode<Dictionary> dictionary, TNode<Name> unique_name, Label* if_found, - TVariable<IntPtrT>* var_name_index, Label* if_not_found_no_insertion_index, - LookupMode mode, Label* if_not_found_with_insertion_index) { + TVariable<IntPtrT>* var_name_index, Label* if_not_found, LookupMode mode) { static_assert(std::is_same<Dictionary, NameDictionary>::value || std::is_same<Dictionary, GlobalDictionary>::value || std::is_same<Dictionary, NameToIndexHashTable>::value, @@ -9463,13 +9677,8 @@ void CodeStubAssembler::NameDictionaryLookup( DCHECK_IMPLIES(var_name_index != nullptr, MachineType::PointerRepresentation() == var_name_index->rep()); DCHECK_IMPLIES(mode == kFindInsertionIndex, if_found == nullptr); - DCHECK_IMPLIES(if_not_found_with_insertion_index != nullptr, - var_name_index != nullptr); Comment("NameDictionaryLookup"); CSA_DCHECK(this, IsUniqueName(unique_name)); - if (if_not_found_with_insertion_index == nullptr) { - if_not_found_with_insertion_index = if_not_found_no_insertion_index; - } Label if_not_computed(this, Label::kDeferred); @@ -9503,17 +9712,19 @@ void CodeStubAssembler::NameDictionaryLookup( TNode<HeapObject> current =
CAST(UnsafeLoadFixedArrayElement(dictionary, index)); - GotoIf(TaggedEqual(current, undefined), if_not_found_with_insertion_index); - if (mode == kFindExisting) { - if (Dictionary::ShapeT::kMatchNeedsHoleCheck) { - GotoIf(TaggedEqual(current, TheHoleConstant()), &next_probe); - } - current = LoadName<Dictionary>(current); - GotoIf(TaggedEqual(current, unique_name), if_found); - } else { - DCHECK_EQ(kFindInsertionIndex, mode); - GotoIf(TaggedEqual(current, TheHoleConstant()), - if_not_found_with_insertion_index); + GotoIf(TaggedEqual(current, undefined), if_not_found); + switch (mode) { + case kFindInsertionIndex: + GotoIf(TaggedEqual(current, TheHoleConstant()), if_not_found); + break; + case kFindExisting: + case kFindExistingOrInsertionIndex: + if (Dictionary::TodoShape::kMatchNeedsHoleCheck) { + GotoIf(TaggedEqual(current, TheHoleConstant()), &next_probe); + } + current = LoadName<Dictionary>(current); + GotoIf(TaggedEqual(current, unique_name), if_found); + break; } Goto(&next_probe); @@ -9532,46 +9743,8 @@ // memory features turned on. To minimize affecting the fast path, the // forwarding index branch defers both fetching the actual hash value and // the dictionary lookup to the runtime. - using ER = ExternalReference; // To avoid super long lines below. - ER func_ref; - if constexpr (std::is_same<Dictionary, NameDictionary>::value) { - func_ref = - mode == kFindExisting - ? ER::name_dictionary_lookup_forwarded_string() - : ER::name_dictionary_find_insertion_entry_forwarded_string(); - } else if constexpr (std::is_same<Dictionary, GlobalDictionary>::value) { - func_ref = - mode == kFindExisting - ? ER::global_dictionary_lookup_forwarded_string() - : ER::global_dictionary_find_insertion_entry_forwarded_string(); - } else { - auto ref0 = ER::name_to_index_hashtable_lookup_forwarded_string(); - auto ref1 = - ER::name_to_index_hashtable_find_insertion_entry_forwarded_string(); - func_ref = mode == kFindExisting ? ref0 : ref1; - } - const TNode<ExternalReference> function = ExternalConstant(func_ref); - const TNode<ExternalReference> isolate_ptr = - ExternalConstant(ER::isolate_address(isolate())); - TNode<IntPtrT> entry = UncheckedCast<IntPtrT>(CallCFunction( - function, MachineType::IntPtr(), - std::make_pair(MachineType::Pointer(), isolate_ptr), - std::make_pair(MachineType::TaggedPointer(), dictionary), - std::make_pair(MachineType::TaggedPointer(), unique_name))); - - if (var_name_index) *var_name_index = EntryToIndex<Dictionary>(entry); - if (mode == kFindExisting) { - GotoIf(IntPtrEqual(entry, - IntPtrConstant(InternalIndex::NotFound().raw_value())), - if_not_found_no_insertion_index); - Goto(if_found); - } else { - CSA_DCHECK( - this, - WordNotEqual(entry, - IntPtrConstant(InternalIndex::NotFound().raw_value()))); - Goto(if_not_found_with_insertion_index); - } + NameDictionaryLookupWithForwardIndex(dictionary, unique_name, if_found, + var_name_index, if_not_found, mode); } } @@ -9580,11 +9753,66 @@ template V8_EXPORT_PRIVATE void CodeStubAssembler::NameDictionaryLookup<NameDictionary>(TNode<NameDictionary>, TNode<Name>, Label*, TVariable<IntPtrT>*, - Label*, LookupMode, - Label*); + Label*, LookupMode); template V8_EXPORT_PRIVATE void CodeStubAssembler::NameDictionaryLookup< GlobalDictionary>(TNode<GlobalDictionary>, TNode<Name>, Label*, - TVariable<IntPtrT>*, Label*, LookupMode, Label*); + TVariable<IntPtrT>*, Label*, LookupMode); + +template <typename Dictionary> +void CodeStubAssembler::NameDictionaryLookupWithForwardIndex( + TNode<Dictionary> dictionary, TNode<Name> unique_name, Label* if_found, + TVariable<IntPtrT>* var_name_index, Label* if_not_found, LookupMode mode) { + using ER = ExternalReference; // To avoid super long lines below.
+ ER func_ref; + if constexpr (std::is_same<Dictionary, NameDictionary>::value) { + func_ref = mode == kFindInsertionIndex + ? ER::name_dictionary_find_insertion_entry_forwarded_string() + : ER::name_dictionary_lookup_forwarded_string(); + } else if constexpr (std::is_same<Dictionary, GlobalDictionary>::value) { + func_ref = + mode == kFindInsertionIndex + ? ER::global_dictionary_find_insertion_entry_forwarded_string() + : ER::global_dictionary_lookup_forwarded_string(); + } else { + auto ref0 = + ER::name_to_index_hashtable_find_insertion_entry_forwarded_string(); + auto ref1 = ER::name_to_index_hashtable_lookup_forwarded_string(); + func_ref = mode == kFindInsertionIndex ? ref0 : ref1; + } + const TNode<ExternalReference> function = ExternalConstant(func_ref); + const TNode<ExternalReference> isolate_ptr = + ExternalConstant(ER::isolate_address(isolate())); + TNode<IntPtrT> entry = UncheckedCast<IntPtrT>( + CallCFunction(function, MachineType::IntPtr(), + std::make_pair(MachineType::Pointer(), isolate_ptr), + std::make_pair(MachineType::TaggedPointer(), dictionary), + std::make_pair(MachineType::TaggedPointer(), unique_name))); + + if (var_name_index) *var_name_index = EntryToIndex<Dictionary>(entry); + switch (mode) { + case kFindInsertionIndex: + CSA_DCHECK( + this, + WordNotEqual(entry, + IntPtrConstant(InternalIndex::NotFound().raw_value()))); + Goto(if_not_found); + break; + case kFindExisting: + GotoIf(IntPtrEqual(entry, + IntPtrConstant(InternalIndex::NotFound().raw_value())), + if_not_found); + Goto(if_found); + break; + case kFindExistingOrInsertionIndex: + GotoIfNot(IntPtrEqual(entry, IntPtrConstant( + InternalIndex::NotFound().raw_value())), + if_found); + NameDictionaryLookupWithForwardIndex(dictionary, unique_name, if_found, + var_name_index, if_not_found, + kFindInsertionIndex); + break; + } +} TNode<Uint32T> CodeStubAssembler::ComputeSeededHash(TNode<IntPtrT> key) { const TNode<ExternalReference> function_addr = @@ -9604,13 +9832,12 @@ TNode<Uint32T> CodeStubAssembler::ComputeSeededHash(TNode<IntPtrT> key) { template <> void CodeStubAssembler::NameDictionaryLookup( TNode<SwissNameDictionary> dictionary, TNode<Name> unique_name, - Label* if_found, TVariable<IntPtrT>* var_name_index, - Label* if_not_found_no_insertion_index, LookupMode mode, - Label* if_not_found_with_insertion_index) { - // TODO(pthier): Support path for not found with valid insertion index for + Label* if_found, TVariable<IntPtrT>* var_name_index, Label* if_not_found, + LookupMode mode) { + // TODO(pthier): Support mode kFindExistingOrInsertionIndex for // SwissNameDictionary. SwissNameDictionaryFindEntry(dictionary, unique_name, if_found, - var_name_index, if_not_found_no_insertion_index); + var_name_index, if_not_found); }
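The three lookup modes now share one contract: {if_found} fires with the entry index in {var_name_index}, and {if_not_found} fires with a usable insertion slot in {var_name_index} except in plain kFindExisting mode. A simplified open-addressing sketch of the mode behavior, not the real CSA probe loop:

#include <optional>
#include <string>
#include <vector>

enum LookupMode {
  kFindExisting,
  kFindInsertionIndex,
  kFindExistingOrInsertionIndex,
};

struct Slot { bool empty = true; bool hole = false; std::string key; };

// found tells the caller which label would have been taken; index is the
// entry for a hit and the insertion slot for a miss (meaningless for a
// kFindExisting miss, as documented above).
struct Result { bool found; std::optional<size_t> index; };

Result Lookup(const std::vector<Slot>& table, const std::string& key,
              LookupMode mode) {
  for (size_t i = 0; i < table.size(); ++i) {  // linear probe stand-in
    const Slot& s = table[i];
    if (s.empty) return {false, i};  // end of chain: insertion point
    if (s.hole) {
      if (mode == kFindInsertionIndex) return {false, i};  // reuse the hole
      continue;  // modes that look for an existing entry probe past holes
    }
    if (mode != kFindInsertionIndex && s.key == key) return {true, i};
  }
  return {false, std::nullopt};
}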
void CodeStubAssembler::NumberDictionaryLookup( @@ -11958,6 +12185,7 @@ MachineRepresentation ElementsKindToMachineRepresentation(ElementsKind kind) { return MachineRepresentation::kWord8; case UINT16_ELEMENTS: case INT16_ELEMENTS: + case FLOAT16_ELEMENTS: return MachineRepresentation::kWord16; case UINT32_ELEMENTS: case INT32_ELEMENTS: @@ -12074,7 +12302,8 @@ void CodeStubAssembler::StoreElementTypedArray(TNode<TArray> elements, static_assert(std::is_same<TArray, RawPtrT>::value || std::is_same<TArray, FixedArrayBase>::value, "Only RawPtrT or FixedArrayBase elements are allowed"); - static_assert(std::is_same<TValue, Int32T>::value || + static_assert(std::is_same<TValue, Float16T>::value || + std::is_same<TValue, Int32T>::value || std::is_same<TValue, Float32T>::value || std::is_same<TValue, Float64T>::value || std::is_same<TValue, Object>::value, @@ -12124,7 +12353,8 @@ void CodeStubAssembler::StoreElement(TNode<TArray> elements, ElementsKind kind, std::is_same<TIndex, UintPtrT>::value, "Only Smi, IntPtrT or UintPtrT indices are allowed"); static_assert( - std::is_same<TValue, Int32T>::value || + std::is_same<TValue, Float16T>::value || + std::is_same<TValue, Int32T>::value || std::is_same<TValue, Word32T>::value || std::is_same<TValue, Float32T>::value || std::is_same<TValue, Float64T>::value || @@ -12151,6 +12381,8 @@ template V8_EXPORT_PRIVATE void CodeStubAssembler::StoreElement(TNode<RawPtrT>, ElementsKind, TNode<UintPtrT>, TNode<Float64T>); +template V8_EXPORT_PRIVATE void CodeStubAssembler::StoreElement( + TNode<RawPtrT>, ElementsKind, TNode<UintPtrT>, TNode<Float16T>); TNode<Int32T> CodeStubAssembler::Int32ToUint8Clamped( TNode<Int32T> int32_value) { @@ -12224,6 +12456,8 @@ TNode<Word32T> CodeStubAssembler::PrepareValueForWriteToTypedArray( LoadObjectField<Float64T>(heap_object, offsetof(HeapNumber, value_)); if (elements_kind == UINT8_CLAMPED_ELEMENTS) { var_result = Float64ToUint8Clamped(value); + } else if (elements_kind == FLOAT16_ELEMENTS) { + var_result = ReinterpretCast<Word32T>(TruncateFloat64ToFloat16(value)); } else { var_result = TruncateFloat64ToWord32(value); } @@ -12235,6 +12469,8 @@ TNode<Word32T> CodeStubAssembler::PrepareValueForWriteToTypedArray( TNode<Int32T> value = SmiToInt32(CAST(var_input.value())); if (elements_kind == UINT8_CLAMPED_ELEMENTS) { var_result = Int32ToUint8Clamped(value); + } else if (elements_kind == FLOAT16_ELEMENTS) { + var_result = ReinterpretCast<Word32T>(RoundInt32ToFloat16(value)); } else { var_result = value; } @@ -12251,6 +12487,54 @@ TNode<Word32T> CodeStubAssembler::PrepareValueForWriteToTypedArray( return var_result.value(); } +template <> +TNode<Float16T> CodeStubAssembler::PrepareValueForWriteToTypedArray<Float16T>( + TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) { + DCHECK(IsTypedArrayElementsKind(elements_kind)); + CHECK_EQ(elements_kind, FLOAT16_ELEMENTS); + + TVARIABLE(Float16T, var_result); + TVARIABLE(Object, var_input, input); + Label done(this, &var_result), if_smi(this), if_heapnumber_or_oddball(this), + convert(this), loop(this, &var_input); + Goto(&loop); + BIND(&loop); + GotoIf(TaggedIsSmi(var_input.value()), &if_smi); + // We can handle both HeapNumber and Oddball here, since Oddball has the + // same layout as the HeapNumber for the HeapNumber::value field. This + // way we can also properly optimize stores of oddballs to typed arrays.
+ TNode<HeapObject> heap_object = CAST(var_input.value()); + GotoIf(IsHeapNumber(heap_object), &if_heapnumber_or_oddball); + STATIC_ASSERT_FIELD_OFFSETS_EQUAL(offsetof(HeapNumber, value_), + offsetof(Oddball, to_number_raw_)); + Branch(HasInstanceType(heap_object, ODDBALL_TYPE), &if_heapnumber_or_oddball, + &convert); + + BIND(&if_heapnumber_or_oddball); + { + TNode<Float64T> value = + LoadObjectField<Float64T>(heap_object, offsetof(HeapNumber, value_)); + var_result = TruncateFloat64ToFloat16(value); + Goto(&done); + } + + BIND(&if_smi); + { + TNode<Int32T> value = SmiToInt32(CAST(var_input.value())); + var_result = RoundInt32ToFloat16(value); + Goto(&done); + } + + BIND(&convert); + { + var_input = CallBuiltin(Builtin::kNonNumberToNumber, context, input); + Goto(&loop); + } + + BIND(&done); + return var_result.value(); +} + template <> TNode<Float64T> CodeStubAssembler::PrepareValueForWriteToTypedArray<Float64T>( TNode<Object> input, ElementsKind elements_kind, TNode<Context> context) { @@ -12424,6 +12708,26 @@ void CodeStubAssembler::EmitElementStoreTypedArrayUpdateValue( } } +template <> +void CodeStubAssembler::EmitElementStoreTypedArrayUpdateValue( + TNode<Object> value, ElementsKind elements_kind, + TNode<Float16T> converted_value, TVariable<Object>* maybe_converted_value) { + Label dont_allocate_heap_number(this), end(this); + GotoIf(TaggedIsSmi(value), &dont_allocate_heap_number); + GotoIf(IsHeapNumber(CAST(value)), &dont_allocate_heap_number); + { + *maybe_converted_value = + AllocateHeapNumberWithValue(ChangeFloat16ToFloat64(converted_value)); + Goto(&end); + } + BIND(&dont_allocate_heap_number); + { + *maybe_converted_value = value; + Goto(&end); + } + BIND(&end); +} + template <> void CodeStubAssembler::EmitElementStoreTypedArrayUpdateValue( TNode<Object> value, ElementsKind elements_kind, @@ -12604,6 +12908,12 @@ void CodeStubAssembler::EmitElementStore( elements_kind, store_mode, bailout, context, maybe_converted_value); break; + case FLOAT16_ELEMENTS: + case RAB_GSAB_FLOAT16_ELEMENTS: + EmitElementStoreTypedArray<Float16T>(typed_array, intptr_key, value, + elements_kind, store_mode, bailout, + context, maybe_converted_value); + break; default: UNREACHABLE(); } @@ -12815,7 +13125,7 @@ void CodeStubAssembler::TrapAllocationMemento(TNode<JSObject> object, TNode<IntPtrT> object_word = BitcastTaggedToWord(object); // TODO(v8:11641): Skip TrapAllocationMemento when allocation-site // tracking is disabled. - TNode<IntPtrT> object_page_header = PageHeaderFromAddress(object_word); + TNode<IntPtrT> object_page_header = MemoryChunkFromAddress(object_word); { TNode<IntPtrT> page_flags = Load<IntPtrT>( object_page_header, IntPtrConstant(MemoryChunkLayout::kFlagsOffset)); @@ -12835,11 +13145,11 @@ void CodeStubAssembler::TrapAllocationMemento(TNode<JSObject> object, TNode<IntPtrT> memento_last_word = IntPtrAdd( object_word, IntPtrConstant(kMementoLastWordOffset - kHeapObjectTag)); TNode<IntPtrT> memento_last_word_page_header = - PageHeaderFromAddress(memento_last_word); + MemoryChunkFromAddress(memento_last_word); TNode<IntPtrT> new_space_top = Load<IntPtrT>(new_space_top_address); TNode<IntPtrT> new_space_top_page_header = - PageHeaderFromAddress(new_space_top); + MemoryChunkFromAddress(new_space_top); // If the object is in new space, we need to check whether respective // potential memento object is on the same page as the current top.
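The Float16 specialization above follows the same conversion ladder as the existing Word32/Float64 ones: Smi input goes through int32, HeapNumber and Oddball input through the shared float64 value field, and anything else through the NonNumberToNumber builtin before looping. A scalar sketch with hypothetical stand-in types, reusing a DoubleToFloat16 helper like the one sketched earlier:

#include <cstdint>
#include <variant>

using Smi = int32_t;                 // hypothetical stand-ins
struct HeapNumber { double value; };
using Value = std::variant<Smi, HeapNumber>;

uint16_t Float32ToFloat16(float f);  // assumed, as in the earlier sketch
uint16_t DoubleToFloat16(double d);  // assumed, as in the earlier sketch

uint16_t PrepareFloat16Store(Value v) {
  if (const Smi* smi = std::get_if<Smi>(&v)) {
    // Smi path, matching RoundInt32ToFloat16: int32 -> float32 -> fp16.
    return Float32ToFloat16(static_cast<float>(*smi));
  }
  // HeapNumber/Oddball path: the raw float64 value field -> fp16. The
  // "convert" loop for non-numbers is elided here; it would call ToNumber
  // and retry.
  return DoubleToFloat16(std::get<HeapNumber>(v).value);
}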
@@ -12874,22 +13184,24 @@ void CodeStubAssembler::TrapAllocationMemento(TNode<JSObject> object, Comment("] TrapAllocationMemento"); } -TNode<IntPtrT> CodeStubAssembler::PageHeaderFromAddress( +TNode<IntPtrT> CodeStubAssembler::MemoryChunkFromAddress( TNode<IntPtrT> address) { DCHECK(!V8_ENABLE_THIRD_PARTY_HEAP_BOOL); - return WordAnd( - address, - IntPtrConstant(~MemoryChunkHeader::GetAlignmentMaskForAssembler())); + return WordAnd(address, + IntPtrConstant(~MemoryChunk::GetAlignmentMaskForAssembler())); } -TNode<IntPtrT> CodeStubAssembler::PageFromPageHeader(TNode<IntPtrT> address) { +TNode<IntPtrT> CodeStubAssembler::PageMetadataFromMemoryChunk( + TNode<IntPtrT> address) { DCHECK(!V8_ENABLE_THIRD_PARTY_HEAP_BOOL); - return address; + return Load<IntPtrT>(address, + IntPtrConstant(MemoryChunkLayout::kMetadataOffset)); } -TNode<IntPtrT> CodeStubAssembler::PageFromAddress(TNode<IntPtrT> address) { +TNode<IntPtrT> CodeStubAssembler::PageMetadataFromAddress( + TNode<IntPtrT> address) { DCHECK(!V8_ENABLE_THIRD_PARTY_HEAP_BOOL); - return PageFromPageHeader(PageHeaderFromAddress(address)); + return PageMetadataFromMemoryChunk(MemoryChunkFromAddress(address)); } TNode<AllocationSite> CodeStubAssembler::CreateAllocationSiteInFeedbackVector( @@ -15943,14 +16255,14 @@ TNode<IntPtrT> CodeStubAssembler::RabGsabElementsKindToElementByteSize( int32_t elements_kinds[] = { RAB_GSAB_UINT8_ELEMENTS, RAB_GSAB_UINT8_CLAMPED_ELEMENTS, RAB_GSAB_INT8_ELEMENTS, RAB_GSAB_UINT16_ELEMENTS, - RAB_GSAB_INT16_ELEMENTS, RAB_GSAB_UINT32_ELEMENTS, - RAB_GSAB_INT32_ELEMENTS, RAB_GSAB_FLOAT32_ELEMENTS, - RAB_GSAB_FLOAT64_ELEMENTS, RAB_GSAB_BIGINT64_ELEMENTS, - RAB_GSAB_BIGUINT64_ELEMENTS}; + RAB_GSAB_INT16_ELEMENTS, RAB_GSAB_FLOAT16_ELEMENTS, + RAB_GSAB_UINT32_ELEMENTS, RAB_GSAB_INT32_ELEMENTS, + RAB_GSAB_FLOAT32_ELEMENTS, RAB_GSAB_FLOAT64_ELEMENTS, + RAB_GSAB_BIGINT64_ELEMENTS, RAB_GSAB_BIGUINT64_ELEMENTS}; Label* elements_kind_labels[] = {&elements_8, &elements_8, &elements_8, - &elements_16, &elements_16, &elements_32, - &elements_32, &elements_32, &elements_64, - &elements_64, &elements_64}; + &elements_16, &elements_16, &elements_16, - &elements_32, &elements_32, &elements_32, + &elements_32, &elements_32, &elements_32, + &elements_64, &elements_64, &elements_64}; const size_t kTypedElementsKindCount = LAST_RAB_GSAB_FIXED_TYPED_ARRAY_ELEMENTS_KIND - FIRST_RAB_GSAB_FIXED_TYPED_ARRAY_ELEMENTS_KIND + 1; @@ -16396,7 +16708,7 @@ TNode<Code> CodeStubAssembler::GetSharedFunctionInfoCode( CSA_DCHECK(this, Word32Equal(data_type, Int32Constant(INTERPRETER_DATA_TYPE))); { - TNode<Code> trampoline = CAST(LoadProtectedPointerFromObject( + TNode<Code> trampoline = CAST(LoadProtectedPointerField( CAST(sfi_data), InterpreterData::kInterpreterTrampolineOffset)); sfi_code = trampoline; } @@ -16637,7 +16949,8 @@ void CodeStubAssembler::PrintToStream(const char* prefix, HeapConstantNoHole(string), SmiConstant(stream)); } // CallRuntime only accepts Objects, so do an UncheckedCast to object. - // DebugPrint explicitly checks whether the tagged value is a MaybeObject. + // DebugPrint explicitly checks whether the tagged value is a + // Tagged<MaybeObject>. TNode<Object> arg = UncheckedCast<Object>(tagged_value); CallRuntime(Runtime::kDebugPrint, NoContextConstant(), arg, SmiConstant(stream)); @@ -17701,9 +18014,9 @@ void CodeStubAssembler::SharedValueBarrier( // trivially shared. CSA_DCHECK(this, BoolConstant(ReadOnlyHeap::IsReadOnlySpaceShared())); TNode<IntPtrT> page_flags = LoadBasicMemoryChunkFlags(CAST(value)); - GotoIf(WordNotEqual(WordAnd(page_flags, - IntPtrConstant(BasicMemoryChunk::READ_ONLY_HEAP)), - IntPtrConstant(0)), + GotoIf(WordNotEqual( + WordAnd(page_flags, IntPtrConstant(MemoryChunk::READ_ONLY_HEAP)), + IntPtrConstant(0)), &skip_barrier); // Fast path: Check if the HeapObject is already shared. @@ -17717,12 +18030,11 @@ void CodeStubAssembler::SharedValueBarrier( BIND(&check_in_shared_heap); { - Branch( - WordNotEqual( - WordAnd(page_flags, - IntPtrConstant(BasicMemoryChunk::IN_WRITABLE_SHARED_SPACE)), - IntPtrConstant(0)), - &skip_barrier, &slow); + Branch(WordNotEqual( + WordAnd(page_flags, + IntPtrConstant(MemoryChunk::IN_WRITABLE_SHARED_SPACE)), + IntPtrConstant(0)), + &skip_barrier, &slow); } // Slow path: Call out to runtime to share primitives and to throw on @@ -17740,8 +18052,8 @@ void CodeStubAssembler::SharedValueBarrier( this, WordNotEqual( WordAnd(LoadBasicMemoryChunkFlags(CAST(var_shared_value->value())), - IntPtrConstant(BasicMemoryChunk::READ_ONLY_HEAP | - BasicMemoryChunk::IN_WRITABLE_SHARED_SPACE)), + IntPtrConstant(MemoryChunk::READ_ONLY_HEAP | + MemoryChunk::IN_WRITABLE_SHARED_SPACE)), IntPtrConstant(0))); Goto(&done); }
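The renames in this file track a heap layout change: the alignment-masked block at the start of a page is now the MemoryChunk itself, while the mutable bookkeeping object lives out of line and is reached through a pointer stored in the chunk. The address arithmetic behind the renamed helpers, sketched with assumed offsets and chunk size:

#include <cstdint>

constexpr uintptr_t kChunkAlignment = uintptr_t{256} * 1024;  // assumed
constexpr uintptr_t kFlagsOffset = 1 * sizeof(uintptr_t);     // assumed
constexpr uintptr_t kMetadataOffset = 2 * sizeof(uintptr_t);  // assumed

// MemoryChunkFromAddress: mask any interior address down to its chunk.
uintptr_t MemoryChunkFromAddress(uintptr_t addr) {
  return addr & ~(kChunkAlignment - 1);
}

// Flags stay inside the chunk, so CheckPageFlag-style tests remain a
// single mask-and-load...
uintptr_t LoadFlags(uintptr_t addr) {
  return *reinterpret_cast<uintptr_t*>(MemoryChunkFromAddress(addr) +
                                       kFlagsOffset);
}

// ...while reaching the page metadata now costs one extra indirection.
uintptr_t PageMetadataFromAddress(uintptr_t addr) {
  return *reinterpret_cast<uintptr_t*>(MemoryChunkFromAddress(addr) +
                                       kMetadataOffset);
}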
diff --git a/deps/v8/src/codegen/code-stub-assembler.h b/deps/v8/src/codegen/code-stub-assembler.h index b57ec139dd3452..b082bb8dc7bf34 100644 --- a/deps/v8/src/codegen/code-stub-assembler.h +++ b/deps/v8/src/codegen/code-stub-assembler.h @@ -33,6 +33,7 @@ #include "src/objects/swiss-name-dictionary.h" #include "src/objects/tagged-index.h" #include "src/objects/tagged.h" +#include "src/objects/templates.h" #include "src/roots/roots.h" #include "torque-generated/exported-macros-assembler.h" @@ -72,6 +73,8 @@ enum class PrimitiveType { kBoolean, kNumber, kString, kSymbol }; V(SetIteratorProtector, set_iterator_protector, SetIteratorProtector) \ V(StringIteratorProtector, string_iterator_protector, \ StringIteratorProtector) \ + V(StringWrapperToPrimitiveProtector, string_wrapper_to_primitive_protector, \ + StringWrapperToPrimitiveProtector) \ V(TypedArraySpeciesProtector, typed_array_species_protector, \ TypedArraySpeciesProtector) \ V(AsyncFunctionAwaitRejectSharedFun, async_function_await_reject_shared_fun, \ @@ -402,6 +405,9 @@ class V8_EXPORT_PRIVATE CodeStubAssembler TNode<IntPtrT> ParameterToIntPtr(TNode<UintPtrT> value) { return Signed(value); } + TNode<IntPtrT> ParameterToIntPtr(TNode<TaggedIndex> value) { + return TaggedIndexToIntPtr(value); + } TNode<Smi> ParameterToTagged(TNode<Smi> value) { return value; } @@ -1036,8 +1042,12 @@ class V8_EXPORT_PRIVATE CodeStubAssembler single_char[0])); } + TNode<Float16T> TruncateFloat32ToFloat16(TNode<Float32T> value); + TNode<Float16T> TruncateFloat64ToFloat16(TNode<Float64T> value); + TNode<Int32T> TruncateWordToInt32(TNode<WordT> value); TNode<Int32T> TruncateIntPtrToInt32(TNode<IntPtrT> value); + TNode<Int32T> TruncateWord64ToWord32(TNode<Word64T> value); // Check a value for smi-ness TNode<BoolT> TaggedIsSmi(TNode<MaybeObject> a); @@ -1253,19 +1263,27 @@ class V8_EXPORT_PRIVATE CodeStubAssembler CodeEntrypointTag tag); #endif - TNode<TrustedObject> LoadProtectedPointerFromObject( - TNode<TrustedObject> object, int offset); + TNode<TrustedObject> LoadProtectedPointerField(TNode<TrustedObject> object, + TNode<IntPtrT> offset) { + return CAST(LoadProtectedPointerFromObject( + object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)))); + } + TNode<TrustedObject> LoadProtectedPointerField(TNode<TrustedObject> object, + int offset) { + return CAST(LoadProtectedPointerFromObject( + object, IntPtrConstant(offset - kHeapObjectTag))); + } TNode<RawPtrT> LoadForeignForeignAddressPtr(TNode<Foreign> object) { return
LoadExternalPointerFromObject(object, Foreign::kForeignAddressOffset, kForeignForeignAddressTag); } - TNode<RawPtrT> LoadCallHandlerInfoJsCallbackPtr( - TNode<CallHandlerInfo> object) { + TNode<RawPtrT> LoadFunctionTemplateInfoJsCallbackPtr( + TNode<FunctionTemplateInfo> object) { return LoadExternalPointerFromObject( - object, CallHandlerInfo::kMaybeRedirectedCallbackOffset, - kCallHandlerInfoCallbackTag); + object, FunctionTemplateInfo::kMaybeRedirectedCallbackOffset, + kFunctionTemplateInfoCallbackTag); } TNode<RawPtrT> LoadExternalStringResourcePtr(TNode<ExternalString> object) { @@ -1293,6 +1311,18 @@ class V8_EXPORT_PRIVATE CodeStubAssembler } #if V8_ENABLE_WEBASSEMBLY + // Returns WasmApiFunctionRef or WasmTrustedInstanceData. + TNode<ExposedTrustedObject> LoadRefFromWasmInternalFunction( + TNode<WasmInternalFunction> object) { + TNode<TrustedObject> ref = LoadTrustedPointerFromObject( + object, WasmInternalFunction::kIndirectRefOffset, + kUnknownIndirectPointerTag); + CSA_DCHECK(this, + Word32Or(HasInstanceType(ref, WASM_TRUSTED_INSTANCE_DATA_TYPE), + HasInstanceType(ref, WASM_API_FUNCTION_REF_TYPE))); + return CAST(ref); + } + TNode<RawPtrT> LoadWasmInternalFunctionCallTargetPtr( + TNode<WasmInternalFunction> object) { return LoadExternalPointerFromObject( @@ -1547,7 +1577,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler TNode<Smi> LoadFixedArrayBaseLength(TNode<FixedArrayBase> array); template <typename Array> TNode<Smi> LoadArrayCapacity(TNode<Array> array) { - return LoadObjectField<Smi>(array, Array::ShapeT::kCapacityOffset); + return LoadObjectField<Smi>(array, Array::Shape::kCapacityOffset); } // Load the length of a fixed array base instance. TNode<IntPtrT> LoadAndUntagFixedArrayBaseLength(TNode<FixedArrayBase> array); @@ -1642,7 +1672,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler void DispatchMaybeObject(TNode<MaybeObject> maybe_object, Label* if_smi, Label* if_cleared, Label* if_weak, Label* if_strong, TVariable<Object>* extracted); - // See MaybeObject for semantics of these functions. + // See Tagged<MaybeObject> for semantics of these functions. TNode<BoolT> IsStrong(TNode<MaybeObject> value); TNode<BoolT> IsStrong(TNode<HeapObjectReference> value); TNode<HeapObject> GetHeapObjectIfStrong(TNode<MaybeObject> value, @@ -1697,6 +1727,14 @@ class V8_EXPORT_PRIVATE CodeStubAssembler FixedArrayBoundsCheck(array, Signed(index), additional_offset); } + void FixedArrayBoundsCheck(TNode<FixedArrayBase> array, + TNode<TaggedIndex> index, int additional_offset); + void FixedArrayBoundsCheck(TNode<FixedArray> array, TNode<TaggedIndex> index, + int additional_offset) { + FixedArrayBoundsCheck(UncheckedCast<FixedArrayBase>(array), index, + additional_offset); + } + // Array is any array-like type that has a fixed header followed by // tagged elements. template <typename Array, typename TIndex, typename TValue>
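The TaggedIndex overloads declared above pair with the FixedArrayBoundsCheck definition added in code-stub-assembler.cc: the constant byte offset is folded into the index first, so a single unsigned comparison against the length also rejects negative effective indices. A scalar sketch:

#include <cassert>
#include <cstdint>

constexpr int kTaggedSize = 8;  // assumed: 64-bit tagged slots

void BoundsCheck(intptr_t index, intptr_t length, int additional_offset) {
  // additional_offset is a compile-time byte offset; convert it to slots.
  intptr_t effective_index = index + additional_offset / kTaggedSize;
  // The unsigned cast makes a negative effective index enormous, so one
  // compare covers both bounds, as in the CSA_CHECK above.
  assert(static_cast<uintptr_t>(effective_index) <
         static_cast<uintptr_t>(length));
}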
@@ -2768,13 +2806,20 @@ class V8_EXPORT_PRIVATE CodeStubAssembler TNode<Smi> TryHeapNumberToSmi(TNode<HeapNumber> number, Label* not_smi); TNode<Smi> TryFloat32ToSmi(TNode<Float32T> number, Label* not_smi); TNode<Smi> TryFloat64ToSmi(TNode<Float64T> number, Label* not_smi); + + TNode<Uint32T> BitcastFloat16ToUint32(TNode<Float16T> value); + TNode<Float16T> BitcastUint32ToFloat16(TNode<Uint32T> value); + TNode<Float16T> RoundInt32ToFloat16(TNode<Int32T> value); + + TNode<Float64T> ChangeFloat16ToFloat64(TNode<Float16T> value); + TNode<Float32T> ChangeFloat16ToFloat32(TNode<Float16T> value); TNode<Number> ChangeFloat32ToTagged(TNode<Float32T> value); TNode<Number> ChangeFloat64ToTagged(TNode<Float64T> value); TNode<Number> ChangeInt32ToTagged(TNode<Int32T> value); TNode<Number> ChangeInt32ToTaggedNoOverflow(TNode<Int32T> value); TNode<Number> ChangeUint32ToTagged(TNode<Uint32T> value); TNode<Number> ChangeUintPtrToTagged(TNode<UintPtrT> value); - TNode<Uint32T> ChangeNumberToUint32(TNode<Number> value); + TNode<Uint32T> ChangeNonNegativeNumberToUint32(TNode<Number> value); TNode<Float64T> ChangeNumberToFloat64(TNode<Number> value); TNode<Int32T> ChangeTaggedNonSmiToInt32(TNode<Context> context, @@ -3000,6 +3045,7 @@ class V8_EXPORT_PRIVATE CodeStubAssembler TNode<BoolT> IsSpecialReceiverMap(TNode<Map> map); TNode<BoolT> IsStringInstanceType(TNode<Int32T> instance_type); TNode<BoolT> IsString(TNode<HeapObject> object); + TNode<BoolT> IsStringWrapper(TNode<HeapObject> object); TNode<BoolT> IsSeqOneByteString(TNode<HeapObject> object); TNode<BoolT> IsSymbolInstanceType(TNode<Int32T> instance_type); @@ -3533,33 +3579,37 @@ class V8_EXPORT_PRIVATE CodeStubAssembler template <typename Dictionary> void SetNameDictionaryFlags(TNode<Dictionary>, TNode<Smi> flags); - enum LookupMode { kFindExisting, kFindInsertionIndex }; + enum LookupMode { + kFindExisting, + kFindInsertionIndex, + kFindExistingOrInsertionIndex + }; template <typename Dictionary> TNode<Name> LoadName(TNode<HeapObject> key); // Looks up an entry in a NameDictionaryBase successor. - // For {mode} == kFindExisting: - // If the entry is found control goes to {if_found} and {var_name_index} - // contains an index of the key field of the entry found. - // If the key is not found and {if_not_found_with_insertion_index} is - // provided, control goes to {if_not_found_with_insertion_index} and - // {var_name_index} contains the index of the key field to insert the given - // name at. - // Otherwise control goes to {if_not_found_no_insertion_index}. - // For {mode} == kFindInsertionIndex: - // {if_not_found_no_insertion_index} and {if_not_found_with_insertion_index} - // are treated equally. If {if_not_found_with_insertion_index} is provided, - // control goes to {if_not_found_with_insertion_index}, otherwise control - // goes to {if_not_found_no_insertion_index}. In both cases {var_name_index} - // contains the index of the key field to insert the given name at. + // If the entry is found control goes to {if_found} and {var_name_index} + // contains an index of the key field of the entry found. + // If the key is not found control goes to {if_not_found}. If mode is + // {kFindExisting}, {var_name_index} might contain garbage, otherwise + // {var_name_index} contains the index of the key field to insert the given + // name at. template <typename Dictionary> void NameDictionaryLookup(TNode<Dictionary> dictionary, TNode<Name> unique_name, Label* if_found, TVariable<IntPtrT>* var_name_index, - Label* if_not_found_no_insertion_index, - LookupMode mode = kFindExisting, - Label* if_not_found_with_insertion_index = nullptr); + Label* if_not_found, + LookupMode mode = kFindExisting); + // Slow lookup for unique_names with forwarding index. + // Both resolving the actual hash and the lookup are handled via runtime.
+ template <typename Dictionary> + void NameDictionaryLookupWithForwardIndex(TNode<Dictionary> dictionary, + TNode<Name> unique_name, + Label* if_found, + TVariable<IntPtrT>* var_name_index, + Label* if_not_found, + LookupMode mode = kFindExisting); TNode<Uint32T> ComputeSeededHash(TNode<IntPtrT> key); @@ -3915,13 +3965,13 @@ class V8_EXPORT_PRIVATE CodeStubAssembler void TrapAllocationMemento(TNode<JSObject> object, Label* memento_found); - // Helpers to look up MemoryChunk/Page metadata for a given address. - // Equivalent to MemoryChunkHeader::FromAddress(). - TNode<IntPtrT> PageHeaderFromAddress(TNode<IntPtrT> address); - // Equivalent to MemoryChunkHeader::MemoryChunk(). - TNode<IntPtrT> PageFromPageHeader(TNode<IntPtrT> address); - // Equivalent to BasicMemoryChunk::FromAddress(). - TNode<IntPtrT> PageFromAddress(TNode<IntPtrT> address); + // Helpers to look up Page metadata for a given address. + // Equivalent to MemoryChunk::FromAddress(). + TNode<IntPtrT> MemoryChunkFromAddress(TNode<IntPtrT> address); + // Equivalent to MemoryChunk::MutablePageMetadata(). + TNode<IntPtrT> PageMetadataFromMemoryChunk(TNode<IntPtrT> address); + // Equivalent to MemoryChunkMetadata::FromAddress(). + TNode<IntPtrT> PageMetadataFromAddress(TNode<IntPtrT> address); // Store a weak in-place reference into the FeedbackVector. TNode<MaybeObject> StoreWeakReferenceInFeedbackVector( diff --git a/deps/v8/src/codegen/compiler.cc b/deps/v8/src/codegen/compiler.cc index ef0153c6697d8f..a2984d393169a3 100644 --- a/deps/v8/src/codegen/compiler.cc +++ b/deps/v8/src/codegen/compiler.cc @@ -1720,8 +1720,10 @@ BackgroundCompileTask::BackgroundCompileTask( BackgroundCompileTask::~BackgroundCompileTask() = default; +namespace { + void SetScriptFieldsFromDetails(Isolate* isolate, Tagged