Cherry-pick arm64 LoadTaggedPointer fix #1921

Merged: 1 commit, Mar 29, 2024
1 change: 1 addition & 0 deletions WORKSPACE
@@ -450,6 +450,7 @@ http_archive(
         "//:patches/v8/0013-Attach-continuation-context-to-Promise-thenable-task.patch",
         "//:patches/v8/0014-increase-visibility-of-virtual-method.patch",
         "//:patches/v8/0015-Add-ValueSerializer-SetTreatFunctionsAsHostObjects.patch",
+        "//:patches/v8/0016-wasm-liftoff-arm64-Fix-LoadTaggedPointer.patch",
     ],
     integrity = "sha256-QphdaJn35eZeo+qoayNFIgm02hX5WHjKf+pr3WXCiEs=",
     strip_prefix = "v8-12.3.219.10",
@@ -1,4 +1,4 @@
-From ea3fcf4ecc3e6f5269f36409e061c5a501cd7bc1 Mon Sep 17 00:00:00 2001
+From c0e8edbdd404361db740316cd43bc347bfe4730f Mon Sep 17 00:00:00 2001
 From: Alex Robinson <arobinson@cloudflare.com>
 Date: Wed, 2 Mar 2022 15:58:04 -0600
 Subject: Allow manually setting ValueDeserializer format version
@@ -1,4 +1,4 @@
-From 644bb085d45a180ba0d31dcdf1b13b434305907e Mon Sep 17 00:00:00 2001
+From 6f67b7491de310dbe14586402d03ca489598496c Mon Sep 17 00:00:00 2001
 From: James M Snell <jasnell@gmail.com>
 Date: Wed, 16 Mar 2022 08:59:21 -0700
 Subject: Allow manually setting ValueSerializer format version
2 changes: 1 addition & 1 deletion patches/v8/0003-Add-ArrayBuffer-MaybeNew.patch
@@ -1,4 +1,4 @@
-From f76485b8d3249876339c2fef437aa83faf2f15a5 Mon Sep 17 00:00:00 2001
+From 18c53a127e3d83ae49dc0cab45a0ba78e6111ac8 Mon Sep 17 00:00:00 2001
 From: Kenton Varda <kenton@cloudflare.com>
 Date: Fri, 16 Sep 2022 21:41:45 -0500
 Subject: Add `ArrayBuffer::MaybeNew()`.
2 changes: 1 addition & 1 deletion patches/v8/0004-Allow-Windows-builds-under-Bazel.patch
@@ -1,4 +1,4 @@
-From 651a05cb9182c40ed5faba671f07468db83bce77 Mon Sep 17 00:00:00 2001
+From 92cd581a93d31cb4f0f06e04a479bb35cc5bab13 Mon Sep 17 00:00:00 2001
 From: Brendan Coll <bcoll@cloudflare.com>
 Date: Thu, 16 Mar 2023 11:56:10 +0000
 Subject: Allow Windows builds under Bazel
2 changes: 1 addition & 1 deletion patches/v8/0005-Disable-bazel-whole-archive-build.patch
@@ -1,4 +1,4 @@
-From 8f92cf45bda03b600cd26081efd5d3766e60ea3a Mon Sep 17 00:00:00 2001
+From acfe5d6daeb91acfaacf0f3f8f38e4d43f2f8e23 Mon Sep 17 00:00:00 2001
 From: Felix Hanau <felix@cloudflare.com>
 Date: Tue, 11 Apr 2023 14:41:31 -0400
 Subject: Disable bazel whole-archive build
@@ -1,4 +1,4 @@
-From af3147000e4c02116237c3d86d790570f3ec6947 Mon Sep 17 00:00:00 2001
+From 4a43a003fcba6d9a441a906a8c3f4360a2890806 Mon Sep 17 00:00:00 2001
 From: Kenton Varda <kenton@cloudflare.com>
 Date: Tue, 23 May 2023 09:18:57 -0500
 Subject: Make v8::Locker automatically call isolate->Enter().
@@ -1,4 +1,4 @@
-From da8e0cd614531e6d64ef5d05f5538b9b2a90ae28 Mon Sep 17 00:00:00 2001
+From 45609de67c8486fbda8342799cb36ee1d8d94383 Mon Sep 17 00:00:00 2001
 From: Kenton Varda <kenton@cloudflare.com>
 Date: Tue, 23 May 2023 09:24:11 -0500
 Subject: Add an API to capture and restore the cage base pointers.
@@ -1,4 +1,4 @@
-From 3b6ba21a262cbd2c3560b070f24afe8c28b88c68 Mon Sep 17 00:00:00 2001
+From 902b7c63a8ae4c3fb7732d8999eb0d04538c115d Mon Sep 17 00:00:00 2001
 From: Felix Hanau <felix@cloudflare.com>
 Date: Wed, 7 Jun 2023 21:40:54 -0400
 Subject: Speed up V8 bazel build by always using target cfg
2 changes: 1 addition & 1 deletion patches/v8/0009-Implement-Promise-Context-Tagging.patch
@@ -1,4 +1,4 @@
-From fcc9f4b0442a0806be4459648a174e2da97bb8ed Mon Sep 17 00:00:00 2001
+From b443e46b9b63a284c15d8afe3f49dda9568d772b Mon Sep 17 00:00:00 2001
 From: James M Snell <jasnell@gmail.com>
 Date: Thu, 22 Jun 2023 15:29:26 -0700
 Subject: Implement Promise Context Tagging
2 changes: 1 addition & 1 deletion patches/v8/0010-Enable-V8-shared-linkage.patch
@@ -1,4 +1,4 @@
-From 0dbfed74e3c1a3ab5103eaa7361542a3329571c1 Mon Sep 17 00:00:00 2001
+From 5ee1beb8672d73f9a14855407a6876b49f1966e2 Mon Sep 17 00:00:00 2001
 From: Felix Hanau <felix@cloudflare.com>
 Date: Sun, 9 Jul 2023 18:46:20 -0400
 Subject: Enable V8 shared linkage
@@ -1,4 +1,4 @@
-From f1977cbb1d19662b17c3c6c569dfb9957429b2b3 Mon Sep 17 00:00:00 2001
+From 9da1bd346f715514f1a3a73b42b3b1d909edfe76 Mon Sep 17 00:00:00 2001
 From: Orion Hodson <orion@cloudflare.com>
 Date: Wed, 13 Sep 2023 15:38:15 +0100
 Subject: Randomize the initial ExecutionContextId used by the inspector
@@ -1,4 +1,4 @@
-From 17f4495249eff3c12e172d100147cf3564adc45f Mon Sep 17 00:00:00 2001
+From e15c22c5f3028fe44c3b7cdc08d501f0ba8ef0f0 Mon Sep 17 00:00:00 2001
 From: James M Snell <jasnell@gmail.com>
 Date: Thu, 18 Jan 2024 10:19:14 -0800
 Subject: Always enable continuation preserved data in the build
2 changes: 1 addition & 1 deletion patches/v8/0013-Attach-continuation-context-to-Promise-thenable-task.patch
@@ -1,4 +1,4 @@
-From fe5056bb463e7809f57aa46319e3585175543c58 Mon Sep 17 00:00:00 2001
+From 309c525f1d4ba2809ae4d685a0d6a223d56202de Mon Sep 17 00:00:00 2001
 From: James M Snell <jasnell@gmail.com>
 Date: Tue, 6 Feb 2024 17:21:53 -0800
 Subject: Attach continuation context to Promise thenable task
2 changes: 1 addition & 1 deletion patches/v8/0014-increase-visibility-of-virtual-method.patch
@@ -1,4 +1,4 @@
-From 86b7d5a1d0d150a5e7b4577c433ca202c39cae58 Mon Sep 17 00:00:00 2001
+From aeb965c8aded16e17cd4be1c9e29f266007f5ec9 Mon Sep 17 00:00:00 2001
 From: Mike Aizatsky <maizatskyi@cloudflare.com>
 Date: Tue, 6 Feb 2024 12:55:07 -0800
 Subject: increase visibility of virtual method
2 changes: 1 addition & 1 deletion patches/v8/0015-Add-ValueSerializer-SetTreatFunctionsAsHostObjects.patch
@@ -1,4 +1,4 @@
-From 5cd6b02898bb2090eb63f2d891342e1e7aff611b Mon Sep 17 00:00:00 2001
+From 3146e78539903ecedd62d54702e49ae9475c6a7a Mon Sep 17 00:00:00 2001
 From: Kenton Varda <kenton@cloudflare.com>
 Date: Sat, 2 Mar 2024 09:00:18 -0600
 Subject: Add ValueSerializer::SetTreatFunctionsAsHostObjects().
218 changes: 218 additions & 0 deletions patches/v8/0016-wasm-liftoff-arm64-Fix-LoadTaggedPointer.patch
@@ -0,0 +1,218 @@
From 8a89d470ac20ef90687c1267ae2932059dc82fa2 Mon Sep 17 00:00:00 2001
From: Jakob Kummerow <jkummerow@chromium.org>
Date: Mon, 19 Feb 2024 19:59:06 +0100
Subject: [wasm][liftoff][arm64] Fix LoadTaggedPointer

The previous way to compute the protected_load_pc didn't account for
the possibility of a constant pool getting emitted.

Fixed: 325359458
Change-Id: I7c284175d3c0e9d802ad307adf8d93e721d35361
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/5307536
Reviewed-by: Manos Koukoutos <manoskouk@chromium.org>
Commit-Queue: Jakob Kummerow <jkummerow@chromium.org>
Auto-Submit: Jakob Kummerow <jkummerow@chromium.org>
Cr-Commit-Position: refs/heads/main@{#92412}
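
In short: the old code emitted the load, then assumed the protected instruction sat at a fixed distance from the current PC (one instruction back, or two with pointer compression). Any extra instruction the assembler interleaves behind the load, such as constant pool data, breaks that fixed distance. The fix blocks pool emission around the load and records the PC from a destructor instead. Below is a minimal standalone sketch of the idea; MockAssembler, RecordProtectedPc, and EmitTaggedLoad are invented stand-ins, not V8 classes.

#include <cassert>
#include <cstdint>
#include <vector>

constexpr int kInstrSize = 4;  // arm64 instructions are fixed-width

struct MockAssembler {
  std::vector<uint32_t> code;
  bool pools_blocked = false;
  int pc_offset() const { return static_cast<int>(code.size()) * kInstrSize; }
  void Emit(uint32_t insn) { code.push_back(insn); }
  // Constant pool data only lands in the stream when pools are not blocked.
  void MaybeEmitConstantPool() { if (!pools_blocked) Emit(0xDEADBEEF); }
};

// Simplified analogue of GetProtectedInstruction: block pools for the whole
// emission (the real MacroAssembler::BlockPoolsScope also flushes any pending
// pool first), then count back from pc_offset() in the destructor.
class RecordProtectedPc {
 public:
  RecordProtectedPc(MockAssembler* assm, uint32_t* out_pc, int extra)
      : assm_(assm), out_pc_(out_pc), extra_(extra) {
    assm_->pools_blocked = true;
  }
  ~RecordProtectedPc() {
    assm_->pools_blocked = false;
    if (out_pc_) *out_pc_ = assm_->pc_offset() - kInstrSize * (1 + extra_);
  }
 private:
  MockAssembler* assm_;
  uint32_t* out_pc_;
  int extra_;  // instructions expected after the load, e.g. decompression
};

// Emits like LoadTaggedField under pointer compression: setup, load, decompress.
void EmitTaggedLoad(MockAssembler* assm) {
  assm->Emit(0x11111111);         // materialize an immediate offset
  assm->MaybeEmitConstantPool();  // a pool check may fire at any point...
  assm->Emit(0x22222222);         // the protected load
  assm->MaybeEmitConstantPool();  // ...including right behind the load
  assm->Emit(0x33333333);         // pointer decompression
}

int main() {
  MockAssembler assm;
  uint32_t protected_pc = 0;
  {
    RecordProtectedPc record(&assm, &protected_pc, /*extra=*/1);
    EmitTaggedLoad(&assm);
  }
  // With pools blocked, the load really is second-from-last, so counting
  // back from the end is sound; without blocking, a pool emitted after the
  // load would shift it and the computed PC would point at pool data.
  assert(assm.code[protected_pc / kInstrSize] == 0x22222222);
  return 0;
}

Blocking pools up front is what makes the count-from-the-end computation sound again; the kReservedInstructions bound in the real patch caps how much code may be emitted while pools stay blocked.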

diff --git a/src/wasm/baseline/arm64/liftoff-assembler-arm64-inl.h b/src/wasm/baseline/arm64/liftoff-assembler-arm64-inl.h
index b5eb6dc9b23e3f478425dfe75085cc447cb7cd4e..0be1ffb97778cf0aeb7ce20478ea26c3155ea628 100644
--- a/src/wasm/baseline/arm64/liftoff-assembler-arm64-inl.h
+++ b/src/wasm/baseline/arm64/liftoff-assembler-arm64-inl.h
@@ -544,6 +544,64 @@ void LiftoffAssembler::SpillInstanceData(Register instance) {

void LiftoffAssembler::ResetOSRTarget() {}

+enum class LoadOrStore : bool { kLoad, kStore };
+
+// The purpose of this class is to reconstruct the PC offset of a protected
+// instruction (load or store) that has just been emitted. We cannot simply
+// record the current PC offset before emitting the instruction, because the
+// respective helper function we call might emit more than one instruction
+// (e.g. to load an immediate into a register, or to get a constant pool
+// out of the way).
+//
+// Template arguments:
+// kLoadOrStore:
+//    DCHECK that the detected protected instruction has the right type.
+// kExtraEmittedInstructions:
+//    By default, we assume that when the destructor runs, the PC is right
+//    behind the protected instruction. If additional instructions are expected
+//    to have been emitted (such as a pointer decompression), specify their
+//    number here.
+template <LoadOrStore kLoadOrStore, uint8_t kExtraEmittedInstructions = 0>
+class GetProtectedInstruction {
+ public:
+  GetProtectedInstruction(LiftoffAssembler* assm,
+                          uint32_t* protected_instruction_pc)
+      : assm_(assm),
+        protected_instruction_pc_(protected_instruction_pc),
+        // First emit any required pools...
+        blocked_pools_scope_(assm, kReservedInstructions * kInstrSize),
+        // ...then record the PC offset before the relevant instruction
+        // sequence.
+        previous_pc_offset_(assm->pc_offset()) {}
+
+  ~GetProtectedInstruction() {
+    if (!protected_instruction_pc_) return;
+    *protected_instruction_pc_ =
+        assm_->pc_offset() - kInstrSize * (1 + kExtraEmittedInstructions);
+    if constexpr (kLoadOrStore == LoadOrStore::kLoad) {
+      DCHECK(assm_->InstructionAt(*protected_instruction_pc_)->IsLoad());
+    } else {
+      DCHECK(assm_->InstructionAt(*protected_instruction_pc_)->IsStore());
+    }
+    // Make sure {kReservedInstructions} was large enough.
+    DCHECK_LE(assm_->pc_offset() - previous_pc_offset_,
+              kReservedInstructions * kInstrSize);
+    USE(previous_pc_offset_);
+  }
+
+ private:
+  // For simplicity, we hard-code this value. We could make it a template
+  // argument if we needed more flexibility. It must be at least the maximum
+  // length of the instruction sequence emitted by the {LoadTaggedField} etc.
+  // helper functions below.
+  static constexpr int kReservedInstructions = 4;
+
+  LiftoffAssembler* assm_;
+  uint32_t* protected_instruction_pc_;
+  MacroAssembler::BlockPoolsScope blocked_pools_scope_;
+  int previous_pc_offset_;
+};
+
 void LiftoffAssembler::LoadTaggedPointer(Register dst, Register src_addr,
                                          Register offset_reg,
                                          int32_t offset_imm,
@@ -553,19 +611,11 @@ void LiftoffAssembler::LoadTaggedPointer(Register dst, Register src_addr,
   unsigned shift_amount = !needs_shift ? 0 : COMPRESS_POINTERS_BOOL ? 2 : 3;
   MemOperand src_op = liftoff::GetMemOp(this, &temps, src_addr, offset_reg,
                                         offset_imm, false, shift_amount);
+  DCHECK(!src_op.IsPostIndex());  // See MacroAssembler::LoadStoreMacroComplex.
+  constexpr uint8_t kDecompressionInstruction = COMPRESS_POINTERS_BOOL ? 1 : 0;
+  GetProtectedInstruction<LoadOrStore::kLoad, kDecompressionInstruction>
+      collect_protected_load(this, protected_load_pc);
   LoadTaggedField(dst, src_op);
-
-  // Since LoadTaggedField might start with an instruction loading an immediate
-  // argument to a register, we have to compute the {protected_load_pc} after
-  // calling it.
-  // In case of compressed pointers, there is an additional instruction
-  // (pointer decompression) after the load.
-  uint8_t protected_instruction_offset_bias =
-      COMPRESS_POINTERS_BOOL ? 2 * kInstrSize : kInstrSize;
-  if (protected_load_pc) {
-    *protected_load_pc = pc_offset() - protected_instruction_offset_bias;
-    DCHECK(InstructionAt(*protected_load_pc)->IsLoad());
-  }
 }

void LiftoffAssembler::LoadProtectedPointer(Register dst, Register src_addr,
@@ -602,23 +652,19 @@ void LiftoffAssembler::StoreTaggedPointer(Register dst_addr,
   UseScratchRegisterScope temps(this);
   Operand offset_op = offset_reg.is_valid() ? Operand(offset_reg.W(), UXTW)
                                             : Operand(offset_imm);
-  // For the write barrier (below), we cannot have both an offset register and
-  // an immediate offset. Add them to a 32-bit offset initially, but in a 64-bit
-  // register, because that's needed in the MemOperand below.
+  // This is similar to {liftoff::GetMemOp}, but leaves {dst_addr} alone, and
+  // gives us a combined {offset_op}, which we need for the write barrier
+  // below. The 32-bit addition is okay because on-heap offsets don't get
+  // bigger than that.
   if (offset_reg.is_valid() && offset_imm) {
     Register effective_offset = temps.AcquireX();
     Add(effective_offset.W(), offset_reg.W(), offset_imm);
     offset_op = effective_offset;
   }
-
-  StoreTaggedField(src, MemOperand(dst_addr.X(), offset_op));
-
-  // Since StoreTaggedField might start with an instruction loading an immediate
-  // argument to a register, we have to compute the {protected_store_pc} after
-  // calling it.
-  if (protected_store_pc) {
-    *protected_store_pc = pc_offset() - kInstrSize;
-    DCHECK(InstructionAt(*protected_store_pc)->IsStore());
+  {
+    GetProtectedInstruction<LoadOrStore::kStore> collect_protected_store(
+        this, protected_store_pc);
+    StoreTaggedField(src, MemOperand(dst_addr.X(), offset_op));
   }
 
   if (skip_write_barrier || v8_flags.disable_write_barriers) return;
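
A subtlety in the hunk above: the new { ... } block ends the guard's lifetime right after StoreTaggedField, so the PC is recorded before any write-barrier code is emitted; the barrier is variable-length and must not enter the count-from-the-end computation. A contrived sketch of that scoping pattern (names invented, not V8 code):

#include <cstdio>

// Records "the PC" when it goes out of scope, like GetProtectedInstruction.
struct Guard {
  ~Guard() { printf("PC recorded here\n"); }
};

void StoreTaggedPointerShape() {
  printf("compute effective offset\n");
  {
    Guard g;  // lifetime limited to the store itself
    printf("emit store\n");
  }           // <- destructor fires at this brace
  printf("emit write barrier (variable length, not counted)\n");
}

int main() {
  StoreTaggedPointerShape();
  return 0;
}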
@@ -643,6 +689,9 @@ void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
   unsigned shift_amount = needs_shift ? type.size_log_2() : 0;
   MemOperand src_op = liftoff::GetMemOp(this, &temps, src_addr, offset_reg,
                                         offset_imm, i64_offset, shift_amount);
+  DCHECK(!src_op.IsPostIndex());  // See MacroAssembler::LoadStoreMacroComplex.
+  GetProtectedInstruction<LoadOrStore::kLoad> collect_protected_load(
+      this, protected_load_pc);
   switch (type.value()) {
     case LoadType::kI32Load8U:
     case LoadType::kI64Load8U:
@@ -684,13 +733,6 @@ void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
       Ldr(dst.fp().Q(), src_op);
       break;
   }
-  // Since {Ldr*} macros might start with an instruction loading an immediate
-  // argument to a register, we have to compute the {protected_load_pc} after
-  // calling them.
-  if (protected_load_pc) {
-    *protected_load_pc = pc_offset() - kInstrSize;
-    DCHECK(InstructionAt(*protected_load_pc)->IsLoad());
-  }
 }

void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
@@ -701,6 +743,9 @@ void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
   UseScratchRegisterScope temps(this);
   MemOperand dst_op = liftoff::GetMemOp(this, &temps, dst_addr, offset_reg,
                                         offset_imm, i64_offset);
+  DCHECK(!dst_op.IsPostIndex());  // See MacroAssembler::LoadStoreMacroComplex.
+  GetProtectedInstruction<LoadOrStore::kStore> collect_protected_store(
+      this, protected_store_pc);
   switch (type.value()) {
     case StoreType::kI32Store8:
     case StoreType::kI64Store8:
@@ -727,13 +772,6 @@ void LiftoffAssembler::Store(Register dst_addr, Register offset_reg,
       Str(src.fp().Q(), dst_op);
       break;
   }
-  // Since {Str*} macros might start with an instruction loading an immediate
-  // argument to a register, we have to compute the {protected_store_pc} after
-  // calling them.
-  if (protected_store_pc) {
-    *protected_store_pc = pc_offset() - kInstrSize;
-    DCHECK(InstructionAt(*protected_store_pc)->IsStore());
-  }
 }

namespace liftoff {
diff --git a/test/mjsunit/regress/wasm/regress-325359458.js b/test/mjsunit/regress/wasm/regress-325359458.js
new file mode 100644
index 0000000000000000000000000000000000000000..57b71ecd0f9139f41ea103ef56a1cac6a571e968
--- /dev/null
+++ b/test/mjsunit/regress/wasm/regress-325359458.js
@@ -0,0 +1,23 @@
+// Copyright 2024 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+d8.file.execute('test/mjsunit/wasm/wasm-module-builder.js');
+
+let builder = new WasmModuleBuilder();
+let struct = builder.addStruct([makeField(kWasmAnyRef, true)]);
+
+let body = [kExprRefNull, struct];
+for (let i = 0; i < 800; i++) {
+  body = body.concat(...[
+    kGCPrefix, kExprStructGet, struct, 0,
+    kGCPrefix, kExprRefCastNull, struct,
+  ]);
+}
+body = body.concat(...[kExprDrop]);
+builder.addFunction("main", kSig_v_v).exportFunc().addBody(body);
+
+let instance = builder.instantiate();
+// Just check if we can compile without DCHECK failures.
+assertThrows(() => instance.exports.main(), WebAssembly.RuntimeError,
+             'dereferencing a null pointer');
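
The 800 struct.get/ref.cast pairs make the compiled function long enough that the arm64 assembler emits a constant pool somewhere inside it. In a debug build the mis-recorded PC then trips the IsLoad() DCHECK at compile time, which is what the comment in the test refers to. In a release build the stakes are the trap-handler metadata: V8 records each protected load's PC so the signal handler can turn a fault at that PC (here, struct.get on a null reference) into a catchable WebAssembly.RuntimeError. A rough sketch of that lookup, with invented data structures:

#include <cassert>
#include <cstdint>
#include <set>

// Hypothetical stand-in for V8's per-module protected-instruction list,
// populated at compile time from values like protected_load_pc.
std::set<uintptr_t> protected_pcs;

// Called from the fault handler: only faults at registered PCs may be
// converted into wasm traps; anything else must be treated as a real crash.
bool IsRecognizedTrap(uintptr_t fault_pc) {
  return protected_pcs.count(fault_pc) > 0;
}

int main() {
  protected_pcs.insert(0x1000);       // the PC the guard recorded
  assert(IsRecognizedTrap(0x1000));   // fault at the load -> RuntimeError
  assert(!IsRecognizedTrap(0x1004));  // a wrongly recorded PC misses here
  return 0;
}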