#include "src/wasm/jump-table-assembler.h"
#include "src/codegen/macro-assembler-inl.h"
namespace v8 {
namespace internal {
namespace wasm {
void JumpTableAssembler::GenerateLazyCompileTable(
Address base, uint32_t num_slots, uint32_t num_imported_functions,
Address wasm_compile_lazy_target) { … }
void JumpTableAssembler::InitializeJumpsToLazyCompileTable(
Address base, uint32_t num_slots, Address lazy_compile_table_start) { … }
#if V8_TARGET_ARCH_X64
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
Address lazy_compile_target) { … }
bool JumpTableAssembler::EmitJumpSlot(Address target) { … }
void JumpTableAssembler::EmitFarJumpSlot(Address target) { … }
void JumpTableAssembler::PatchFarJumpSlot(Address slot, Address target) { … }
void JumpTableAssembler::NopBytes(int bytes) { … }
void JumpTableAssembler::SkipUntil(int offset) { … }
#elif V8_TARGET_ARCH_IA32
// Emits the lazy-compile slot for {func_index}: materialize the function
// index in the register the lazy-compile path reads, then jump to the shared
// lazy-compile target.
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  mov(kWasmCompileLazyFuncIndexRegister, func_index);
  jmp(lazy_compile_target, RelocInfo::NO_INFO);
}

// Emits a direct jump to {target}. A rel32 jump reaches any address in the
// 32-bit address space, so this always succeeds.
bool JumpTableAssembler::EmitJumpSlot(Address target) {
  jmp(target, RelocInfo::NO_INFO);
  return true;
}

// The far jump table slot is the same direct jump; ia32 needs no indirect
// far-jump sequence.
void JumpTableAssembler::EmitFarJumpSlot(Address target) {
  jmp(target, RelocInfo::NO_INFO);
}

// Never called on ia32: there is no inline target word in the far jump slot
// to patch (see EmitFarJumpSlot above).
void JumpTableAssembler::PatchFarJumpSlot(Address slot, Address target) {
  UNREACHABLE();
}
void JumpTableAssembler::NopBytes(int bytes) {
if (bytes) Nop(bytes);
}
// Moves the buffer cursor forward to {offset} without emitting anything.
// ia32 instructions are variable-length, so an emitted slot can end short of
// its fixed slot size; the gap is simply skipped.
void JumpTableAssembler::SkipUntil(int offset) {
  DCHECK_GE(offset, pc_offset());  // can only skip forward
  pc_ += offset - pc_offset();
}
#elif V8_TARGET_ARCH_ARM
// Emits the lazy-compile slot: load the function index, then jump to the
// shared lazy-compile target via EmitJumpSlot.
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  Move32BitImmediate(kWasmCompileLazyFuncIndexRegister, Operand(func_index));
  EmitJumpSlot(lazy_compile_target);
}

// Jumps to {target} by loading it directly into pc. Tagged WASM_CALL so the
// slot can be found via relocation info. Always succeeds on arm.
bool JumpTableAssembler::EmitJumpSlot(Address target) {
  Move32BitImmediate(pc, Operand(target, RelocInfo::WASM_CALL));
  // Force the constant pool to be emitted right here (no jump over it
  // needed): a jump table slot must be self-contained.
  CheckConstPool(true, false);
  return true;
}

// Far jump slot layout: a pc-relative load into pc immediately followed by
// the 32-bit target address inline.
void JumpTableAssembler::EmitFarJumpSlot(Address target) {
  // ldr pc, [pc, #-4]: with the ARM pc read-ahead of 8 this loads the word
  // emitted directly after this instruction (the dd below).
  ldr_pcrel(pc, -kInstrSize);
  dd(target);
  static_assert(kInstrSize == kInt32Size);
  static_assert(kFarJumpTableSlotSize == 2 * kInstrSize);
}

// Never called on arm; far jump slots are not patched concurrently here.
void JumpTableAssembler::PatchFarJumpSlot(Address slot, Address target) {
  UNREACHABLE();
}
// Pads with whole-instruction nops. {bytes} must be a non-negative multiple
// of the instruction size.
void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  DCHECK_EQ(0, bytes % kInstrSize);
  int remaining = bytes;
  while (remaining > 0) {
    nop();
    remaining -= kInstrSize;
  }
}
// All slot sequences on arm have a fixed size, so there is never anything to
// skip; just assert that we are already at {offset}.
void JumpTableAssembler::SkipUntil(int offset) {
  DCHECK_EQ(offset, pc_offset());
}
#elif V8_TARGET_ARCH_ARM64
// Lazy-compile slot: (optional) code-entry marker, load the function index,
// jump to the shared lazy-compile target, then pad to the fixed slot size.
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  int start = pc_offset();
  CodeEntry();  // branch-target (BTI) marker; empty without CFI
  Mov(kWasmCompileLazyFuncIndexRegister.W(), func_index);
  Jump(lazy_compile_target, RelocInfo::NO_INFO);
  // The sequence above may be one instruction short of the slot size
  // depending on configuration; pad with a single nop if so.
  int nop_bytes = start + kLazyCompileTableSlotSize - pc_offset();
  DCHECK(nop_bytes == 0 || nop_bytes == kInstrSize);
  if (nop_bytes) nop();
}

// Emits a near branch to {target} if it is within b's range; returns false
// otherwise so the caller routes the jump through the far jump table.
bool JumpTableAssembler::EmitJumpSlot(Address target) {
#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
  static constexpr ptrdiff_t kCodeEntryMarkerSize = kInstrSize;
#else
  static constexpr ptrdiff_t kCodeEntryMarkerSize = 0;
#endif
  // The branch is emitted after the (optional) code-entry marker, so measure
  // the distance from there.
  uint8_t* jump_pc = pc_ + kCodeEntryMarkerSize;
  ptrdiff_t jump_distance = reinterpret_cast<uint8_t*>(target) - jump_pc;
  DCHECK_EQ(0, jump_distance % kInstrSize);
  int64_t instr_offset = jump_distance / kInstrSize;
  if (!MacroAssembler::IsNearCallOffset(instr_offset)) {
    return false;
  }
  CodeEntry();
  DCHECK_EQ(jump_pc, pc_);
  DCHECK_EQ(instr_offset,
            reinterpret_cast<Instr*>(target) - reinterpret_cast<Instr*>(pc_));
  DCHECK(is_int26(instr_offset));  // b takes a 26-bit instruction offset
  b(static_cast<int>(instr_offset));
  return true;
}

// Far jump slot: load the 64-bit target from an inline literal and branch
// through it. With CFI the slot gains the entry marker plus an alignment
// nop (6 instructions total instead of 4, per the static_assert below).
void JumpTableAssembler::EmitFarJumpSlot(Address target) {
  CodeEntry();
  constexpr Register kTmpReg = x16;
  DCHECK(TmpList()->IncludesAliasOf(kTmpReg));
  // Pc-relative distance (in instructions) from the ldr to the inline
  // target word; one more when the CFI marker/nop are present.
  int kOffset = ENABLE_CONTROL_FLOW_INTEGRITY_BOOL ? 3 : 2;
  ldr_pcrel(kTmpReg, kOffset);
  br(kTmpReg);
#ifdef V8_ENABLE_CONTROL_FLOW_INTEGRITY
  nop();  // keeps the inline target word 8-byte aligned
#endif
  dq(target);
  static_assert(2 * kInstrSize == kSystemPointerSize);
  const int kSlotCount = ENABLE_CONTROL_FLOW_INTEGRITY_BOOL ? 6 : 4;
  static_assert(kFarJumpTableSlotSize == kSlotCount * kInstrSize);
}

// Repoints the inline target word of a far jump slot emitted by
// EmitFarJumpSlot. The store is a relaxed atomic because other threads may
// be executing the slot while it is being patched.
void JumpTableAssembler::PatchFarJumpSlot(Address slot, Address target) {
  // Offset of the inline target word within the slot; mirrors the layout in
  // EmitFarJumpSlot (2 or 4 instructions before the literal).
  int kTargetOffset =
      ENABLE_CONTROL_FLOW_INTEGRITY_BOOL ? 4 * kInstrSize : 2 * kInstrSize;
  // Pointer-size alignment is required for the store to be atomic.
  DCHECK(IsAligned(slot + kTargetOffset, kSystemPointerSize));
  reinterpret_cast<std::atomic<Address>*>(slot + kTargetOffset)
      ->store(target, std::memory_order_relaxed);
}
// Pads with whole-instruction nops. {bytes} must be a non-negative multiple
// of the instruction size.
void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  DCHECK_EQ(0, bytes % kInstrSize);
  int remaining = bytes;
  while (remaining > 0) {
    nop();
    remaining -= kInstrSize;
  }
}
// All slot sequences on arm64 have a fixed size, so there is never anything
// to skip; just assert that we are already at {offset}.
void JumpTableAssembler::SkipUntil(int offset) {
  DCHECK_EQ(offset, pc_offset());
}
#elif V8_TARGET_ARCH_S390X
// Lazy-compile slot: load the function index, then branch to the target
// through r1.
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  lgfi(kWasmCompileLazyFuncIndexRegister, Operand(func_index));
  mov(r1, Operand(lazy_compile_target, RelocInfo::CODE_TARGET));
  b(r1);
}

// Emits a relative branch to {target} if it is in range. brcl encodes its
// displacement in halfwords, hence the division by 2 and the int32 range
// check on the halved distance.
bool JumpTableAssembler::EmitJumpSlot(Address target) {
  intptr_t relative_target = reinterpret_cast<uint8_t*>(target) - pc_;
  if (!is_int32(relative_target / 2)) {
    return false;
  }
  brcl(al, Operand(relative_target / 2));
  nop(0);  // pad out the fixed-size jump slot
  return true;
}

// Far jump slot: load the 64-bit target from an inline doubleword-aligned
// literal (lgrl) and branch through ip.
void JumpTableAssembler::EmitFarJumpSlot(Address target) {
  Label target_addr;
  lgrl(ip, &target_addr);
  b(ip);
  // The literal must be 8-byte aligned, both for the load and for the
  // atomic patch in PatchFarJumpSlot.
  CHECK_EQ(reinterpret_cast<Address>(pc_) & 0x7, 0);
  bind(&target_addr);
  dp(target);
}

// Repoints the inline target word of a far jump slot; the word lives 8
// bytes into the slot (after lgrl + b). Relaxed atomic store because the
// slot may be executing concurrently.
void JumpTableAssembler::PatchFarJumpSlot(Address slot, Address target) {
  Address target_addr = slot + 8;
  reinterpret_cast<std::atomic<Address>*>(target_addr)
      ->store(target, std::memory_order_relaxed);
}
// Pads with 2-byte nops. {bytes} must be non-negative and even.
void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  DCHECK_EQ(0, bytes % 2);
  int remaining = bytes;
  while (remaining > 0) {
    nop(0);
    remaining -= 2;
  }
}
// All slot sequences on s390x have a fixed size, so there is never anything
// to skip; just assert that we are already at {offset}.
void JumpTableAssembler::SkipUntil(int offset) {
  DCHECK_EQ(offset, pc_offset());
}
#elif V8_TARGET_ARCH_MIPS64
// Lazy-compile slot: load the function index, jump to the target, then pad
// to the fixed slot size.
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  int start = pc_offset();
  li(kWasmCompileLazyFuncIndexRegister, func_index);
  Jump(lazy_compile_target, RelocInfo::NO_INFO);
  int nop_bytes = start + kLazyCompileTableSlotSize - pc_offset();
  DCHECK_EQ(nop_bytes % kInstrSize, 0);
  for (int i = 0; i < nop_bytes; i += kInstrSize) nop();
}

// Emits a patchable jump sequence (see MacroAssembler::PatchAndJump); this
// always fits, so it never fails.
bool JumpTableAssembler::EmitJumpSlot(Address target) {
  PatchAndJump(target);
  return true;
}

// Far jump slot: materialize the absolute target in t9 and jump through it.
void JumpTableAssembler::EmitFarJumpSlot(Address target) {
  li(t9, Operand(target, RelocInfo::OFF_HEAP_TARGET));
  Jump(t9);
}

// Never called on mips64; far jump slots are not patched concurrently here.
void JumpTableAssembler::PatchFarJumpSlot(Address slot, Address target) {
  UNREACHABLE();
}
// Pads with whole-instruction nops. {bytes} must be a non-negative multiple
// of the instruction size.
void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  DCHECK_EQ(0, bytes % kInstrSize);
  int remaining = bytes;
  while (remaining > 0) {
    nop();
    remaining -= kInstrSize;
  }
}
// All slot sequences on mips64 have a fixed size, so there is never anything
// to skip; just assert that we are already at {offset}.
void JumpTableAssembler::SkipUntil(int offset) {
  DCHECK_EQ(offset, pc_offset());
}
#elif V8_TARGET_ARCH_LOONG64
// Lazy-compile slot: load the function index, emit the jump, then pad to the
// fixed slot size.
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  DCHECK(is_int32(func_index));
  int start = pc_offset();
  li(kWasmCompileLazyFuncIndexRegister, (int32_t)func_index);
  // The lazy-compile table must always be reachable with a near jump.
  CHECK(EmitJumpSlot(lazy_compile_target));
  int nop_bytes = start + kLazyCompileTableSlotSize - pc_offset();
  DCHECK_EQ(nop_bytes % kInstrSize, 0);
  for (int i = 0; i < nop_bytes; i += kInstrSize) nop();
}

// Emits a single pc-relative branch if {target} is within b's 26-bit
// instruction-offset range; otherwise reports failure so the caller routes
// the jump through the far jump table.
bool JumpTableAssembler::EmitJumpSlot(Address target) {
  intptr_t relative_target = reinterpret_cast<uint8_t*>(target) - pc_;
  DCHECK_EQ(relative_target % 4, 0);
  intptr_t instr_offset = relative_target / kInstrSize;
  if (!is_int26(instr_offset)) {
    return false;
  }
  b(instr_offset);
  return true;
}

// Far jump slot: compute the address of the inline literal (4 instructions
// ahead of the pcaddi), load the 64-bit target, and jump through t7. The nop
// keeps the literal 8-byte aligned (checked below).
void JumpTableAssembler::EmitFarJumpSlot(Address target) {
  pcaddi(t7, 4);
  Ld_d(t7, MemOperand(t7, 0));
  jirl(zero_reg, t7, 0);
  nop();
  DCHECK_EQ(reinterpret_cast<uint64_t>(pc_) % 8, 0);
  dq(target);
}

// Repoints the inline target word in the last 8 bytes of a far jump slot.
// Relaxed atomic store because the slot may be executing concurrently.
void JumpTableAssembler::PatchFarJumpSlot(Address slot, Address target) {
  Address target_addr = slot + kFarJumpTableSlotSize - 8;
  reinterpret_cast<std::atomic<Address>*>(target_addr)
      ->store(target, std::memory_order_relaxed);
}
// Pads with whole-instruction nops. {bytes} must be a non-negative multiple
// of the instruction size.
void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  DCHECK_EQ(0, bytes % kInstrSize);
  int remaining = bytes;
  while (remaining > 0) {
    nop();
    remaining -= kInstrSize;
  }
}
// All slot sequences on loong64 have a fixed size, so there is never
// anything to skip; just assert that we are already at {offset}.
void JumpTableAssembler::SkipUntil(int offset) {
  DCHECK_EQ(offset, pc_offset());
}
#elif V8_TARGET_ARCH_PPC64
// Lazy-compile slot: load the function index, load the absolute target into
// the count register, branch through it, then pad to the fixed slot size.
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  int start = pc_offset();
  mov(kWasmCompileLazyFuncIndexRegister, Operand(func_index));
  mov(r0, Operand(lazy_compile_target));
  mtctr(r0);
  bctr();
  int nop_bytes = start + kLazyCompileTableSlotSize - pc_offset();
  DCHECK_EQ(nop_bytes % kInstrSize, 0);
  for (int i = 0; i < nop_bytes; i += kInstrSize) nop();
}

// Emits a single relative branch if {target} is within b's 26-bit byte
// displacement; otherwise reports failure so the caller routes the jump
// through the far jump table.
bool JumpTableAssembler::EmitJumpSlot(Address target) {
  intptr_t relative_target = reinterpret_cast<uint8_t*>(target) - pc_;
  if (!is_int26(relative_target)) {
    return false;
  }
  b(relative_target, LeaveLK);  // LeaveLK: plain jump, not a call
  return true;
}

// Far jump slot: load the 64-bit target from an inline literal placed in the
// last 8 bytes of the slot, then branch through the count register.
void JumpTableAssembler::EmitFarJumpSlot(Address target) {
  uint8_t* start = pc_;
  // ip <- absolute address of the inline literal at the end of this slot.
  mov(ip, Operand(reinterpret_cast<Address>(start + kFarJumpTableSlotSize -
                                            8)));
  LoadU64(ip, MemOperand(ip));
  mtctr(ip);
  bctr();
  // Pad so the literal lands exactly in the final 8 bytes, 8-byte aligned
  // for the atomic patch in PatchFarJumpSlot.
  uint8_t* end = pc_;
  int used = end - start;
  CHECK(used < kFarJumpTableSlotSize - 8);
  NopBytes(kFarJumpTableSlotSize - 8 - used);
  CHECK_EQ(reinterpret_cast<Address>(pc_) & 0x7, 0);
  dp(target);
}

// Repoints the inline literal in the last 8 bytes of a far jump slot.
// Relaxed atomic store because the slot may be executing concurrently.
void JumpTableAssembler::PatchFarJumpSlot(Address slot, Address target) {
  Address target_addr = slot + kFarJumpTableSlotSize - 8;
  reinterpret_cast<std::atomic<Address>*>(target_addr)
      ->store(target, std::memory_order_relaxed);
}
// Pads with 4-byte nops. {bytes} must be a non-negative multiple of 4.
void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  DCHECK_EQ(0, bytes % 4);
  int remaining = bytes;
  while (remaining > 0) {
    nop(0);
    remaining -= 4;
  }
}
// All slot sequences on ppc64 have a fixed size, so there is never anything
// to skip; just assert that we are already at {offset}.
void JumpTableAssembler::SkipUntil(int offset) {
  DCHECK_EQ(offset, pc_offset());
}
#elif V8_TARGET_ARCH_RISCV64
// Lazy-compile slot: load the function index, jump to the target, then pad
// to the fixed slot size.
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  int start = pc_offset();
  li(kWasmCompileLazyFuncIndexRegister, func_index);
  Jump(lazy_compile_target, RelocInfo::NO_INFO);
  int nop_bytes = start + kLazyCompileTableSlotSize - pc_offset();
  DCHECK_EQ(nop_bytes % kInstrSize, 0);
  for (int i = 0; i < nop_bytes; i += kInstrSize) nop();
}

// Emits a patchable jump sequence (see MacroAssembler::PatchAndJump); this
// always fits, so it never fails.
bool JumpTableAssembler::EmitJumpSlot(Address target) {
  PatchAndJump(target);
  return true;
}

// Far jump slot: auipc captures the current pc, then the 64-bit target is
// loaded from the inline literal 4 instructions ahead and jumped through.
// The nop keeps the literal 8-byte aligned.
void JumpTableAssembler::EmitFarJumpSlot(Address target) {
  UseScratchRegisterScope temp(this);
  Register rd = temp.Acquire();
  auipc(rd, 0);  // rd <- address of this instruction
  ld(rd, rd, 4 * kInstrSize);
  Jump(rd);
  nop();
  dq(target);
}

// Never called on riscv64; far jump slots are not patched concurrently here.
void JumpTableAssembler::PatchFarJumpSlot(Address slot, Address target) {
  UNREACHABLE();
}
// Pads with whole-instruction nops. {bytes} must be a non-negative multiple
// of the instruction size.
void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  DCHECK_EQ(0, bytes % kInstrSize);
  int remaining = bytes;
  while (remaining > 0) {
    nop();
    remaining -= kInstrSize;
  }
}
// All slot sequences on riscv64 have a fixed size, so there is never
// anything to skip; just assert that we are already at {offset}.
void JumpTableAssembler::SkipUntil(int offset) {
  DCHECK_EQ(offset, pc_offset());
}
#elif V8_TARGET_ARCH_RISCV32
// Lazy-compile slot: load the function index, jump to the target, then pad
// to the fixed slot size.
void JumpTableAssembler::EmitLazyCompileJumpSlot(uint32_t func_index,
                                                 Address lazy_compile_target) {
  int start = pc_offset();
  li(kWasmCompileLazyFuncIndexRegister, func_index);
  Jump(lazy_compile_target, RelocInfo::NO_INFO);
  int nop_bytes = start + kLazyCompileTableSlotSize - pc_offset();
  DCHECK_EQ(nop_bytes % kInstrSize, 0);
  for (int i = 0; i < nop_bytes; i += kInstrSize) nop();
}

// Emits a patchable jump sequence (see MacroAssembler::PatchAndJump); this
// always fits, so it never fails.
bool JumpTableAssembler::EmitJumpSlot(Address target) {
  PatchAndJump(target);
  return true;
}

// Far jump slot: auipc captures the current pc, then the 32-bit target is
// loaded (lw) from the inline literal 4 instructions ahead and jumped
// through. NOTE(review): dq emits 8 bytes while lw reads only 4 —
// presumably for slot-layout parity with riscv64; verify against
// kFarJumpTableSlotSize.
void JumpTableAssembler::EmitFarJumpSlot(Address target) {
  UseScratchRegisterScope temp(this);
  Register rd = temp.Acquire();
  auipc(rd, 0);  // rd <- address of this instruction
  lw(rd, rd, 4 * kInstrSize);
  Jump(rd);
  nop();
  dq(target);
}

// Never called on riscv32; far jump slots are not patched concurrently here.
void JumpTableAssembler::PatchFarJumpSlot(Address slot, Address target) {
  UNREACHABLE();
}
// Pads with whole-instruction nops. {bytes} must be a non-negative multiple
// of the instruction size.
void JumpTableAssembler::NopBytes(int bytes) {
  DCHECK_LE(0, bytes);
  DCHECK_EQ(0, bytes % kInstrSize);
  int remaining = bytes;
  while (remaining > 0) {
    nop();
    remaining -= kInstrSize;
  }
}
// All slot sequences on riscv32 have a fixed size, so there is never
// anything to skip; just assert that we are already at {offset}.
void JumpTableAssembler::SkipUntil(int offset) {
  DCHECK_EQ(offset, pc_offset());
}
#else
#error Unknown architecture.
#endif
}
}
}