diff --git a/src/hotspot/cpu/riscv/codeBuffer_riscv.cpp b/src/hotspot/cpu/riscv/codeBuffer_riscv.cpp
index c481037ff1d78..cf861f03e8c92 100644
--- a/src/hotspot/cpu/riscv/codeBuffer_riscv.cpp
+++ b/src/hotspot/cpu/riscv/codeBuffer_riscv.cpp
@@ -50,7 +50,7 @@ static bool emit_shared_trampolines(CodeBuffer* cb, CodeBuffer::SharedTrampoline
   if (requests == nullptr) {
     return true;
   }
-  assert(!UseNewCode, "Not null?");
+  assert(UseTrampolines, "We are not using trampolines");
 
   MacroAssembler masm(cb);
 
diff --git a/src/hotspot/cpu/riscv/codeBuffer_riscv.hpp b/src/hotspot/cpu/riscv/codeBuffer_riscv.hpp
index 1618647a3f0e3..de70bc2ecc935 100644
--- a/src/hotspot/cpu/riscv/codeBuffer_riscv.hpp
+++ b/src/hotspot/cpu/riscv/codeBuffer_riscv.hpp
@@ -33,7 +33,7 @@
 public:
   void flush_bundle(bool start_new_bundle) {}
 
-  static bool supports_shared_stubs() { return !UseNewCode; }
+  static bool supports_shared_stubs() { return UseTrampolines; }
 
   void share_trampoline_for(address dest, int caller_offset);
 
diff --git a/src/hotspot/cpu/riscv/globals_riscv.hpp b/src/hotspot/cpu/riscv/globals_riscv.hpp
index e22456cacc603..cfe00c7207e96 100644
--- a/src/hotspot/cpu/riscv/globals_riscv.hpp
+++ b/src/hotspot/cpu/riscv/globals_riscv.hpp
@@ -119,6 +119,8 @@ define_pd_global(intx, InlineSmallCode, 1000);
   product(bool, UseZvkn, false, EXPERIMENTAL,                               \
           "Use Zvkn group extension, Zvkned, Zvknhb, Zvkb, Zvkt")           \
   product(bool, UseRVVForBigIntegerShiftIntrinsics, true,                   \
-          "Use RVV instructions for left/right shift of BigInteger")
+          "Use RVV instructions for left/right shift of BigInteger")       \
+  product(bool, UseTrampolines, false, EXPERIMENTAL,                        \
+          "Far calls uses jal to trampoline.")
 
 #endif // CPU_RISCV_GLOBALS_RISCV_HPP
diff --git a/src/hotspot/cpu/riscv/macroAssembler_riscv.cpp b/src/hotspot/cpu/riscv/macroAssembler_riscv.cpp
index 339146fa6eeff..afb32a2fee5b9 100644
--- a/src/hotspot/cpu/riscv/macroAssembler_riscv.cpp
+++ b/src/hotspot/cpu/riscv/macroAssembler_riscv.cpp
@@ -3544,15 +3544,15 @@ address MacroAssembler::trampoline_call(Address entry) {
 
   // We need a trampoline if branches are far.
   if (!in_scratch_emit_size()) {
-    if (entry.rspec().type() == relocInfo::runtime_call_type && !UseNewCode) {
+    if (entry.rspec().type() == relocInfo::runtime_call_type && UseTrampolines) {
       assert(CodeBuffer::supports_shared_stubs(), "must support shared stubs");
       code()->share_trampoline_for(entry.target(), offset());
     } else {
       address stub = nullptr;
-      if (UseNewCode) {
-        stub = emit_address_stub(offset(), target);
-      } else {
+      if (UseTrampolines) {
         stub = emit_trampoline_stub(offset(), target);
+      } else {
+        stub = emit_address_stub(offset(), target);
       }
       if (stub == nullptr) {
         postcond(pc() == badAddress);
@@ -3569,10 +3569,10 @@ address MacroAssembler::trampoline_call(Address entry) {
   }
 #endif
   relocate(entry.rspec(), [&] {
-    if (UseNewCode) {
-      load_link(target, t0);
-    } else {
+    if (UseTrampolines) {
       jump_link(target, t0);
+    } else {
+      load_link(target, t0);
     }
   });
 
@@ -3681,7 +3681,7 @@ address MacroAssembler::emit_trampoline_stub(int insts_call_instruction_offset,
     return nullptr; // CodeBuffer::expand failed
   }
 
-  assert(!UseNewCode, "Bad");
+  assert(UseTrampolines, "Must be using trampos.");
 
   // We are always 4-byte aligned here.
   assert_alignment(pc());
@@ -3719,11 +3719,10 @@ address MacroAssembler::emit_trampoline_stub(int insts_call_instruction_offset,
 
 int MacroAssembler::max_trampoline_stub_size() {
   // Max stub size: alignment nop, TrampolineStub.
-  if (UseNewCode) {
-    return 2 * wordSize;
-  } else {
+  if (UseTrampolines) {
     return NativeInstruction::instruction_size + NativeShortCall::trampoline_size;
   }
+  return 2 * wordSize;
 }
 
 int MacroAssembler::static_call_stub_size() {
diff --git a/src/hotspot/cpu/riscv/nativeInst_riscv.cpp b/src/hotspot/cpu/riscv/nativeInst_riscv.cpp
index 28079df17319d..6614b9e2f0f7f 100644
--- a/src/hotspot/cpu/riscv/nativeInst_riscv.cpp
+++ b/src/hotspot/cpu/riscv/nativeInst_riscv.cpp
@@ -543,7 +543,7 @@ void NativeFarCall::replace_mt_safe(address instr_addr, address code_buffer) {
 // NativeCall
 
 address NativeCall::instruction_address() const {
-  if (UseNewCode && NativeFarCall::is_at(addr_at(0))) {
+  if (!UseTrampolines && NativeFarCall::is_at(addr_at(0))) {
     return NativeFarCall::at(addr_at(0))->instruction_address();
   } else {
     return NativeShortCall::at(addr_at(0))->instruction_address();
@@ -551,7 +551,7 @@ address NativeCall::instruction_address() const {
 }
 
 address NativeCall::next_instruction_address() const {
-  if (UseNewCode && NativeFarCall::is_at(addr_at(0))) {
+  if (!UseTrampolines && NativeFarCall::is_at(addr_at(0))) {
     return NativeFarCall::at(addr_at(0))->next_instruction_address();
   } else {
     return NativeShortCall::at(addr_at(0))->next_instruction_address();
@@ -559,7 +559,7 @@ address NativeCall::next_instruction_address() const {
 }
 
 address NativeCall::return_address() const {
-  if (UseNewCode && NativeFarCall::is_at(addr_at(0))) {
+  if (!UseTrampolines && NativeFarCall::is_at(addr_at(0))) {
     return NativeFarCall::at(addr_at(0))->return_address();
   } else {
     return NativeShortCall::at(addr_at(0))->return_address();
@@ -567,7 +567,7 @@ address NativeCall::return_address() const {
 }
 
 address NativeCall::destination() const {
-  if (UseNewCode && NativeFarCall::is_at(addr_at(0))) {
+  if (!UseTrampolines && NativeFarCall::is_at(addr_at(0))) {
     return NativeFarCall::at(addr_at(0))->destination();
   } else {
     return NativeShortCall::at(addr_at(0))->destination();
@@ -575,7 +575,7 @@ address NativeCall::destination() const {
 }
 
 address NativeCall::reloc_destination(address orig_address) {
-  if (UseNewCode && NativeFarCall::is_at(addr_at(0))) {
+  if (!UseTrampolines && NativeFarCall::is_at(addr_at(0))) {
     return NativeFarCall::at(addr_at(0))->reloc_destination(orig_address);
   } else {
     return NativeShortCall::at(addr_at(0))->reloc_destination(orig_address);
@@ -583,7 +583,7 @@ address NativeCall::reloc_destination(address orig_address) {
 }
 
 void NativeCall::set_destination(address dest) {
-  if (UseNewCode && NativeFarCall::is_at(addr_at(0))) {
+  if (!UseTrampolines && NativeFarCall::is_at(addr_at(0))) {
     NativeFarCall::at(addr_at(0))->set_destination(dest);
   } else {
     NativeShortCall::at(addr_at(0))->set_destination(dest);
@@ -591,7 +591,7 @@ void NativeCall::set_destination(address dest) {
 }
 
 void NativeCall::verify() {
-  if (UseNewCode && NativeFarCall::is_at(addr_at(0))) {
+  if (!UseTrampolines && NativeFarCall::is_at(addr_at(0))) {
     NativeFarCall::at(addr_at(0))->verify();;
   } else {
     NativeShortCall::at(addr_at(0))->verify();
@@ -599,7 +599,7 @@ void NativeCall::verify() {
 }
 
 void NativeCall::print() {
-  if (UseNewCode && NativeFarCall::is_at(addr_at(0))) {
+  if (!UseTrampolines && NativeFarCall::is_at(addr_at(0))) {
     NativeFarCall::at(addr_at(0))->print();;
   } else {
     NativeShortCall::at(addr_at(0))->print();
@@ -607,7 +607,7 @@ void NativeCall::print() {
 }
 
 bool NativeCall::set_destination_mt_safe(address dest, bool assert_lock) {
-  if (UseNewCode && NativeFarCall::is_at(addr_at(0))) {
+  if (!UseTrampolines && NativeFarCall::is_at(addr_at(0))) {
     return NativeFarCall::at(addr_at(0))->set_destination_mt_safe(dest, assert_lock);
   } else {
     return NativeShortCall::at(addr_at(0))->set_destination_mt_safe(dest, assert_lock);
@@ -615,7 +615,7 @@ bool NativeCall::set_destination_mt_safe(address dest, bool assert_lock) {
 }
 
 bool NativeCall::is_call_before(address return_address) {
-  if (UseNewCode) {
+  if (!UseTrampolines) {
     return NativeFarCall::is_call_before(return_address) ||
            NativeShortCall::is_call_before(return_address);
   } else {
@@ -624,7 +624,7 @@ bool NativeCall::is_call_before(address return_address) {
 }
 
 void NativeCall::insert(address code_pos, address entry) {
-  if (UseNewCode && NativeFarCall::is_at(code_pos)) {
+  if (!UseTrampolines && NativeFarCall::is_at(code_pos)) {
     NativeFarCall::insert(code_pos, entry);
   } else {
     NativeShortCall::insert(code_pos, entry);
@@ -632,7 +632,7 @@ void NativeCall::insert(address code_pos, address entry) {
 }
 
 void NativeCall::replace_mt_safe(address instr_addr, address code_buffer) {
-  if (UseNewCode && NativeFarCall::is_at(instr_addr)) {
+  if (!UseTrampolines && NativeFarCall::is_at(instr_addr)) {
     NativeFarCall::replace_mt_safe(instr_addr, code_buffer);
   } else {
     NativeShortCall::replace_mt_safe(instr_addr, code_buffer);
diff --git a/src/hotspot/cpu/riscv/riscv.ad b/src/hotspot/cpu/riscv/riscv.ad
index 30a70d04a4b1b..6363ac5cb3ba1 100644
--- a/src/hotspot/cpu/riscv/riscv.ad
+++ b/src/hotspot/cpu/riscv/riscv.ad
@@ -1225,22 +1225,24 @@ bool needs_acquiring_load_reserved(const Node *n)
 
 int MachCallStaticJavaNode::ret_addr_offset()
 {
-  if (UseNewCode) {
-    return 3 * NativeInstruction::instruction_size;
+  if (UseTrampolines) {
+    return 1 * NativeInstruction::instruction_size; // jal
   }
-  return 1 * NativeInstruction::instruction_size; // jal
+  return 3 * NativeInstruction::instruction_size; // auipc + ld + jalr
 }
 
 int MachCallDynamicJavaNode::ret_addr_offset()
 {
-  if (UseNewCode) {
-    return 9 * NativeInstruction::instruction_size; // movptr, auipc + ld + jal
+  if (UseTrampolines) {
+    return 7 * NativeInstruction::instruction_size; // movptr, jal
   }
-  return 7 * NativeInstruction::instruction_size; // movptr, jal
+  return 9 * NativeInstruction::instruction_size; // movptr, auipc + ld + jal
 }
 
 int MachCallRuntimeNode::ret_addr_offset() {
-  // for generated stubs the call will be
+  // For generated stubs the call will be:
+  //   auipc + ld + jalr
+  // Using trampos:
   //   jal(addr)
   // or with far branches
   //   jal(trampoline_stub)
@@ -1253,10 +1255,10 @@ int MachCallRuntimeNode::ret_addr_offset()
   //   jalr(t0) -> jalr
   CodeBlob *cb = CodeCache::find_blob(_entry_point);
   if (cb != nullptr) {
-    if (UseNewCode) {
-      return 3 * NativeInstruction::instruction_size;
+    if (UseTrampolines) {
+      return 1 * NativeInstruction::instruction_size;
     }
-    return 1 * NativeInstruction::instruction_size;
+    return 3 * NativeInstruction::instruction_size;
   } else {
     return 11 * NativeInstruction::instruction_size;
   }
@@ -2358,7 +2360,7 @@ encode %{
     // The NOP here is purely to ensure that eliding a call to
    // JVM_EnsureMaterializedForStackWalk doesn't change the code size.
     __ nop();
-    if (UseNewCode) {
+    if (!UseTrampolines) {
      __ nop();
      __ nop();
    }
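
Editorial note (not part of the patch): the behavioral switch is the shape of a Java call site. With the experimental flag enabled (`-XX:+UnlockExperimentalVMOptions -XX:+UseTrampolines`) the old scheme is kept, a single `jal` that may be redirected through a trampoline stub; with the flag off (the new default) the call is emitted as `auipc + ld + jalr`, loading the 64-bit target from an address stub, which is why `jump_link`/`emit_trampoline_stub` and `load_link`/`emit_address_stub` swap places in `MacroAssembler::trampoline_call`. The sketch below is a minimal, self-contained illustration of the `ret_addr_offset` arithmetic from the `riscv.ad` hunks only; it assumes the 4-byte instruction counts given in the diff's comments, and names such as `kInstructionSize` are invented for the example.

```c++
// Illustrative only: not HotSpot code. Mirrors the return-address offsets
// that MachCallStaticJavaNode/MachCallDynamicJavaNode::ret_addr_offset()
// report in the patch, using the instruction counts from its comments.
#include <cstdio>

constexpr int kInstructionSize = 4;  // one base RV64 instruction

// Static Java call: jal (1 insn) with trampolines, auipc + ld + jalr (3) without.
int static_java_ret_addr_offset(bool use_trampolines) {
  return (use_trampolines ? 1 : 3) * kInstructionSize;
}

// Dynamic Java call: movptr, jal (7 insns) with trampolines,
// movptr, auipc + ld + jal (9 insns) without.
int dynamic_java_ret_addr_offset(bool use_trampolines) {
  return (use_trampolines ? 7 : 9) * kInstructionSize;
}

int main() {
  std::printf("static : %d vs %d bytes\n",
              static_java_ret_addr_offset(true), static_java_ret_addr_offset(false));
  std::printf("dynamic: %d vs %d bytes\n",
              dynamic_java_ret_addr_offset(true), dynamic_java_ret_addr_offset(false));
  return 0;
}
```

The same size difference explains the extra two `__ nop()` instructions in the `encode %{` hunk: the default `auipc + ld + jalr` sequence is two instructions longer than the `jal` form, so eliding the `JVM_EnsureMaterializedForStackWalk` call needs two more nops of padding to keep the code size unchanged.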