Skip to content

Commit

Permalink
8293840: RISC-V: Remove cbuf parameter from far_call/far_jump/trampoline_call
Browse files Browse the repository at this point in the history

Reviewed-by: fyang
  • Loading branch information
feilongjiang authored and RealFYang committed Sep 16, 2022
1 parent 39cd163 commit 5feca68
Show file tree
Hide file tree
Showing 4 changed files with 15 additions and 18 deletions.
2 changes: 1 addition & 1 deletion src/hotspot/cpu/riscv/c1_CodeStubs_riscv.cpp
Expand Up @@ -290,7 +290,7 @@ void SimpleExceptionStub::emit_code(LIR_Assembler* ce) {
if (_obj->is_cpu_register()) {
__ mv(t0, _obj->as_register());
}
__ far_call(RuntimeAddress(Runtime1::entry_for(_stub)), NULL, t1);
__ far_call(RuntimeAddress(Runtime1::entry_for(_stub)), t1);
ce->add_call_info_here(_info);
debug_only(__ should_not_reach_here());
}
Expand Down
16 changes: 6 additions & 10 deletions src/hotspot/cpu/riscv/macroAssembler_riscv.cpp
Expand Up @@ -2435,7 +2435,7 @@ ATOMIC_XCHGU(xchgalwu, xchgalw)

#undef ATOMIC_XCHGU

void MacroAssembler::far_jump(Address entry, CodeBuffer *cbuf, Register tmp) {
void MacroAssembler::far_jump(Address entry, Register tmp) {
assert(ReservedCodeCacheSize < 4*G, "branch out of range");
assert(CodeCache::find_blob(entry.target()) != NULL,
"destination of far call not found in code cache");
Expand All @@ -2444,15 +2444,13 @@ void MacroAssembler::far_jump(Address entry, CodeBuffer *cbuf, Register tmp) {
// We can use auipc + jalr here because we know that the total size of
// the code cache cannot exceed 2Gb.
la_patchable(tmp, entry, offset);
if (cbuf != NULL) { cbuf->set_insts_mark(); }
jalr(x0, tmp, offset);
} else {
if (cbuf != NULL) { cbuf->set_insts_mark(); }
j(entry);
}
}

void MacroAssembler::far_call(Address entry, CodeBuffer *cbuf, Register tmp) {
void MacroAssembler::far_call(Address entry, Register tmp) {
assert(ReservedCodeCacheSize < 4*G, "branch out of range");
assert(CodeCache::find_blob(entry.target()) != NULL,
"destination of far call not found in code cache");
Expand All @@ -2461,10 +2459,8 @@ void MacroAssembler::far_call(Address entry, CodeBuffer *cbuf, Register tmp) {
// We can use auipc + jalr here because we know that the total size of
// the code cache cannot exceed 2Gb.
la_patchable(tmp, entry, offset);
if (cbuf != NULL) { cbuf->set_insts_mark(); }
jalr(x1, tmp, offset); // link
} else {
if (cbuf != NULL) { cbuf->set_insts_mark(); }
jal(entry); // link
}
}
Expand Down Expand Up @@ -2809,7 +2805,7 @@ void MacroAssembler::set_narrow_klass(Register dst, Klass* k) {

// Maybe emit a call via a trampoline. If the code cache is small
// trampolines won't be emitted.
address MacroAssembler::trampoline_call(Address entry, CodeBuffer* cbuf) {
address MacroAssembler::trampoline_call(Address entry) {
assert(JavaThread::current()->is_Compiler_thread(), "just checking");
assert(entry.rspec().type() == relocInfo::runtime_call_type ||
entry.rspec().type() == relocInfo::opt_virtual_call_type ||
Expand All @@ -2836,16 +2832,16 @@ address MacroAssembler::trampoline_call(Address entry, CodeBuffer* cbuf) {
}
}

if (cbuf != NULL) { cbuf->set_insts_mark(); }
address call_pc = pc();
relocate(entry.rspec());
if (!far_branches()) {
jal(entry.target());
} else {
jal(pc());
}
// just need to return a non-null address

postcond(pc() != badAddress);
return pc();
return call_pc;
}

address MacroAssembler::ic_call(address entry, jint method_index) {
Expand Down
7 changes: 4 additions & 3 deletions src/hotspot/cpu/riscv/macroAssembler_riscv.hpp
Expand Up @@ -598,8 +598,8 @@ class MacroAssembler: public Assembler {

// Jumps that can reach anywhere in the code cache.
// Trashes tmp.
void far_call(Address entry, CodeBuffer *cbuf = NULL, Register tmp = t0);
void far_jump(Address entry, CodeBuffer *cbuf = NULL, Register tmp = t0);
void far_call(Address entry, Register tmp = t0);
void far_jump(Address entry, Register tmp = t0);

static int far_branch_size() {
if (far_branches()) {
Expand Down Expand Up @@ -635,7 +635,8 @@ class MacroAssembler: public Assembler {
void get_polling_page(Register dest, relocInfo::relocType rtype);
address read_polling_page(Register r, int32_t offset, relocInfo::relocType rtype);

address trampoline_call(Address entry, CodeBuffer* cbuf = NULL);
// Return: the call PC or NULL if CodeCache is full.
address trampoline_call(Address entry);
address ic_call(address entry, jint method_index = 0);

// Support for memory inc/dec
Expand Down
8 changes: 4 additions & 4 deletions src/hotspot/cpu/riscv/riscv.ad
Expand Up @@ -2269,7 +2269,7 @@ encode %{
assert_cond(addr != NULL);
if (!_method) {
// A call to a runtime wrapper, e.g. new, new_typeArray_Java, uncommon_trap.
call = __ trampoline_call(Address(addr, relocInfo::runtime_call_type), &cbuf);
call = __ trampoline_call(Address(addr, relocInfo::runtime_call_type));
if (call == NULL) {
ciEnv::current()->record_failure("CodeCache is full");
return;
Expand All @@ -2278,7 +2278,7 @@ encode %{
int method_index = resolved_method_index(cbuf);
RelocationHolder rspec = _optimized_virtual ? opt_virtual_call_Relocation::spec(method_index)
: static_call_Relocation::spec(method_index);
call = __ trampoline_call(Address(addr, rspec), &cbuf);
call = __ trampoline_call(Address(addr, rspec));
if (call == NULL) {
ciEnv::current()->record_failure("CodeCache is full");
return;
Expand All @@ -2287,10 +2287,10 @@ encode %{
if (CodeBuffer::supports_shared_stubs() && _method->can_be_statically_bound()) {
// Calls of the same statically bound method can share
// a stub to the interpreter.
cbuf.shared_stub_to_interp_for(_method, cbuf.insts()->mark_off());
cbuf.shared_stub_to_interp_for(_method, call - cbuf.insts_begin());
} else {
// Emit stub for static call
address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
address stub = CompiledStaticCall::emit_to_interp_stub(cbuf, call);
if (stub == NULL) {
ciEnv::current()->record_failure("CodeCache is full");
return;
Expand Down

1 comment on commit 5feca68

@openjdk-notifier
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please sign in to comment.