Changes from all commits
Commits
31 commits
c294550
complete stub save and restore for aarch64
adinn Nov 10, 2025
53cf355
count and log stubgen blobs
adinn Nov 11, 2025
1397164
remove redundant publication of external addresses
adinn Nov 13, 2025
3f052ce
fix missing external address
adinn Nov 17, 2025
32f1320
fix external address problem
adinn Nov 17, 2025
b9adda3
fix stub publish/save checks
adinn Nov 17, 2025
16f4d1f
move handler management routines to shared code
adinn Nov 17, 2025
9f27b2b
first complete version of x86 stub save and restore
adinn Nov 18, 2025
24a7c88
fix x86 avx2 stub generation
adinn Nov 19, 2025
1da6590
fix zero/minimal build issues
adinn Nov 19, 2025
3774e5f
fix more zero issues
adinn Nov 19, 2025
2043042
correct var init on x86
adinn Nov 19, 2025
351712a
update zero port with required changes
adinn Nov 19, 2025
63d4d1c
update arm/ppc/riscv/s390 ports with required changes
adinn Nov 19, 2025
7cf62b2
fix typo
adinn Nov 19, 2025
042be3f
fix format issue
adinn Nov 20, 2025
9cd7376
adjust entry/extra address offset encoding to distinguish end from nu…
adinn Nov 25, 2025
d20c218
tighten up unsafe handler address checks
adinn Nov 25, 2025
e9442c3
use simpler AOT lookup in all x86 stub generators
adinn Nov 26, 2025
1aab99c
add missing aot+codecache+init statistic
adinn Nov 26, 2025
a6ea637
use simpler AOT lookup in all aarch64 stub generators
adinn Nov 26, 2025
022cb3c
fix typos
adinn Nov 26, 2025
29b478c
correct assert
adinn Nov 26, 2025
1a996eb
remove redundant comment
adinn Nov 26, 2025
af40c88
remove whitespace
adinn Nov 26, 2025
59cc4b2
more whitespace
adinn Nov 26, 2025
cbf7540
merge
adinn Nov 26, 2025
765d9d6
fix extras count to match number of unsafe handler regions
adinn Nov 26, 2025
f3d7bfa
put AOT address table init code under INCLUDE_CDS
adinn Nov 26, 2025
094e173
move AOT address init impl into arch tree below StubRoutines
adinn Nov 27, 2025
0597bc6
fix header declarations
adinn Nov 27, 2025
2 changes: 1 addition & 1 deletion src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp
@@ -3306,7 +3306,7 @@ void MacroAssembler::subw(Register Rd, Register Rn, RegisterOrConstant decrement
 void MacroAssembler::reinit_heapbase()
 {
   if (UseCompressedOops) {
-    if (Universe::is_fully_initialized()) {
+    if (Universe::is_fully_initialized() && !AOTCodeCache::is_on_for_dump()) {
       mov(rheapbase, CompressedOops::base());
     } else {
       lea(rheapbase, ExternalAddress(CompressedOops::base_addr()));
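The new guard matters because this stub code can now be saved into the AOT code cache: a mov of the literal CompressedOops::base() would bake the dumping JVM's heap base into the cached instructions, whereas the load through an ExternalAddress stays patchable when the blob is reloaded into a different run. A toy sketch of that choice, assuming hypothetical stand-ins (DumpingAotCode, emit_mov_imm, emit_load_indirect) for AOTCodeCache::is_on_for_dump() and the real assembler:

#include <cstdint>
#include <cstdio>

// Hypothetical model of the reinit_heapbase() decision; none of these
// names are HotSpot APIs.
static bool DumpingAotCode = true;             // AOTCodeCache::is_on_for_dump()
static uintptr_t heap_base = 0x7f0000000000;   // CompressedOops::base()
static uintptr_t* heap_base_slot = &heap_base; // CompressedOops::base_addr()

// Bakes the current value into the emitted code; fast but not relocatable.
static void emit_mov_imm(uintptr_t value) {
  std::printf("mov rheapbase, #0x%llx\n", (unsigned long long)value);
}

// Loads through a patchable slot; an AOT loader can rebind the slot when
// the cached code runs in a new JVM with a different heap base.
static void emit_load_indirect(uintptr_t* slot) {
  std::printf("ldr rheapbase, [%p]\n", (void*)slot);
}

int main() {
  if (!DumpingAotCode) {
    emit_mov_imm(heap_base);            // normal run: immediate is fine
  } else {
    emit_load_indirect(heap_base_slot); // AOT dump: must stay patchable
  }
  return 0;
}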
2 changes: 1 addition & 1 deletion src/hotspot/cpu/aarch64/runtime_aarch64.cpp
@@ -290,7 +290,7 @@ ExceptionBlob* OptoRuntime::generate_exception_blob() {
   assert(SimpleRuntimeFrame::framesize % 4 == 0, "sp not 16-byte aligned");

   const char* name = OptoRuntime::stub_name(StubId::c2_exception_id);
-  CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::C2Blob, (uint)BlobId::c2_exception_id, name);
+  CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::C2Blob, BlobId::c2_exception_id);
   if (blob != nullptr) {
     return blob->as_exception_blob();
   }
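This hunk reflects the "use simpler AOT lookup" commits above: load_code_blob now takes the BlobId directly, so callers no longer pass a casted id plus a separately computed name. A minimal sketch of that API shape, with toy types standing in for the real AOTCodeCache:

#include <cstdio>

// Toy stand-ins; only the call shape is the point, not HotSpot's types.
enum class BlobId { c2_exception_id };

struct CodeBlob { const char* name; };

static const char* blob_name(BlobId id) {
  // HotSpot derives the name from the id internally; hard-coded here.
  return id == BlobId::c2_exception_id ? "c2_exception_blob" : "unknown";
}

// New-style lookup: one id argument, so a caller can no longer pass a
// mismatched (id, name) pair.
static CodeBlob* load_code_blob(BlobId id) {
  std::printf("AOT lookup for %s\n", blob_name(id));
  return nullptr; // model a cache miss
}

int main() {
  if (load_code_blob(BlobId::c2_exception_id) == nullptr) {
    std::printf("cache miss: generate the blob normally\n");
  }
  return 0;
}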
50 changes: 45 additions & 5 deletions src/hotspot/cpu/aarch64/stubDeclarations_aarch64.hpp
@@ -84,8 +84,7 @@
   do_stub(compiler, count_positives) \
   do_arch_entry(aarch64, compiler, count_positives, count_positives, \
                 count_positives) \
-  do_stub(compiler, count_positives_long) \
-  do_arch_entry(aarch64, compiler, count_positives_long, \
+  do_arch_entry(aarch64, compiler, count_positives, \
                 count_positives_long, count_positives_long) \
   do_stub(compiler, compare_long_string_LL) \
   do_arch_entry(aarch64, compiler, compare_long_string_LL, \
@@ -108,8 +107,9 @@
   do_stub(compiler, string_indexof_linear_ul) \
   do_arch_entry(aarch64, compiler, string_indexof_linear_ul, \
                 string_indexof_linear_ul, string_indexof_linear_ul) \
-  /* this uses the entry for ghash_processBlocks */ \
-  do_stub(compiler, ghash_processBlocks_wide) \
+  do_stub(compiler, ghash_processBlocks_small) \
+  do_arch_entry(aarch64, compiler, ghash_processBlocks_small, \
+                ghash_processBlocks_small, ghash_processBlocks_small) \


#define STUBGEN_FINAL_BLOBS_ARCH_DO(do_stub, \
@@ -139,9 +139,49 @@
   do_stub(final, spin_wait) \
   do_arch_entry_init(aarch64, final, spin_wait, spin_wait, \
                      spin_wait, empty_spin_wait) \
-  /* stub only -- entries are not stored in StubRoutines::aarch64 */ \
+  /* n.b. these are not the same as the generic atomic stubs */ \
   do_stub(final, atomic_entry_points) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_fetch_add_4_impl, atomic_fetch_add_4_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_fetch_add_8_impl, atomic_fetch_add_8_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_fetch_add_4_relaxed_impl, \
+                atomic_fetch_add_4_relaxed_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_fetch_add_8_relaxed_impl, \
+                atomic_fetch_add_8_relaxed_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_xchg_4_impl, atomic_xchg_4_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_xchg_8_impl, atomic_xchg_8_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_cmpxchg_1_impl, atomic_cmpxchg_1_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_cmpxchg_4_impl, atomic_cmpxchg_4_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_cmpxchg_8_impl, atomic_cmpxchg_8_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_cmpxchg_1_relaxed_impl, \
+                atomic_cmpxchg_1_relaxed_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_cmpxchg_4_relaxed_impl, \
+                atomic_cmpxchg_4_relaxed_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_cmpxchg_8_relaxed_impl, \
+                atomic_cmpxchg_8_relaxed_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_cmpxchg_4_release_impl, \
+                atomic_cmpxchg_4_release_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_cmpxchg_8_release_impl, \
+                atomic_cmpxchg_8_release_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_cmpxchg_4_seq_cst_impl, \
+                atomic_cmpxchg_4_seq_cst_impl) \
+  do_arch_entry(aarch64, final, atomic_entry_points, \
+                atomic_cmpxchg_8_seq_cst_impl, \
+                atomic_cmpxchg_8_seq_cst_impl) \


#endif // CPU_AARCH64_STUBDECLARATIONS_HPP
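These STUBGEN_*_ARCH_DO lists are X-macros: do_stub and do_arch_entry arrive as macro parameters, so one list can expand into entry-point declarations, initializers, or, as this PR needs, per-entry AOT address-table records for save and restore. A minimal sketch of the pattern, with an illustrative list rather than the real HotSpot one (the real macros take more parameters):

#include <cstdio>

// Illustrative X-macro list; not the real aarch64 stub list.
#define MY_STUBS_DO(do_stub, do_entry) \
  do_stub(spin_wait) \
  do_entry(spin_wait, spin_wait_entry) \
  do_stub(atomic_entry_points) \
  do_entry(atomic_entry_points, atomic_fetch_add_4_impl) \
  do_entry(atomic_entry_points, atomic_xchg_4_impl)

// Expansion 1: declare one entry-point slot per do_entry line.
#define IGNORE_STUB(stub)
#define DECLARE_ENTRY(stub, entry) static void* entry = nullptr;
MY_STUBS_DO(IGNORE_STUB, DECLARE_ENTRY)

// Expansion 2: walk the same list again, e.g. to emit an address table.
#define PRINT_STUB(stub)         std::printf("stub  %s\n", #stub);
#define PRINT_ENTRY(stub, entry) std::printf("entry %s::%s\n", #stub, #entry);

int main() {
  MY_STUBS_DO(PRINT_STUB, PRINT_ENTRY)
  return 0;
}

Because every expansion walks the identical list, the saved address table and the restored entry points cannot drift out of sync, which is the property the stub save/restore commits above rely on.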