Skip to content
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
37 commits
Select commit Hold shift + click to select a range
3e2aab6
Add Unsafe.setMemory as intrinsic
asgibbons Mar 27, 2024
2334b03
Added actual intrinsic
asgibbons Mar 27, 2024
6eebcbd
Removed setMemory1; debugged intrinsic code
asgibbons Mar 28, 2024
7c73856
Test removing intrinsic
asgibbons Mar 28, 2024
74c47e2
Add benchmark
asgibbons Mar 28, 2024
6e283bc
Restore intrinsic
asgibbons Mar 28, 2024
44c24ec
Address review comment
asgibbons Mar 29, 2024
b17a1f4
Fixed bug - incorrect interface to *_fill_entry
asgibbons Mar 29, 2024
401a2a9
Clean up code for PR
asgibbons Mar 29, 2024
c5cb30c
Use non-sse fill (old left in)
asgibbons Apr 1, 2024
6ee69c8
Remove dead code
asgibbons Apr 1, 2024
3aa60a4
Addressing review comments.
asgibbons Apr 2, 2024
8bed156
Fix Windows
asgibbons Apr 3, 2024
b025318
Fixed generate_fill when count > 0x80000000
asgibbons Apr 5, 2024
fd6f04f
Oops
asgibbons Apr 6, 2024
f81aaa9
Add movq to locate_operand
asgibbons Apr 8, 2024
b0ac857
Address review comments (#15)
asgibbons Apr 11, 2024
95230e2
Set memory test (#16)
asgibbons Apr 11, 2024
41ffcc3
Merge master
asgibbons Apr 11, 2024
b99499a
Fix whitespace error.
asgibbons Apr 11, 2024
89db3eb
Addressing more review comments
asgibbons Apr 11, 2024
970c575
Addressing yet more review comments
asgibbons Apr 12, 2024
6e731c8
Even more review comments
asgibbons Apr 12, 2024
405e4e0
Change fill routines
asgibbons Apr 15, 2024
95b0a34
Rename UnsafeCopyMemory{,Mark} to UnsafeMemory{Access,Mark} (#19)
asgibbons Apr 15, 2024
44cc91b
Only add a memory mark for byte unaligned fill
asgibbons Apr 15, 2024
824fb60
Set memory test (#21)
asgibbons Apr 15, 2024
80b5a0c
Set memory test (#22)
asgibbons Apr 15, 2024
856464e
Set memory test (#23)
asgibbons Apr 15, 2024
116d7dd
Merge branch 'openjdk:master' into setMemory
asgibbons Apr 15, 2024
113aa90
Fix memory mark after sync to upstream
asgibbons Apr 15, 2024
7a1d67e
Add enter() and leave(); remove Windows-specific register stuff
asgibbons Apr 16, 2024
dccf6b6
Address review comments; update copyright years
asgibbons Apr 19, 2024
dd0094e
Review comments
asgibbons Apr 19, 2024
1961624
Long to short jmp; other cleanup
asgibbons Apr 19, 2024
c129016
Fix UnsafeCopyMemoryMark scope issue
asgibbons Apr 20, 2024
1122b50
Merge branch 'openjdk:master' into setMemory
asgibbons Apr 20, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions src/hotspot/cpu/aarch64/stubGenerator_aarch64.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -8336,6 +8336,11 @@ class StubGenerator: public StubCodeGenerator {
UnsafeCopyMemory::create_table(8);
}

// Initialize table for fill memory check.
// NOTE(review): this must be UnsafeSetMemory, not UnsafeCopyMemory — the
// copy-memory table is already created immediately above, and the
// setMemory intrinsic's page-fault recovery looks its PCs up in
// UnsafeSetMemory::_table (matching the arm/ppc/riscv/x86 stub generators
// in this same change).
if (UnsafeSetMemory::_table == nullptr) {
  UnsafeSetMemory::create_table(8);
}

if (UseCRC32Intrinsics) {
// set table address before stub generation which use it
StubRoutines::_crc_table_adr = (address)StubRoutines::aarch64::_crc_table;
Expand Down
4 changes: 4 additions & 0 deletions src/hotspot/cpu/arm/stubGenerator_arm.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3138,6 +3138,10 @@ class StubGenerator: public StubCodeGenerator {
UnsafeCopyMemory::create_table(32);
}

if (UnsafeSetMemory::_table == nullptr) {
UnsafeSetMemory::create_table(32);
}

// integer division used both by interpreter and compiler
StubRoutines::Arm::_idiv_irem_entry = generate_idiv_irem();

Expand Down
4 changes: 4 additions & 0 deletions src/hotspot/cpu/ppc/stubGenerator_ppc.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -4749,6 +4749,10 @@ class StubGenerator: public StubCodeGenerator {
UnsafeCopyMemory::create_table(8);
}

if (UnsafeSetMemory::_table == nullptr) {
UnsafeSetMemory::create_table(8);
}

// Build this early so it's available for the interpreter.
StubRoutines::_throw_StackOverflowError_entry =
generate_throw_exception("StackOverflowError throw_exception",
Expand Down
4 changes: 4 additions & 0 deletions src/hotspot/cpu/riscv/stubGenerator_riscv.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -5483,6 +5483,10 @@ static const int64_t right_3_bits = right_n_bits(3);
UnsafeCopyMemory::create_table(8);
}

if (UnsafeSetMemory::_table == nullptr) {
UnsafeSetMemory::create_table(8);
}

StubRoutines::_call_stub_entry =
generate_call_stub(StubRoutines::_call_stub_return_address);

Expand Down
5 changes: 5 additions & 0 deletions src/hotspot/cpu/x86/stubGenerator_x86_32.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -4125,6 +4125,11 @@ class StubGenerator: public StubCodeGenerator {
UnsafeCopyMemory::create_table(16);
}

// Initialize table for fill memory check.
if (UnsafeSetMemory::_table == nullptr) {
UnsafeSetMemory::create_table(8);
}

StubRoutines::x86::_verify_mxcsr_entry = generate_verify_mxcsr();
StubRoutines::x86::_verify_fpu_cntrl_wrd_entry = generate_verify_fpu_cntrl_wrd();
StubRoutines::x86::_d2i_wrapper = generate_d2i_wrapper(T_INT, CAST_FROM_FN_PTR(address, SharedRuntime::d2i));
Expand Down
4 changes: 2 additions & 2 deletions src/hotspot/cpu/x86/stubGenerator_x86_64_arraycopy.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -152,10 +152,10 @@ void StubGenerator::generate_arraycopy_stubs() {
StubRoutines::_arrayof_jshort_fill = generate_fill(T_SHORT, true, "arrayof_jshort_fill");
StubRoutines::_arrayof_jint_fill = generate_fill(T_INT, true, "arrayof_jint_fill");

// This file (stubGenerator_x86_64_arraycopy.cpp) is compiled only for
// 64-bit targets, so the _LP64 guard is unnecessary; the commented-out
// #ifdef/#endif dead code is removed rather than left in.
StubRoutines::_unsafe_setmemory =
    generate_unsafe_setmemory("unsafe_setmemory", StubRoutines::_jbyte_fill);

// We don't generate specialized code for HeapWord-aligned source
// arrays, so just use the code we've already generated
Expand Down
9 changes: 8 additions & 1 deletion src/hotspot/os_cpu/aix_ppc/os_aix_ppc.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -341,11 +341,15 @@ bool PosixSignals::pd_hotspot_signal_handler(int sig, siginfo_t* info,
CodeBlob* cb = CodeCache::find_blob(pc);
CompiledMethod* nm = cb ? cb->as_compiled_method_or_null() : nullptr;
bool is_unsafe_arraycopy = (thread->doing_unsafe_access() && UnsafeCopyMemory::contains_pc(pc));
if ((nm != nullptr && nm->has_unsafe_access()) || is_unsafe_arraycopy) {
bool is_unsafe_setmemory = (thread->doing_unsafe_access() && UnsafeSetMemory::contains_pc(pc));
if ((nm != nullptr && nm->has_unsafe_access()) || is_unsafe_arraycopy || is_unsafe_setmemory) {
address next_pc = pc + 4;
if (is_unsafe_arraycopy) {
next_pc = UnsafeCopyMemory::page_error_continue_pc(pc);
}
if (is_unsafe_setmemory) {
next_pc = UnsafeSetMemory::page_error_continue_pc(pc);
}
next_pc = SharedRuntime::handle_unsafe_access(thread, next_pc);
os::Posix::ucontext_set_pc(uc, next_pc);
return true;
Expand All @@ -371,6 +375,9 @@ bool PosixSignals::pd_hotspot_signal_handler(int sig, siginfo_t* info,
if (UnsafeCopyMemory::contains_pc(pc)) {
next_pc = UnsafeCopyMemory::page_error_continue_pc(pc);
}
if (UnsafeSetMemory::contains_pc(pc)) {
next_pc = UnsafeSetMemory::page_error_continue_pc(pc);
}
next_pc = SharedRuntime::handle_unsafe_access(thread, next_pc);
os::Posix::ucontext_set_pc(uc, next_pc);
return true;
Expand Down
9 changes: 8 additions & 1 deletion src/hotspot/os_cpu/bsd_aarch64/os_bsd_aarch64.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -258,11 +258,15 @@ bool PosixSignals::pd_hotspot_signal_handler(int sig, siginfo_t* info,
CodeBlob* cb = CodeCache::find_blob(pc);
CompiledMethod* nm = (cb != nullptr) ? cb->as_compiled_method_or_null() : nullptr;
bool is_unsafe_arraycopy = (thread->doing_unsafe_access() && UnsafeCopyMemory::contains_pc(pc));
if ((nm != nullptr && nm->has_unsafe_access()) || is_unsafe_arraycopy) {
bool is_unsafe_setmemory = (thread->doing_unsafe_access() && UnsafeSetMemory::contains_pc(pc));
if ((nm != nullptr && nm->has_unsafe_access()) || is_unsafe_arraycopy || is_unsafe_setmemory) {
address next_pc = pc + NativeCall::instruction_size;
if (is_unsafe_arraycopy) {
next_pc = UnsafeCopyMemory::page_error_continue_pc(pc);
}
if (is_unsafe_setmemory) {
next_pc = UnsafeSetMemory::page_error_continue_pc(pc);
}
stub = SharedRuntime::handle_unsafe_access(thread, next_pc);
}
} else if (sig == SIGILL && nativeInstruction_at(pc)->is_stop()) {
Expand Down Expand Up @@ -302,6 +306,9 @@ bool PosixSignals::pd_hotspot_signal_handler(int sig, siginfo_t* info,
if (UnsafeCopyMemory::contains_pc(pc)) {
next_pc = UnsafeCopyMemory::page_error_continue_pc(pc);
}
if (UnsafeSetMemory::contains_pc(pc)) {
next_pc = UnsafeSetMemory::page_error_continue_pc(pc);
}
stub = SharedRuntime::handle_unsafe_access(thread, next_pc);
}

Expand Down
7 changes: 7 additions & 0 deletions src/hotspot/os_cpu/linux_aarch64/os_linux_aarch64.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -241,11 +241,15 @@ bool PosixSignals::pd_hotspot_signal_handler(int sig, siginfo_t* info,
CodeBlob* cb = CodeCache::find_blob(pc);
CompiledMethod* nm = (cb != nullptr) ? cb->as_compiled_method_or_null() : nullptr;
bool is_unsafe_arraycopy = (thread->doing_unsafe_access() && UnsafeCopyMemory::contains_pc(pc));
bool is_unsafe_setmemory = (thread->doing_unsafe_access() && UnsafeSetMemory::contains_pc(pc));
if ((nm != nullptr && nm->has_unsafe_access()) || is_unsafe_arraycopy) {
address next_pc = pc + NativeCall::instruction_size;
if (is_unsafe_arraycopy) {
next_pc = UnsafeCopyMemory::page_error_continue_pc(pc);
}
if (is_unsafe_setmemory) {
next_pc = UnsafeSetMemory::page_error_continue_pc(pc);
}
stub = SharedRuntime::handle_unsafe_access(thread, next_pc);
}
} else if (sig == SIGILL && nativeInstruction_at(pc)->is_stop()) {
Expand Down Expand Up @@ -289,6 +293,9 @@ bool PosixSignals::pd_hotspot_signal_handler(int sig, siginfo_t* info,
if (UnsafeCopyMemory::contains_pc(pc)) {
next_pc = UnsafeCopyMemory::page_error_continue_pc(pc);
}
if (UnsafeSetMemory::contains_pc(pc)) {
next_pc = UnsafeSetMemory::page_error_continue_pc(pc);
}
stub = SharedRuntime::handle_unsafe_access(thread, next_pc);
}

Expand Down
23 changes: 16 additions & 7 deletions src/hotspot/os_cpu/linux_arm/os_linux_arm.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -324,17 +324,23 @@ bool PosixSignals::pd_hotspot_signal_handler(int sig, siginfo_t* info,
// Do not crash the VM in such a case.
CodeBlob* cb = CodeCache::find_blob(pc);
CompiledMethod* nm = (cb != nullptr) ? cb->as_compiled_method_or_null() : nullptr;
if ((nm != nullptr && nm->has_unsafe_access()) || (thread->doing_unsafe_access() && UnsafeCopyMemory::contains_pc(pc))) {
if ((nm != nullptr && nm->has_unsafe_access()) ||
(thread->doing_unsafe_access() &&
(UnsafeCopyMemory::contains_pc(pc) ||
UnsafeSetMemory::contains_pc(pc)))) {
unsafe_access = true;
}
} else if (sig == SIGSEGV &&
MacroAssembler::uses_implicit_null_check(info->si_addr)) {
// Determination of interpreter/vtable stub/compiled code null exception
CodeBlob* cb = CodeCache::find_blob(pc);
if (cb != nullptr) {
stub = SharedRuntime::continuation_for_implicit_exception(thread, pc, SharedRuntime::IMPLICIT_NULL);
}
} else if (sig == SIGILL && *(int *)pc == NativeInstruction::not_entrant_illegal_instruction) {
// Determination of interpreter/vtable stub/compiled code null exception
CodeBlob* cb = CodeCache::find_blob(pc);
if (cb != nullptr) {
stub = SharedRuntime::continuation_for_implicit_exception(
thread, pc, SharedRuntime::IMPLICIT_NULL);
}
} else if (sig == SIGILL &&
*(int*)pc ==
NativeInstruction::not_entrant_illegal_instruction) {
// Not entrant
stub = SharedRuntime::get_handle_wrong_method_stub();
}
Expand Down Expand Up @@ -362,6 +368,9 @@ bool PosixSignals::pd_hotspot_signal_handler(int sig, siginfo_t* info,
if (UnsafeCopyMemory::contains_pc(pc)) {
next_pc = UnsafeCopyMemory::page_error_continue_pc(pc);
}
if (UnsafeSetMemory::contains_pc(pc)) {
next_pc = UnsafeSetMemory::page_error_continue_pc(pc);
}
#ifdef __thumb__
if (uc->uc_mcontext.arm_cpsr & PSR_T_BIT) {
next_pc = (address)((intptr_t)next_pc | 0x1);
Expand Down
9 changes: 8 additions & 1 deletion src/hotspot/os_cpu/linux_ppc/os_linux_ppc.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -356,11 +356,15 @@ bool PosixSignals::pd_hotspot_signal_handler(int sig, siginfo_t* info,
CodeBlob* cb = CodeCache::find_blob(pc);
CompiledMethod* nm = (cb != nullptr) ? cb->as_compiled_method_or_null() : nullptr;
bool is_unsafe_arraycopy = (thread->doing_unsafe_access() && UnsafeCopyMemory::contains_pc(pc));
if ((nm != nullptr && nm->has_unsafe_access()) || is_unsafe_arraycopy) {
bool is_unsafe_setmemory = (thread->doing_unsafe_access() && UnsafeSetMemory::contains_pc(pc));
if ((nm != nullptr && nm->has_unsafe_access()) || is_unsafe_arraycopy || is_unsafe_setmemory) {
address next_pc = pc + 4;
if (is_unsafe_arraycopy) {
next_pc = UnsafeCopyMemory::page_error_continue_pc(pc);
}
if (is_unsafe_setmemory) {
next_pc = UnsafeSetMemory::page_error_continue_pc(pc);
}
next_pc = SharedRuntime::handle_unsafe_access(thread, next_pc);
os::Posix::ucontext_set_pc(uc, next_pc);
return true;
Expand All @@ -382,6 +386,9 @@ bool PosixSignals::pd_hotspot_signal_handler(int sig, siginfo_t* info,
if (UnsafeCopyMemory::contains_pc(pc)) {
next_pc = UnsafeCopyMemory::page_error_continue_pc(pc);
}
if (UnsafeSetMemory::contains_pc(pc)) {
next_pc = UnsafeSetMemory::page_error_continue_pc(pc);
}
next_pc = SharedRuntime::handle_unsafe_access(thread, next_pc);
os::Posix::ucontext_set_pc(uc, next_pc);
return true;
Expand Down
9 changes: 8 additions & 1 deletion src/hotspot/os_cpu/linux_riscv/os_linux_riscv.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -231,11 +231,15 @@ bool PosixSignals::pd_hotspot_signal_handler(int sig, siginfo_t* info,
CodeBlob* cb = CodeCache::find_blob(pc);
CompiledMethod* nm = (cb != nullptr) ? cb->as_compiled_method_or_null() : nullptr;
bool is_unsafe_arraycopy = (thread->doing_unsafe_access() && UnsafeCopyMemory::contains_pc(pc));
if ((nm != nullptr && nm->has_unsafe_access()) || is_unsafe_arraycopy) {
bool is_unsafe_setmemory = (thread->doing_unsafe_access() && UnsafeSetMemory::contains_pc(pc));
if ((nm != nullptr && nm->has_unsafe_access()) || is_unsafe_arraycopy || is_unsafe_setmemory) {
address next_pc = Assembler::locate_next_instruction(pc);
if (is_unsafe_arraycopy) {
next_pc = UnsafeCopyMemory::page_error_continue_pc(pc);
}
if (is_unsafe_setmemory) {
next_pc = UnsafeSetMemory::page_error_continue_pc(pc);
}
stub = SharedRuntime::handle_unsafe_access(thread, next_pc);
}
} else if (sig == SIGILL && nativeInstruction_at(pc)->is_stop()) {
Expand Down Expand Up @@ -275,6 +279,9 @@ bool PosixSignals::pd_hotspot_signal_handler(int sig, siginfo_t* info,
if (UnsafeCopyMemory::contains_pc(pc)) {
next_pc = UnsafeCopyMemory::page_error_continue_pc(pc);
}
if (UnsafeSetMemory::contains_pc(pc)) {
next_pc = UnsafeSetMemory::page_error_continue_pc(pc);
}
stub = SharedRuntime::handle_unsafe_access(thread, next_pc);
}

Expand Down
49 changes: 49 additions & 0 deletions src/hotspot/share/utilities/copy.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -208,3 +208,52 @@ void Copy::conjoint_copy(const void* src, void* dst, size_t byte_count, size_t e
void Copy::conjoint_swap(const void* src, void* dst, size_t byte_count, size_t elem_size) {
CopySwap::conjoint_swap_if_needed<true>(src, dst, byte_count, elem_size);
}

// Fill bytes; larger units are filled atomically if everything is aligned.
// The store unit used is the largest power of two (up to sizeof(jlong))
// that divides both the destination address and the size, so a concurrent
// reader never observes a torn unit of that width.
void Copy::fill_to_memory_atomic(void* to, size_t size, jubyte value) {
  address dst = (address)to;
  // One modulus test covers both operands: a low bit set in either the
  // address or the size disqualifies the corresponding alignment.
  uintptr_t bits = (uintptr_t)to | (uintptr_t)size;
  if (bits % sizeof(jlong) == 0) {
    jlong fill = (julong)((jubyte)value); // zero-extend
    if (fill != 0) {
      // Broadcast the byte into all eight lanes, e.g. 0x41 -> 0x4141414141414141.
      fill += fill << 8;
      fill += fill << 16;
      fill += fill << 32;
    }
    // Copy::fill_to_jlongs_atomic((jlong*) dst, size / sizeof(jlong));
    for (uintptr_t off = 0; off < size; off += sizeof(jlong)) {
      *(jlong*)(dst + off) = fill;
    }
  } else if (bits % sizeof(jint) == 0) {
    jint fill = (juint)((jubyte)value); // zero-extend
    if (fill != 0) {
      // Broadcast the byte into all four lanes.
      fill += fill << 8;
      fill += fill << 16;
    }
    // Copy::fill_to_jints_atomic((jint*) dst, size / sizeof(jint));
    for (uintptr_t off = 0; off < size; off += sizeof(jint)) {
      *(jint*)(dst + off) = fill;
    }
  } else if (bits % sizeof(jshort) == 0) {
    jshort fill = (jushort)((jubyte)value); // zero-extend
    // Broadcast the byte into both lanes.
    fill += (jshort)(fill << 8);
    // Copy::fill_to_jshorts_atomic((jshort*) dst, size / sizeof(jshort));
    for (uintptr_t off = 0; off < size; off += sizeof(jshort)) {
      *(jshort*)(dst + off) = fill;
    }
  } else {
    // Not aligned, so no need to be atomic.
#ifdef MUSL_LIBC
    // This code is used by Unsafe and may hit the next page after truncation
    // of mapped memory. Therefore, we use volatile to prevent compilers from
    // replacing the loop by memset which may not trigger SIGBUS as needed
    // (observed on Alpine Linux x86_64)
    jbyte fill = value;
    for (uintptr_t off = 0; off < size; off += sizeof(jbyte)) {
      *(volatile jbyte*)(dst + off) = fill;
    }
#else
    Copy::fill_to_bytes(dst, size, value);
#endif
  }
}
53 changes: 2 additions & 51 deletions src/hotspot/share/utilities/copy.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -282,57 +282,8 @@ class Copy : AllStatic {
// longs, words, or ints, store to those units atomically.
// The largest atomic transfer unit is 8 bytes, or the largest power
// of two which divides both to and size, whichever is smaller.

// Fill bytes; larger units are filled atomically if everything is aligned.
inline static void fill_to_memory_atomic(void* to, size_t size,
jubyte value) {
address dst = (address)to;
uintptr_t bits = (uintptr_t)to | (uintptr_t)size;
if (bits % sizeof(jlong) == 0) {
jlong fill = (julong)((jubyte)value); // zero-extend
if (fill != 0) {
fill += fill << 8;
fill += fill << 16;
fill += fill << 32;
}
// Copy::fill_to_jlongs_atomic((jlong*) dst, size / sizeof(jlong));
for (uintptr_t off = 0; off < size; off += sizeof(jlong)) {
*(jlong*)(dst + off) = fill;
}
} else if (bits % sizeof(jint) == 0) {
jint fill = (juint)((jubyte)value); // zero-extend
if (fill != 0) {
fill += fill << 8;
fill += fill << 16;
}
// Copy::fill_to_jints_atomic((jint*) dst, size / sizeof(jint));
for (uintptr_t off = 0; off < size; off += sizeof(jint)) {
*(jint*)(dst + off) = fill;
}
} else if (bits % sizeof(jshort) == 0) {
jshort fill = (jushort)((jubyte)value); // zero-extend
fill += (jshort)(fill << 8);
// Copy::fill_to_jshorts_atomic((jshort*) dst, size / sizeof(jshort));
for (uintptr_t off = 0; off < size; off += sizeof(jshort)) {
*(jshort*)(dst + off) = fill;
}
} else {
// Not aligned, so no need to be atomic.
#ifdef MUSL_LIBC
// This code is used by Unsafe and may hit the next page after truncation
// of mapped memory. Therefore, we use volatile to prevent compilers from
// replacing the loop by memset which may not trigger SIGBUS as needed
// (observed on Alpine Linux x86_64)
jbyte fill = value;
for (uintptr_t off = 0; off < size; off += sizeof(jbyte)) {
*(volatile jbyte*)(dst + off) = fill;
}
#else
Copy::fill_to_bytes(dst, size, value);
#endif
}
}

static void fill_to_memory_atomic(void* to, size_t size, jubyte value = 0);

// Zero-fill methods

// Zero word-aligned words, not atomic on each word
Expand Down