8312018: Improve reservation of class space and CDS
8313669: Reduced chance for zero-based nKlass encoding since JDK-8296565

Reviewed-by: iklam, adinn
tstuefe committed Aug 30, 2023
1 parent dd64a4a commit 89d18ea
Showing 17 changed files with 747 additions and 115 deletions.
9 changes: 9 additions & 0 deletions src/hotspot/os/aix/os_aix.cpp
@@ -2166,6 +2166,15 @@ char* os::pd_attempt_reserve_memory_at(char* requested_addr, size_t bytes, bool
return addr;
}

size_t os::vm_min_address() {
// On AIX, we need to make sure we don't block the sbrk. However, this is
// done at actual reservation time, where we honor a "no-mmap" area following
// the break. See MaxExpectedDataSegmentSize. So we can return a very low
// address here.
assert(is_aligned(_vm_min_address_default, os::vm_allocation_granularity()), "Sanity");
return _vm_min_address_default;
}

// Used to convert frequent JVM_Yield() to nops
bool os::dont_yield() {
return DontYieldALot;
11 changes: 11 additions & 0 deletions src/hotspot/os/bsd/os_bsd.cpp
@@ -1822,6 +1822,17 @@ char* os::pd_attempt_reserve_memory_at(char* requested_addr, size_t bytes, bool
return nullptr;
}

size_t os::vm_min_address() {
#ifdef __APPLE__
// On macOS, the lowest 4G are denied to the application (see the "__PAGEZERO"
// segment and the -pagezero_size linker option).
return 4 * G;
#else
assert(is_aligned(_vm_min_address_default, os::vm_allocation_granularity()), "Sanity");
return _vm_min_address_default;
#endif
}

// Used to convert frequent JVM_Yield() to nops
bool os::dont_yield() {
return DontYieldALot;
18 changes: 18 additions & 0 deletions src/hotspot/os/linux/os_linux.cpp
@@ -70,6 +70,7 @@
#include "services/runtimeService.hpp"
#include "utilities/align.hpp"
#include "utilities/checkedCast.hpp"
#include "utilities/debug.hpp"
#include "utilities/decoder.hpp"
#include "utilities/defaultStream.hpp"
#include "utilities/events.hpp"
@@ -4244,6 +4245,23 @@ char* os::pd_attempt_reserve_memory_at(char* requested_addr, size_t bytes, bool
return nullptr;
}

size_t os::vm_min_address() {
// Determined by sysctl vm.mmap_min_addr. It exists as a safety zone to prevent
// NULL pointer dereferences.
// Most distros set this value to 64 KB. It *can* be zero, but rarely is. Here,
// we impose a minimum value if vm.mmap_min_addr is too low, for increased protection.
static size_t value = 0;
if (value == 0) {
assert(is_aligned(_vm_min_address_default, os::vm_allocation_granularity()), "Sanity");
FILE* f = fopen("/proc/sys/vm/mmap_min_addr", "r");
if (f != nullptr) { // may legitimately fail, e.g. in a sandboxed process
if (fscanf(f, "%zu", &value) != 1) {
value = _vm_min_address_default;
}
fclose(f);
}
// If the file could not be opened, value is still 0 and the MAX2 below applies the default.
value = MAX2(_vm_min_address_default, value);
}
return value;
}

// Used to convert frequent JVM_Yield() to nops
bool os::dont_yield() {
return DontYieldALot;
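For reference, a minimal sketch (not part of the commit) of how os::vm_min_address() is meant to be consumed. The helper name is hypothetical; the actual consumer is the new os::attempt_reserve_memory_between(), whose diff is not in this excerpt:

  // Hypothetical helper: clamp the lower bound of an attach-point search so
  // that we never probe below the platform's minimum usable address.
  static char* clamp_search_floor(char* candidate_min) {
    const size_t min_addr = os::vm_min_address(); // e.g. >= 64 KB on most Linux distros
    return MAX2(candidate_min, (char*)min_addr);
  }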
5 changes: 5 additions & 0 deletions src/hotspot/os/windows/os_windows.cpp
@@ -3435,6 +3435,11 @@ char* os::pd_attempt_reserve_memory_at(char* addr, size_t bytes, bool exec) {
return res;
}

size_t os::vm_min_address() {
assert(is_aligned(_vm_min_address_default, os::vm_allocation_granularity()), "Sanity");
return _vm_min_address_default;
}

char* os::pd_attempt_map_memory_to_file_at(char* requested_addr, size_t bytes, int file_desc) {
assert(file_desc >= 0, "file_desc is not valid");
return map_memory_to_file(requested_addr, bytes, file_desc);
8 changes: 6 additions & 2 deletions src/hotspot/share/cds/metaspaceShared.cpp
@@ -1328,8 +1328,12 @@ char* MetaspaceShared::reserve_address_space_for_archives(FileMapInfo* static_ma
total_space_rs = ReservedSpace(total_range_size, archive_space_alignment,
os::vm_page_size(), (char*) base_address);
} else {
// Reserve at any address, but leave it up to the platform to choose a good one.
total_space_rs = Metaspace::reserve_address_space_for_compressed_classes(total_range_size);
// We did not manage to reserve at the preferred address, or were instructed to relocate. In that
// case we reserve wherever possible, but the start address needs to be usable as the narrow Klass
// encoding base since the archived heap objects contain nKlass IDs precalculated toward the start
// of the shared Metaspace. That prevents us from using zero-based encoding and therefore we won't
// try allocating in low-address regions.
total_space_rs = Metaspace::reserve_address_space_for_compressed_classes(total_range_size, false /* try_in_low_address_ranges */);
}

if (!total_space_rs.is_reserved()) {
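For reference, the constraint described in the comment above, sketched in simplified form (the decode logic is paraphrased from CompressedKlassPointers; this is not code from the commit):

  // A narrow Klass ID decodes to an address as: base + ((uintptr_t)nKlass << shift).
  static inline Klass* decode_nklass(narrowKlass nk, address base, int shift) {
    return (Klass*)(base + ((uintptr_t)nk << shift));
  }
  // Archived heap objects carry nKlass values precomputed at dump time relative
  // to the start of the shared metaspace. At runtime, 'base' must therefore
  // equal the actual mapping start, so zero-based decoding (base == nullptr) is
  // only possible if the mapping lands exactly where those precomputed IDs expect it.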
157 changes: 62 additions & 95 deletions src/hotspot/share/memory/metaspace.cpp
@@ -59,6 +59,7 @@
#include "utilities/debug.hpp"
#include "utilities/formatBuffer.hpp"
#include "utilities/globalDefinitions.hpp"
#include "virtualspace.hpp"

using metaspace::ChunkManager;
using metaspace::CommitLimiter;
@@ -581,86 +582,71 @@ bool Metaspace::class_space_is_initialized() {
return MetaspaceContext::context_class() != nullptr;
}

// Reserve a range of memory at an address suitable for en/decoding narrow
// Klass pointers (see: CompressedClassPointers::is_valid_base()).
// The returned address shall both be suitable as a compressed class pointers
// base, and aligned to Metaspace::reserve_alignment (which is equal to or a
// multiple of allocation granularity).
// On error, returns an unreserved space.
ReservedSpace Metaspace::reserve_address_space_for_compressed_classes(size_t size) {

#if defined(AARCH64) || defined(PPC64)
const size_t alignment = Metaspace::reserve_alignment();

// AArch64: Try to align metaspace class space so that we can decode a
// compressed klass with a single MOVK instruction. We can do this iff the
// compressed class base is a multiple of 4G.
// Additionally, above 32G, ensure the lower LogKlassAlignmentInBytes bits
// of the upper 32-bits of the address are zero so we can handle a shift
// when decoding.

// PPC64: smaller heaps up to 2g will be mapped just below 4g. Then the
// attempt to place the compressed class space just after the heap fails on
// Linux 4.1.42 and higher because the launcher is loaded at 4g
// (ELF_ET_DYN_BASE). In that case we reach here and search the address space
// below 32g to get a zerobased CCS. For simplicity we reuse the search
// strategy for AARCH64.

static const struct {
address from;
address to;
size_t increment;
} search_ranges[] = {
{ (address)(4*G), (address)(32*G), 4*G, },
{ (address)(32*G), (address)(1024*G), (4 << LogKlassAlignmentInBytes) * G },
{ nullptr, nullptr, 0 }
};

// Calculate a list of all possible values for the starting address for the
// compressed class space.
ResourceMark rm;
GrowableArray<address> list(36);
for (int i = 0; search_ranges[i].from != nullptr; i ++) {
address a = search_ranges[i].from;
assert(CompressedKlassPointers::is_valid_base(a), "Sanity");
while (a < search_ranges[i].to) {
list.append(a);
a += search_ranges[i].increment;
// Reserve a range of memory that is to contain narrow Klass IDs. If "try_in_low_address_ranges"
// is true, we will attempt to reserve memory suitable for zero-based encoding.
ReservedSpace Metaspace::reserve_address_space_for_compressed_classes(size_t size, bool try_in_low_address_ranges) {

char* result = nullptr;
const bool randomize = RandomizeClassSpaceLocation;

// First try to reserve in low address ranges.
if (try_in_low_address_ranges) {
constexpr uintptr_t unscaled_max = ((uintptr_t)UINT_MAX + 1);
log_debug(metaspace, map)("Trying below " SIZE_FORMAT_X " for unscaled narrow Klass encoding", unscaled_max);
result = os::attempt_reserve_memory_between(nullptr, (char*)unscaled_max,
size, Metaspace::reserve_alignment(), randomize);
if (result == nullptr) {
constexpr uintptr_t zerobased_max = unscaled_max << LogKlassAlignmentInBytes;
log_debug(metaspace, map)("Trying below " SIZE_FORMAT_X " for zero-based narrow Klass encoding", zerobased_max);
result = os::attempt_reserve_memory_between((char*)unscaled_max, (char*)zerobased_max,
size, Metaspace::reserve_alignment(), randomize);
}
} // end: low-address reservation

#if defined(AARCH64) || defined(PPC64) || defined(S390)
if (result == nullptr) {
// Failing zero-based allocation, or in strict_base mode, try to come up with
// an optimized start address that is amenable to JITs that use 16-bit moves to
// load the encoding base as a short immediate.
// Therefore we try here for an address that when right-shifted by
// LogKlassAlignmentInBytes has only 1s in the third 16-bit quadrant.
//
// Example: for shift=3, the address space searched would be
// [0x0008_0000_0000 - 0xFFF8_0000_0000].

// Number of least significant bits that should be zero
constexpr int lo_zero_bits = 32 + LogKlassAlignmentInBytes;
// Number of most significant bits that should be zero
constexpr int hi_zero_bits = 16;

constexpr size_t alignment = nth_bit(lo_zero_bits);
assert(alignment >= Metaspace::reserve_alignment(), "Sanity");
constexpr uint64_t min = alignment;
constexpr uint64_t max = nth_bit(64 - hi_zero_bits);

log_debug(metaspace, map)("Trying between " UINT64_FORMAT_X " and " UINT64_FORMAT_X
" with " SIZE_FORMAT_X " alignment", min, max, alignment);
result = os::attempt_reserve_memory_between((char*)min, (char*)max, size, alignment, randomize);
}
#endif // defined(AARCH64) || defined(PPC64) || defined(S390)

int len = list.length();
int r = 0;
if (!DumpSharedSpaces) {
// Starting from a random position in the list. If the address cannot be reserved
// (the OS already assigned it for something else), go to the next position, wrapping
// around if necessary, until we exhaust all the items.
os::init_random((int)os::javaTimeNanos());
r = os::random();
log_info(metaspace)("Randomizing compressed class space: start from %d out of %d locations",
r % len, len);
if (result == nullptr) {
// Fallback: reserve anywhere and hope the resulting block is usable.
log_debug(metaspace, map)("Trying anywhere...");
result = os::reserve_memory_aligned(size, Metaspace::reserve_alignment(), false);
}
for (int i = 0; i < len; i++) {
address a = list.at((i + r) % len);
ReservedSpace rs(size, Metaspace::reserve_alignment(),
os::vm_page_size(), (char*)a);
if (rs.is_reserved()) {
assert(a == (address)rs.base(), "Sanity");
return rs;
}

// Wrap resulting range in ReservedSpace
ReservedSpace rs;
if (result != nullptr) {
assert(is_aligned(result, Metaspace::reserve_alignment()), "Alignment too small for metaspace");
rs = ReservedSpace::space_for_range(result, size, Metaspace::reserve_alignment(),
os::vm_page_size(), false, false);
} else {
rs = ReservedSpace();
}
#endif // defined(AARCH64) || defined(PPC64)

#ifdef AARCH64
// Note: on AARCH64, if the code above does not find any good placement, we
// have no recourse. We return an empty space and the VM will exit.
return ReservedSpace();
#else
// Default implementation: Just reserve anywhere.
return ReservedSpace(size, Metaspace::reserve_alignment(), os::vm_page_size(), (char*)nullptr);
#endif // AARCH64
return rs;
}

#endif // _LP64

size_t Metaspace::reserve_alignment_words() {
@@ -781,14 +767,13 @@ void Metaspace::global_initialize() {
// case (b) (No CDS)
ReservedSpace rs;
const size_t size = align_up(CompressedClassSpaceSize, Metaspace::reserve_alignment());
address base = nullptr;

// If CompressedClassSpaceBaseAddress is set, we attempt to force-map class space to
// the given address. This is a debug-only feature aiding tests. Due to the ASLR lottery
// this may fail, in which case the VM will exit after printing an appropriate message.
// Tests using this switch should cope with that.
if (CompressedClassSpaceBaseAddress != 0) {
base = (address)CompressedClassSpaceBaseAddress;
const address base = (address)CompressedClassSpaceBaseAddress;
if (!is_aligned(base, Metaspace::reserve_alignment())) {
vm_exit_during_initialization(
err_msg("CompressedClassSpaceBaseAddress=" PTR_FORMAT " invalid "
@@ -806,27 +791,9 @@
}
}

if (!rs.is_reserved()) {
// If UseCompressedOops=1 and the java heap has been placed in coops-friendly
// territory, i.e. its base is under 32G, then we attempt to place ccs
// right above the java heap.
// Otherwise the lower 32G are still free. We try to place ccs at the lowest
// allowed mapping address.
base = (UseCompressedOops && (uint64_t)CompressedOops::base() < OopEncodingHeapMax) ?
CompressedOops::end() : (address)HeapBaseMinAddress;
base = align_up(base, Metaspace::reserve_alignment());

if (base != nullptr) {
if (CompressedKlassPointers::is_valid_base(base)) {
rs = ReservedSpace(size, Metaspace::reserve_alignment(),
os::vm_page_size(), (char*)base);
}
}
}

// ...failing that, reserve anywhere, but let platform do optimized placement:
if (!rs.is_reserved()) {
rs = Metaspace::reserve_address_space_for_compressed_classes(size);
rs = Metaspace::reserve_address_space_for_compressed_classes(size, true);
}

// ...failing that, give up.
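For reference, a small standalone sketch (not part of the commit) checking the arithmetic behind the search windows in reserve_address_space_for_compressed_classes() above, assuming the default LogKlassAlignmentInBytes of 3:

  #include <cstdint>

  constexpr int log_klass_alignment = 3; // assumed default LogKlassAlignmentInBytes

  // Unscaled encoding (base == 0, shift == 0): the nKlass is the address itself,
  // so the class space must end below 4G.
  constexpr uint64_t unscaled_max  = (uint64_t)UINT32_MAX + 1;             // 4G
  // Zero-based encoding (base == 0, shift == 3): address == nKlass << shift,
  // so the class space must end below 32G.
  constexpr uint64_t zerobased_max = unscaled_max << log_klass_alignment;  // 32G
  static_assert(zerobased_max == UINT64_C(0x8'0000'0000), "32G");

  // AArch64/PPC64/S390 fallback: bases whose lowest 32+shift bits and highest
  // 16 bits are zero, so the base fits a 16-bit immediate move after shifting.
  constexpr uint64_t fb_alignment = UINT64_C(1) << (32 + log_klass_alignment); // 0x0008_0000_0000
  constexpr uint64_t fb_max       = UINT64_C(1) << (64 - 16);                  // 2^48
  static_assert(fb_max - fb_alignment == UINT64_C(0xFFF8'0000'0000),
                "matches the example range [0x0008_0000_0000 - 0xFFF8_0000_0000]");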
10 changes: 3 additions & 7 deletions src/hotspot/share/memory/metaspace.hpp
@@ -74,13 +74,9 @@ class Metaspace : public AllStatic {

#ifdef _LP64

// Reserve a range of memory at an address suitable for en/decoding narrow
// Klass pointers (see: CompressedClassPointers::is_valid_base()).
// The returned address shall both be suitable as a compressed class pointers
// base, and aligned to Metaspace::reserve_alignment (which is equal to or a
// multiple of allocation granularity).
// On error, returns an unreserved space.
static ReservedSpace reserve_address_space_for_compressed_classes(size_t size);
// Reserve a range of memory that is to contain narrow Klass IDs. If "try_in_low_address_ranges"
// is true, we will attempt to reserve memory suitable for zero-based encoding.
static ReservedSpace reserve_address_space_for_compressed_classes(size_t size, bool try_in_low_address_ranges);

// Given a prereserved space, use that to set up the compressed class space list.
static void initialize_class_space(ReservedSpace rs);
21 changes: 11 additions & 10 deletions src/hotspot/share/memory/virtualspace.cpp
@@ -355,6 +355,17 @@ void ReservedSpace::release() {
}
}

// Put a ReservedSpace over an existing range
ReservedSpace ReservedSpace::space_for_range(char* base, size_t size, size_t alignment,
size_t page_size, bool special, bool executable) {
assert(is_aligned(base, os::vm_allocation_granularity()), "Unaligned base");
assert(is_aligned(size, os::vm_page_size()), "Unaligned size");
assert(os::page_sizes().contains(page_size), "Invalid pagesize");
ReservedSpace space;
space.initialize_members(base, size, alignment, page_size, special, executable);
return space;
}

static size_t noaccess_prefix_size(size_t alignment) {
return lcm(os::vm_page_size(), alignment);
}
@@ -546,17 +557,7 @@ void ReservedHeapSpace::initialize_compressed_heap(const size_t size, size_t ali
}

// zerobased: Attempt to allocate in the lower 32G.
// But leave room for the compressed class pointers, which is allocated above
// the heap.
char *zerobased_max = (char *)OopEncodingHeapMax;
const size_t class_space = align_up(CompressedClassSpaceSize, alignment);
// For small heaps, save some space for compressed class pointer
// space so it can be decoded with no base.
if (UseCompressedClassPointers && !UseSharedSpaces && !DumpSharedSpaces &&
OopEncodingHeapMax <= KlassEncodingMetaspaceMax &&
(uint64_t)(aligned_heap_base_min_address + size + class_space) <= KlassEncodingMetaspaceMax) {
zerobased_max = (char *)OopEncodingHeapMax - class_space;
}

// Give it several tries from top of range to bottom.
if (aligned_heap_base_min_address + size <= zerobased_max && // Zerobased theoretical possible.
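For reference, the intended usage pattern for the new factory, mirroring the metaspace.cpp call site above: reserve raw memory first, then wrap it. A sketch with error handling trimmed, not commit code:

  // Wrap an os-level reservation so the rest of the VM can treat it like any
  // other ReservedSpace.
  char* raw = os::reserve_memory_aligned(size, Metaspace::reserve_alignment(), false);
  ReservedSpace rs;
  if (raw != nullptr) {
    rs = ReservedSpace::space_for_range(raw, size, Metaspace::reserve_alignment(),
                                        os::vm_page_size(),
                                        false /* special */, false /* executable */);
  }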
4 changes: 4 additions & 0 deletions src/hotspot/share/memory/virtualspace.hpp
@@ -107,6 +107,10 @@ class ReservedSpace {
bool contains(const void* p) const {
return (base() <= ((char*)p)) && (((char*)p) < (base() + size()));
}

// Put a ReservedSpace over an existing range
static ReservedSpace space_for_range(char* base, size_t size, size_t alignment,
size_t page_size, bool special, bool executable);
};

ReservedSpace
3 changes: 2 additions & 1 deletion src/hotspot/share/oops/compressedKlass.cpp
@@ -62,7 +62,6 @@ void CompressedKlassPointers::initialize_for_given_encoding(address addr, size_t
// will encounter (and the implicit promise that there will be no Klass
// structures outside this range).
void CompressedKlassPointers::initialize(address addr, size_t len) {
assert(is_valid_base(addr), "Address must be a valid encoding base");
address const end = addr + len;

address base;
@@ -90,6 +89,8 @@
set_base(base);
set_shift(shift);
set_range(range);

assert(is_valid_base(_base), "Address must be a valid encoding base");
}

// Given an address p, return true if p can be used as an encoding base.
3 changes: 3 additions & 0 deletions src/hotspot/share/runtime/globals.hpp
@@ -1419,6 +1419,9 @@ const int ObjectAlignmentInBytes = 8;
"Force the class space to be allocated at this address or " \
"fails VM initialization (requires -Xshare=off.") \
\
develop(bool, RandomizeClassSpaceLocation, true, \
"Randomize location of class space.") \
\
product(bool, PrintMetaspaceStatisticsAtExit, false, DIAGNOSTIC, \
"Print metaspace statistics upon VM exit.") \
\
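Usage note: RandomizeClassSpaceLocation is a develop flag and can therefore only be toggled in debug builds; there, a test that needs deterministic class-space placement could disable it, e.g.:

  java -XX:-RandomizeClassSpaceLocation -version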
[Diff for the remaining 6 of the 17 changed files is not shown in this excerpt.]