Commit 2c73a1f

8290324: Move atomic operations outside of os_xxx.hpp

Reviewed-by: dholmes, kbarrett
Parent: e8975be

15 files changed: +176 -188 lines

src/hotspot/os_cpu/bsd_aarch64/os_bsd_aarch64.cpp (+7 -2)

@@ -549,6 +549,10 @@ void os::current_thread_enable_wx(WXMode mode) {
   pthread_jit_write_protect_np(mode == WXExec);
 }
 
+static inline void atomic_copy64(const volatile void *src, volatile void *dst) {
+  *(jlong *) dst = *(const jlong *) src;
+}
+
 extern "C" {
   int SpinPause() {
     return 0;
@@ -582,18 +586,19 @@ extern "C" {
       *(to--) = *(from--);
     }
   }
+
   void _Copy_conjoint_jlongs_atomic(const jlong* from, jlong* to, size_t count) {
     if (from > to) {
       const jlong *end = from + count;
       while (from < end)
-        os::atomic_copy64(from++, to++);
+        atomic_copy64(from++, to++);
     }
     else if (from < to) {
       const jlong *end = from;
       from += count - 1;
       to += count - 1;
       while (from >= end)
-        os::atomic_copy64(from--, to--);
+        atomic_copy64(from--, to--);
     }
   }
 
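Why the moved helper can be a plain assignment on this port: an aligned 64-bit load or store is a single access on AArch64, so copying a jlong through ordinary dereferences is already atomic, and _Copy_conjoint_jlongs_atomic only has to pick a copy direction so overlapping ranges do not clobber themselves. A minimal standalone sketch of that direction logic follows; copy64 and demo_conjoint_copy64 are invented names for illustration, not HotSpot code.

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Element-atomic 64-bit copy: one aligned load, one aligned store.
static inline void copy64(const volatile int64_t* src, volatile int64_t* dst) {
  *dst = *src;
}

// Direction-aware overlapping copy, mirroring the shape of the HotSpot loop.
static void demo_conjoint_copy64(const int64_t* from, int64_t* to, size_t count) {
  if (from > to) {          // destination below source: copy forward
    const int64_t* end = from + count;
    while (from < end)
      copy64(from++, to++);
  } else if (from < to) {   // destination above source: copy backward
    const int64_t* end = from;
    from += count - 1;
    to += count - 1;
    while (from >= end)
      copy64(from--, to--);
  }                         // from == to: nothing to do
}

int main() {
  int64_t buf[6] = {1, 2, 3, 4, 5, 6};
  demo_conjoint_copy64(buf, buf + 2, 4);                // overlapping shift by 2
  for (int64_t v : buf) printf("%lld ", (long long)v);  // prints: 1 2 1 2 3 4
  printf("\n");
  return 0;
}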

src/hotspot/os_cpu/bsd_aarch64/os_bsd_aarch64.hpp (+1 -6)

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1999, 2021, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1999, 2022, Oracle and/or its affiliates. All rights reserved.
  * Copyright (c) 2014, Red Hat Inc. All rights reserved.
  * Copyright (c) 2021, Azul Systems, Inc. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -35,9 +35,4 @@
   // Note: Currently only used in 64 bit Windows implementations
   static bool register_code_area(char *low, char *high) { return true; }
 
-  // Atomically copy 64 bits of data
-  static void atomic_copy64(const volatile void *src, volatile void *dst) {
-    *(jlong *) dst = *(const jlong *) src;
-  }
-
 #endif // OS_CPU_BSD_AARCH64_OS_BSD_AARCH64_HPP

src/hotspot/os_cpu/bsd_zero/atomic_bsd_zero.hpp (+22 -3)

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
  * Copyright 2007, 2008, 2011, 2015, Red Hat, Inc.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
@@ -285,12 +285,31 @@ inline T Atomic::PlatformCmpxchg<8>::operator()(T volatile* dest,
   return value;
 }
 
+// Atomically copy 64 bits of data
+static void atomic_copy64(const volatile void *src, volatile void *dst) {
+#if defined(PPC32)
+  double tmp;
+  asm volatile ("lfd %0, 0(%1)\n"
+                "stfd %0, 0(%2)\n"
+                : "=f"(tmp)
+                : "b"(src), "b"(dst));
+#elif defined(S390) && !defined(_LP64)
+  double tmp;
+  asm volatile ("ld %0, 0(%1)\n"
+                "std %0, 0(%2)\n"
+                : "=r"(tmp)
+                : "a"(src), "a"(dst));
+#else
+  *(jlong *) dst = *(const jlong *) src;
+#endif
+}
+
 template<>
 template<typename T>
 inline T Atomic::PlatformLoad<8>::operator()(T const volatile* src) const {
   STATIC_ASSERT(8 == sizeof(T));
   volatile int64_t dest;
-  os::atomic_copy64(reinterpret_cast<const volatile int64_t*>(src), reinterpret_cast<volatile int64_t*>(&dest));
+  atomic_copy64(reinterpret_cast<const volatile int64_t*>(src), reinterpret_cast<volatile int64_t*>(&dest));
   return PrimitiveConversions::cast<T>(dest);
 }
 
@@ -299,7 +318,7 @@ template<typename T>
 inline void Atomic::PlatformStore<8>::operator()(T volatile* dest,
                                                  T store_value) const {
   STATIC_ASSERT(8 == sizeof(T));
-  os::atomic_copy64(reinterpret_cast<const volatile int64_t*>(&store_value), reinterpret_cast<volatile int64_t*>(dest));
+  atomic_copy64(reinterpret_cast<const volatile int64_t*>(&store_value), reinterpret_cast<volatile int64_t*>(dest));
 }
 
 #endif // OS_CPU_BSD_ZERO_ATOMIC_BSD_ZERO_HPP
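The Zero port keeps the per-CPU special cases: PPC32 and 31-bit S390 have no integer instruction that moves 64 bits in one access, so the copy bounces through a floating-point register (lfd/stfd resp. ld/std), while every other target falls back to a plain 64-bit assignment. Since PlatformLoad<8> and PlatformStore<8> both funnel through this one helper, only atomic_copy64 needs porting. Below is a minimal analogue of that funneling, assuming a host where a plain aligned 64-bit assignment is atomic; load64 and store64 are invented names standing in for the Atomic::PlatformLoad/PlatformStore templates, and memcpy stands in for PrimitiveConversions::cast.

#include <cassert>
#include <cstdint>
#include <cstring>

// Generic fallback path from the diff; PPC32/S390-31 would use the inline
// asm variants instead of this plain assignment.
static inline void atomic_copy64(const volatile void* src, volatile void* dst) {
  *(int64_t*) dst = *(const int64_t*) src;
}

// All 8-byte loads funnel through the one copy primitive...
template <typename T>
T load64(const volatile T* src) {
  static_assert(sizeof(T) == 8, "64-bit types only");
  int64_t tmp;
  atomic_copy64(src, &tmp);
  T out;
  std::memcpy(&out, &tmp, sizeof out);  // bit-preserving cast back to T
  return out;
}

// ...and all 8-byte stores, too.
template <typename T>
void store64(volatile T* dst, T value) {
  static_assert(sizeof(T) == 8, "64-bit types only");
  int64_t tmp;
  std::memcpy(&tmp, &value, sizeof tmp);
  atomic_copy64(&tmp, dst);
}

int main() {
  volatile double d = 0.0;   // doubles route through the same int64_t path
  store64(&d, 3.5);
  assert(load64(&d) == 3.5);
  return 0;
}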

src/hotspot/os_cpu/bsd_zero/os_bsd_zero.cpp (+3 -2)

@@ -31,6 +31,7 @@
 // no precompiled headers
 #include "jvm.h"
 #include "asm/assembler.inline.hpp"
+#include "atomic_bsd_zero.hpp"
 #include "classfile/vmSymbols.hpp"
 #include "code/icBuffer.hpp"
 #include "code/vtableStubs.hpp"
@@ -295,14 +296,14 @@ extern "C" {
     if (from > to) {
       const jlong *end = from + count;
       while (from < end)
-        os::atomic_copy64(from++, to++);
+        atomic_copy64(from++, to++);
     }
     else if (from < to) {
       const jlong *end = from;
       from += count - 1;
       to += count - 1;
       while (from >= end)
-        os::atomic_copy64(from--, to--);
+        atomic_copy64(from--, to--);
     }
   }
 

src/hotspot/os_cpu/bsd_zero/os_bsd_zero.hpp (+1 -20)

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2019, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
  * Copyright 2007, 2008, 2010 Red Hat, Inc.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
@@ -32,23 +32,4 @@
   // Note: Currently only used in 64 bit Windows implementations
   static bool register_code_area(char *low, char *high) { return true; }
 
-  // Atomically copy 64 bits of data
-  static void atomic_copy64(const volatile void *src, volatile void *dst) {
-#if defined(PPC32)
-    double tmp;
-    asm volatile ("lfd %0, 0(%1)\n"
-                  "stfd %0, 0(%2)\n"
-                  : "=f"(tmp)
-                  : "b"(src), "b"(dst));
-#elif defined(S390) && !defined(_LP64)
-    double tmp;
-    asm volatile ("ld %0, 0(%1)\n"
-                  "std %0, 0(%2)\n"
-                  : "=r"(tmp)
-                  : "a"(src), "a"(dst));
-#else
-    *(jlong *) dst = *(const jlong *) src;
-#endif
-  }
-
 #endif // OS_CPU_BSD_ZERO_OS_BSD_ZERO_HPP

src/hotspot/os_cpu/linux_aarch64/os_linux_aarch64.cpp (+7 -2)

@@ -387,6 +387,10 @@ int os::extra_bang_size_in_bytes() {
   return 0;
 }
 
+static inline void atomic_copy64(const volatile void *src, volatile void *dst) {
+  *(jlong *) dst = *(const jlong *) src;
+}
+
 extern "C" {
   int SpinPause() {
     using spin_wait_func_ptr_t = void (*)();
@@ -433,18 +437,19 @@ extern "C" {
       *(to--) = *(from--);
     }
   }
+
   void _Copy_conjoint_jlongs_atomic(const jlong* from, jlong* to, size_t count) {
     if (from > to) {
       const jlong *end = from + count;
       while (from < end)
-        os::atomic_copy64(from++, to++);
+        atomic_copy64(from++, to++);
     }
     else if (from < to) {
       const jlong *end = from;
       from += count - 1;
       to += count - 1;
       while (from >= end)
-        os::atomic_copy64(from--, to--);
+        atomic_copy64(from--, to--);
     }
   }
 

src/hotspot/os_cpu/linux_aarch64/os_linux_aarch64.hpp (+1 -6)

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1999, 2020, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1999, 2022, Oracle and/or its affiliates. All rights reserved.
  * Copyright (c) 2014, Red Hat Inc. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
@@ -36,9 +36,4 @@
   // Note: Currently only used in 64 bit Windows implementations
   static bool register_code_area(char *low, char *high) { return true; }
 
-  // Atomically copy 64 bits of data
-  static void atomic_copy64(const volatile void *src, volatile void *dst) {
-    *(jlong *) dst = *(const jlong *) src;
-  }
-
 #endif // OS_CPU_LINUX_AARCH64_OS_LINUX_AARCH64_HPP

src/hotspot/os_cpu/linux_arm/atomic_linux_arm.hpp (+39 -7)

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2008, 2019, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2008, 2022, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -25,11 +25,43 @@
 #ifndef OS_CPU_LINUX_ARM_ATOMIC_LINUX_ARM_HPP
 #define OS_CPU_LINUX_ARM_ATOMIC_LINUX_ARM_HPP
 
+#include "memory/allStatic.hpp"
 #include "runtime/os.hpp"
 #include "runtime/vm_version.hpp"
 
 // Implementation of class atomic
 
+class ARMAtomicFuncs : AllStatic {
+ public:
+  typedef int64_t (*cmpxchg_long_func_t)(int64_t, int64_t, volatile int64_t*);
+  typedef int64_t (*load_long_func_t)(const volatile int64_t*);
+  typedef void (*store_long_func_t)(int64_t, volatile int64_t*);
+  typedef int32_t (*atomic_add_func_t)(int32_t add_value, volatile int32_t *dest);
+  typedef int32_t (*atomic_xchg_func_t)(int32_t exchange_value, volatile int32_t *dest);
+  typedef int32_t (*cmpxchg_func_t)(int32_t, int32_t, volatile int32_t*);
+
+  static cmpxchg_long_func_t _cmpxchg_long_func;
+  static load_long_func_t _load_long_func;
+  static store_long_func_t _store_long_func;
+  static atomic_add_func_t _add_func;
+  static atomic_xchg_func_t _xchg_func;
+  static cmpxchg_func_t _cmpxchg_func;
+
+  static int64_t cmpxchg_long_bootstrap(int64_t, int64_t, volatile int64_t*);
+
+  static int64_t load_long_bootstrap(const volatile int64_t*);
+
+  static void store_long_bootstrap(int64_t, volatile int64_t*);
+
+  static int32_t add_bootstrap(int32_t add_value, volatile int32_t *dest);
+
+  static int32_t xchg_bootstrap(int32_t exchange_value, volatile int32_t *dest);
+
+  static int32_t cmpxchg_bootstrap(int32_t compare_value,
+                                   int32_t exchange_value,
+                                   volatile int32_t *dest);
+};
+
 /*
  * Atomic long operations on 32-bit ARM
  * ARM v7 supports LDREXD/STREXD synchronization instructions so no problem.
@@ -49,15 +81,15 @@ template<typename T>
 inline T Atomic::PlatformLoad<8>::operator()(T const volatile* src) const {
   STATIC_ASSERT(8 == sizeof(T));
   return PrimitiveConversions::cast<T>(
-    (*os::atomic_load_long_func)(reinterpret_cast<const volatile int64_t*>(src)));
+    (*ARMAtomicFuncs::_load_long_func)(reinterpret_cast<const volatile int64_t*>(src)));
 }
 
 template<>
 template<typename T>
 inline void Atomic::PlatformStore<8>::operator()(T volatile* dest,
                                                  T store_value) const {
   STATIC_ASSERT(8 == sizeof(T));
-  (*os::atomic_store_long_func)(
+  (*ARMAtomicFuncs::_store_long_func)(
     PrimitiveConversions::cast<int64_t>(store_value), reinterpret_cast<volatile int64_t*>(dest));
 }
 
@@ -83,7 +115,7 @@ inline D Atomic::PlatformAdd<4>::add_and_fetch(D volatile* dest, I add_value,
                                                atomic_memory_order order) const {
   STATIC_ASSERT(4 == sizeof(I));
   STATIC_ASSERT(4 == sizeof(D));
-  return add_using_helper<int32_t>(os::atomic_add_func, dest, add_value);
+  return add_using_helper<int32_t>(ARMAtomicFuncs::_add_func, dest, add_value);
 }
 
 
@@ -93,7 +125,7 @@ inline T Atomic::PlatformXchg<4>::operator()(T volatile* dest,
                                              T exchange_value,
                                              atomic_memory_order order) const {
   STATIC_ASSERT(4 == sizeof(T));
-  return xchg_using_helper<int32_t>(os::atomic_xchg_func, dest, exchange_value);
+  return xchg_using_helper<int32_t>(ARMAtomicFuncs::_xchg_func, dest, exchange_value);
 }
 
 
@@ -108,15 +140,15 @@ inline int32_t reorder_cmpxchg_func(int32_t exchange_value,
                                     int32_t volatile* dest,
                                     int32_t compare_value) {
   // Warning: Arguments are swapped to avoid moving them for kernel call
-  return (*os::atomic_cmpxchg_func)(compare_value, exchange_value, dest);
+  return (*ARMAtomicFuncs::_cmpxchg_func)(compare_value, exchange_value, dest);
 }
 
 inline int64_t reorder_cmpxchg_long_func(int64_t exchange_value,
                                          int64_t volatile* dest,
                                          int64_t compare_value) {
   assert(VM_Version::supports_cx8(), "Atomic compare and exchange int64_t not supported on this architecture!");
   // Warning: Arguments are swapped to avoid moving them for kernel call
-  return (*os::atomic_cmpxchg_long_func)(compare_value, exchange_value, dest);
+  return (*ARMAtomicFuncs::_cmpxchg_long_func)(compare_value, exchange_value, dest);
 }
 