32
32
33
33
template <typename T>
34
34
inline T UnifiedOopRef::addr () const {
35
- return reinterpret_cast <T>(_value & ~ uintptr_t( 3 ));
35
+ return reinterpret_cast <T>(UnifiedOopRef::addr< uintptr_t >( ));
36
36
}
37
37
38
38
// Visual Studio 2019 and earlier have a problem with reinterpret_cast
@@ -42,47 +42,70 @@ inline T UnifiedOopRef::addr() const {
42
42
// this specialization provides a workaround.
43
43
template <>
44
44
inline uintptr_t UnifiedOopRef::addr<uintptr_t >() const {
45
- return _value & ~uintptr_t ( 3 );
45
+ return ( _value & ~tag_mask) LP64_ONLY (>> 1 );
46
46
}
47
47
48
48
inline bool UnifiedOopRef::is_narrow () const {
49
- return _value & 1 ;
49
+ return ( _value & narrow_tag) != 0 ;
50
50
}
51
51
52
52
inline bool UnifiedOopRef::is_native () const {
53
- return _value & 2 ;
53
+ return (_value & native_tag) != 0 ;
54
+ }
55
+
56
+ inline bool UnifiedOopRef::is_raw () const {
57
+ return (_value & raw_tag) != 0 ;
54
58
}
55
59
56
60
inline bool UnifiedOopRef::is_null () const {
57
61
return _value == 0 ;
58
62
}
59
63
60
- inline UnifiedOopRef UnifiedOopRef::encode_in_native (const narrowOop* ref) {
64
+ template <typename T>
65
+ inline UnifiedOopRef create_with_tag (T ref, uintptr_t tag) {
61
66
assert (ref != NULL , " invariant" );
62
- UnifiedOopRef result = { reinterpret_cast <uintptr_t >(ref) | 3 };
63
- assert (result.addr <narrowOop*>() == ref, " sanity" );
67
+
68
+ uintptr_t value = reinterpret_cast <uintptr_t >(ref);
69
+
70
+ #ifdef _LP64
71
+ // tag_mask is 3 bits. When ref is a narrowOop* we only have 2 alignment
72
+ // bits, because of the 4 byte alignment of compressed oops addresses.
73
+ // Shift up to make way for one more bit.
74
+ assert ((value & (1ull << 63 )) == 0 , " Unexpected high-order bit" );
75
+ value <<= 1 ;
76
+ #endif
77
+ assert ((value & UnifiedOopRef::tag_mask) == 0 , " Unexpected low-order bits" );
78
+
79
+ UnifiedOopRef result = { value | tag };
80
+ assert (result.addr <T>() == ref, " sanity" );
64
81
return result;
65
82
}
66
83
84
+ inline UnifiedOopRef UnifiedOopRef::encode_in_native (const narrowOop* ref) {
85
+ NOT_LP64 (ShouldNotReachHere ());
86
+ return create_with_tag (ref, native_tag | narrow_tag);
87
+ }
88
+
67
89
// Encodes an uncompressed-oop slot located in native (off-heap) memory.
inline UnifiedOopRef UnifiedOopRef::encode_in_native(const oop* ref) {
  return create_with_tag(ref, native_tag);
}
92
+
93
+ inline UnifiedOopRef UnifiedOopRef::encode_as_raw (const narrowOop* ref) {
94
+ NOT_LP64 (ShouldNotReachHere ());
95
+ return create_with_tag (ref, raw_tag | narrow_tag);
96
+ }
97
+
98
+ inline UnifiedOopRef UnifiedOopRef::encode_as_raw (const oop* ref) {
99
+ return create_with_tag (ref, raw_tag);
72
100
}
73
101
74
102
// Encodes a compressed-oop slot located in the Java heap.
// Compressed oops exist only on 64-bit builds, hence the 32-bit guard.
inline UnifiedOopRef UnifiedOopRef::encode_in_heap(const narrowOop* ref) {
  NOT_LP64(ShouldNotReachHere());
  return create_with_tag(ref, narrow_tag);
}
80
106
81
107
// Encodes an uncompressed-oop slot located in the Java heap.
// No tag bits set: an in-heap oop* is the untagged default encoding.
inline UnifiedOopRef UnifiedOopRef::encode_in_heap(const oop* ref) {
  return create_with_tag(ref, 0);
}
87
110
88
111
inline UnifiedOopRef UnifiedOopRef::encode_null () {
@@ -91,14 +114,23 @@ inline UnifiedOopRef UnifiedOopRef::encode_null() {
91
114
}
92
115
93
116
inline oop UnifiedOopRef::dereference () const {
94
- if (is_native ()) {
117
+ if (is_raw ()) {
118
+ if (is_narrow ()) {
119
+ NOT_LP64 (ShouldNotReachHere ());
120
+ return RawAccess<>::oop_load (addr<narrowOop*>());
121
+ } else {
122
+ return *addr<oop*>();
123
+ }
124
+ } else if (is_native ()) {
95
125
if (is_narrow ()) {
126
+ NOT_LP64 (ShouldNotReachHere ());
96
127
return NativeAccess<AS_NO_KEEPALIVE>::oop_load (addr<narrowOop*>());
97
128
} else {
98
129
return NativeAccess<AS_NO_KEEPALIVE>::oop_load (addr<oop*>());
99
130
}
100
131
} else {
101
132
if (is_narrow ()) {
133
+ NOT_LP64 (ShouldNotReachHere ());
102
134
return HeapAccess<AS_NO_KEEPALIVE>::oop_load (addr<narrowOop*>());
103
135
} else {
104
136
return HeapAccess<AS_NO_KEEPALIVE>::oop_load (addr<oop*>());
0 commit comments