/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
* vim: set ts=8 sts=2 et sw=2 tw=80:
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef gc_Cell_h
#define gc_Cell_h
#include "gc/GCEnum.h"
#include "gc/Heap.h"
#include "js/GCAnnotations.h"
#include "js/TraceKind.h"
#include "js/TypeDecls.h"
namespace JS {
namespace shadow {
struct Zone;
} /* namespace shadow */
enum class TraceKind;
} /* namespace JS */
namespace js {
class GenericPrinter;
extern bool RuntimeFromMainThreadIsHeapMajorCollecting(
JS::shadow::Zone* shadowZone);
#ifdef DEBUG
// Barriers can't be triggered during backend Ion compilation, which may run on
// a helper thread.
extern bool CurrentThreadIsIonCompiling();
#endif
extern void TraceManuallyBarrieredGenericPointerEdge(JSTracer* trc,
gc::Cell** thingp,
const char* name);
namespace gc {
class Arena;
enum class AllocKind : uint8_t;
struct Chunk;
class StoreBuffer;
class TenuredCell;
// [SMDOC] GC Cell
//
// A GC cell is the base class for all GC things. All types allocated on the GC
// heap extend either gc::Cell or gc::TenuredCell. If a type is always tenured,
// prefer the TenuredCell class as base.
//
// The first word (a pointer or uintptr_t) of each Cell must reserve the low
// Cell::ReservedBits bits for GC purposes. The remaining bits are available to
// sub-classes and typically store a pointer to another gc::Cell.
//
// During a moving GC, a Cell may be marked as forwarded. This indicates
// that a gc::RelocationOverlay is currently stored in the Cell's memory and
// should be used to find the new location of the Cell.
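// An editorial illustration (not part of the original header): for a Cell
// whose first word stores a pointer to another Cell, that word can be
// pictured as
//
//   +---------------------------------+--------------+-------------+
//   | pointer / payload bits          | JSSTRING_BIT | FORWARD_BIT |
//   +---------------------------------+--------------+-------------+
//     bits ReservedBits and above        bit 1           bit 0
//
// Assuming CellAlignBytes is at least (1 << ReservedBits), the low bits of
// any Cell pointer are zero, which is what makes it safe to reuse them as
// flags.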
struct alignas(gc::CellAlignBytes) Cell {
public:
// The low bits of the first word of each Cell are reserved for GC flags.
static constexpr int ReservedBits = 2;
static constexpr uintptr_t RESERVED_MASK = JS_BITMASK(ReservedBits);
// Indicates if the cell is currently a RelocationOverlay.
static constexpr uintptr_t FORWARD_BIT = JS_BIT(0);
// When a Cell is in the nursery, this will indicate if it is a JSString (1)
// or JSObject (0). When not in the nursery, this bit is still reserved for
// JSString to use as the JSString::NON_ATOM bit. This may be removed by Bug
// 1376646.
static constexpr uintptr_t JSSTRING_BIT = JS_BIT(1);
MOZ_ALWAYS_INLINE bool isTenured() const { return !IsInsideNursery(this); }
MOZ_ALWAYS_INLINE const TenuredCell& asTenured() const;
MOZ_ALWAYS_INLINE TenuredCell& asTenured();
MOZ_ALWAYS_INLINE bool isMarkedAny() const;
MOZ_ALWAYS_INLINE bool isMarkedBlack() const;
MOZ_ALWAYS_INLINE bool isMarkedGray() const;
inline JSRuntime* runtimeFromMainThread() const;
// Note: Unrestricted access to the runtime of a GC thing from an arbitrary
// thread can easily lead to races. Use this method very carefully.
inline JSRuntime* runtimeFromAnyThread() const;
// May be overridden by GC thing kinds that have a compartment pointer.
inline JS::Compartment* maybeCompartment() const { return nullptr; }
// The StoreBuffer used to record incoming pointers from the tenured heap.
// This will return nullptr for a tenured cell.
inline StoreBuffer* storeBuffer() const;
inline JS::TraceKind getTraceKind() const;
static MOZ_ALWAYS_INLINE bool needWriteBarrierPre(JS::Zone* zone);
inline bool isForwarded() const {
uintptr_t firstWord = *reinterpret_cast<const uintptr_t*>(this);
return firstWord & FORWARD_BIT;
}
inline bool nurseryCellIsString() const {
MOZ_ASSERT(!isTenured());
uintptr_t firstWord = *reinterpret_cast<const uintptr_t*>(this);
return firstWord & JSSTRING_BIT;
}
template <class T>
inline bool is() const {
return getTraceKind() == JS::MapTypeToTraceKind<T>::kind;
}
template <class T>
inline T* as() {
// |this|-qualify the |is| call below to avoid compile errors with even
// fairly recent versions of gcc, e.g. 7.1.1 according to bz.
MOZ_ASSERT(this->is<T>());
return static_cast<T*>(this);
}
template <class T>
inline const T* as() const {
// |this|-qualify the |is| call below to avoid compile errors with even
// fairly recent versions of gcc, e.g. 7.1.1 according to bz.
MOZ_ASSERT(this->is<T>());
return static_cast<const T*>(this);
}
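// Editorial example (illustrative only, not part of the original header):
// callers typically check the trace kind before downcasting, e.g.
//
//   void maybeUseString(js::gc::Cell* cell) {
//     if (cell->is<JSString>()) {
//       JSString* str = cell->as<JSString>();
//       // ... use the string ...
//     }
//   }
//
// This relies on JS::MapTypeToTraceKind being specialized for the target
// type, as it is for the standard GC thing types.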
#ifdef DEBUG
static inline void assertThingIsNotGray(Cell* cell);
inline bool isAligned() const;
void dump(GenericPrinter& out) const;
void dump() const;
#endif
protected:
uintptr_t address() const;
inline Chunk* chunk() const;
} JS_HAZ_GC_THING;
// A GC TenuredCell gets behaviors that are valid for things in the Tenured
// heap, such as access to the arena and mark bits.
class TenuredCell : public Cell {
public:
// Construct a TenuredCell from a void*, making various sanity assertions.
static MOZ_ALWAYS_INLINE TenuredCell* fromPointer(void* ptr);
static MOZ_ALWAYS_INLINE const TenuredCell* fromPointer(const void* ptr);
MOZ_ALWAYS_INLINE bool isTenured() const {
MOZ_ASSERT(!IsInsideNursery(this));
return true;
}
// Mark bit management.
MOZ_ALWAYS_INLINE bool isMarkedAny() const;
MOZ_ALWAYS_INLINE bool isMarkedBlack() const;
MOZ_ALWAYS_INLINE bool isMarkedGray() const;
// The return value indicates if the cell went from unmarked to marked.
MOZ_ALWAYS_INLINE bool markIfUnmarked(
MarkColor color = MarkColor::Black) const;
MOZ_ALWAYS_INLINE void markBlack() const;
MOZ_ALWAYS_INLINE void copyMarkBitsFrom(const TenuredCell* src);
MOZ_ALWAYS_INLINE void unmark();
// Access to the arena.
inline Arena* arena() const;
inline AllocKind getAllocKind() const;
inline JS::TraceKind getTraceKind() const;
inline JS::Zone* zone() const;
inline JS::Zone* zoneFromAnyThread() const;
inline bool isInsideZone(JS::Zone* zone) const;
MOZ_ALWAYS_INLINE JS::shadow::Zone* shadowZone() const {
return JS::shadow::Zone::asShadowZone(zone());
}
MOZ_ALWAYS_INLINE JS::shadow::Zone* shadowZoneFromAnyThread() const {
return JS::shadow::Zone::asShadowZone(zoneFromAnyThread());
}
template <class T>
inline bool is() const {
return getTraceKind() == JS::MapTypeToTraceKind<T>::kind;
}
template <class T>
inline T* as() {
// |this|-qualify the |is| call below to avoid compile errors with even
// fairly recent versions of gcc, e.g. 7.1.1 according to bz.
MOZ_ASSERT(this->is<T>());
return static_cast<T*>(this);
}
template <class T>
inline const T* as() const {
// |this|-qualify the |is| call below to avoid compile errors with even
// fairly recent versions of gcc, e.g. 7.1.1 according to bz.
MOZ_ASSERT(this->is<T>());
return static_cast<const T*>(this);
}
static MOZ_ALWAYS_INLINE void readBarrier(TenuredCell* thing);
static MOZ_ALWAYS_INLINE void writeBarrierPre(TenuredCell* thing);
static void MOZ_ALWAYS_INLINE writeBarrierPost(void* cellp,
TenuredCell* prior,
TenuredCell* next);
// Default implementation for kinds that don't require fixup.
void fixupAfterMovingGC() {}
#ifdef DEBUG
inline bool isAligned() const;
#endif
};
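// Editorial sketch (not part of the original header): code holding a Cell*
// reaches this tenured-only functionality through Cell::asTenured() once it
// has established that the cell is not in the nursery, e.g.
//
//   bool cellIsMarkedGray(js::gc::Cell* cell) {
//     return cell->isTenured() && cell->asTenured().isMarkedGray();
//   }
//
// which mirrors how Cell::isMarkedGray() below is implemented.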
MOZ_ALWAYS_INLINE const TenuredCell& Cell::asTenured() const {
MOZ_ASSERT(isTenured());
return *static_cast<const TenuredCell*>(this);
}
MOZ_ALWAYS_INLINE TenuredCell& Cell::asTenured() {
MOZ_ASSERT(isTenured());
return *static_cast<TenuredCell*>(this);
}
MOZ_ALWAYS_INLINE bool Cell::isMarkedAny() const {
return !isTenured() || asTenured().isMarkedAny();
}
MOZ_ALWAYS_INLINE bool Cell::isMarkedBlack() const {
return !isTenured() || asTenured().isMarkedBlack();
}
MOZ_ALWAYS_INLINE bool Cell::isMarkedGray() const {
return isTenured() && asTenured().isMarkedGray();
}
inline JSRuntime* Cell::runtimeFromMainThread() const {
JSRuntime* rt = chunk()->trailer.runtime;
MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
return rt;
}
inline JSRuntime* Cell::runtimeFromAnyThread() const {
return chunk()->trailer.runtime;
}
inline uintptr_t Cell::address() const {
uintptr_t addr = uintptr_t(this);
MOZ_ASSERT(addr % CellAlignBytes == 0);
MOZ_ASSERT(Chunk::withinValidRange(addr));
return addr;
}
Chunk* Cell::chunk() const {
uintptr_t addr = uintptr_t(this);
MOZ_ASSERT(addr % CellAlignBytes == 0);
addr &= ~ChunkMask;
return reinterpret_cast<Chunk*>(addr);
}
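// Editorial note: chunk() relies on Chunks being allocated on ChunkSize
// boundaries, so clearing the ChunkMask bits of any pointer into a chunk
// yields the Chunk's base address. For example, assuming the usual 1 MiB
// ChunkSize, a cell at 0x7f0000123456 lies in the Chunk starting at
// 0x7f0000100000.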
inline StoreBuffer* Cell::storeBuffer() const {
return chunk()->trailer.storeBuffer;
}
inline JS::TraceKind Cell::getTraceKind() const {
if (isTenured()) {
return asTenured().getTraceKind();
}
if (nurseryCellIsString()) {
return JS::TraceKind::String;
}
return JS::TraceKind::Object;
}
/* static */ MOZ_ALWAYS_INLINE bool Cell::needWriteBarrierPre(JS::Zone* zone) {
return JS::shadow::Zone::asShadowZone(zone)->needsIncrementalBarrier();
}
/* static */ MOZ_ALWAYS_INLINE TenuredCell* TenuredCell::fromPointer(
void* ptr) {
MOZ_ASSERT(static_cast<TenuredCell*>(ptr)->isTenured());
return static_cast<TenuredCell*>(ptr);
}
/* static */ MOZ_ALWAYS_INLINE const TenuredCell* TenuredCell::fromPointer(
const void* ptr) {
MOZ_ASSERT(static_cast<const TenuredCell*>(ptr)->isTenured());
return static_cast<const TenuredCell*>(ptr);
}
bool TenuredCell::isMarkedAny() const {
MOZ_ASSERT(arena()->allocated());
return chunk()->bitmap.isMarkedAny(this);
}
bool TenuredCell::isMarkedBlack() const {
MOZ_ASSERT(arena()->allocated());
return chunk()->bitmap.isMarkedBlack(this);
}
bool TenuredCell::isMarkedGray() const {
MOZ_ASSERT(arena()->allocated());
return chunk()->bitmap.isMarkedGray(this);
}
bool TenuredCell::markIfUnmarked(MarkColor color /* = Black */) const {
return chunk()->bitmap.markIfUnmarked(this, color);
}
void TenuredCell::markBlack() const { chunk()->bitmap.markBlack(this); }
void TenuredCell::copyMarkBitsFrom(const TenuredCell* src) {
ChunkBitmap& bitmap = chunk()->bitmap;
bitmap.copyMarkBit(this, src, ColorBit::BlackBit);
bitmap.copyMarkBit(this, src, ColorBit::GrayOrBlackBit);
}
void TenuredCell::unmark() { chunk()->bitmap.unmark(this); }
inline Arena* TenuredCell::arena() const {
MOZ_ASSERT(isTenured());
uintptr_t addr = address();
addr &= ~ArenaMask;
return reinterpret_cast<Arena*>(addr);
}
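// Editorial note: arena() uses the same masking trick as Cell::chunk()
// above, but at Arena granularity: tenured cells live inside
// ArenaSize-aligned Arenas, so clearing the ArenaMask bits of a cell's
// address recovers the owning Arena header.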
AllocKind TenuredCell::getAllocKind() const { return arena()->getAllocKind(); }
JS::TraceKind TenuredCell::getTraceKind() const {
return MapAllocToTraceKind(getAllocKind());
}
JS::Zone* TenuredCell::zone() const {
JS::Zone* zone = arena()->zone;
MOZ_ASSERT(CurrentThreadCanAccessZone(zone));
return zone;
}
JS::Zone* TenuredCell::zoneFromAnyThread() const { return arena()->zone; }
bool TenuredCell::isInsideZone(JS::Zone* zone) const {
return zone == arena()->zone;
}
/* static */ MOZ_ALWAYS_INLINE void TenuredCell::readBarrier(
TenuredCell* thing) {
MOZ_ASSERT(!CurrentThreadIsIonCompiling());
MOZ_ASSERT(thing);
MOZ_ASSERT(CurrentThreadCanAccessZone(thing->zoneFromAnyThread()));
// Barriers should not be triggered on main thread while collecting.
MOZ_ASSERT_IF(CurrentThreadCanAccessRuntime(thing->runtimeFromAnyThread()),
!JS::RuntimeHeapIsCollecting());
JS::shadow::Zone* shadowZone = thing->shadowZoneFromAnyThread();
if (shadowZone->needsIncrementalBarrier()) {
// Barriers are only enabled on the main thread and are disabled while
// collecting.
MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));
Cell* tmp = thing;
TraceManuallyBarrieredGenericPointerEdge(shadowZone->barrierTracer(), &tmp,
"read barrier");
MOZ_ASSERT(tmp == thing);
}
if (thing->isMarkedGray()) {
// There shouldn't be anything marked gray unless we're on the main thread.
MOZ_ASSERT(CurrentThreadCanAccessRuntime(thing->runtimeFromAnyThread()));
if (!JS::RuntimeHeapIsCollecting()) {
JS::UnmarkGrayGCThingRecursively(
JS::GCCellPtr(thing, thing->getTraceKind()));
}
}
}
void AssertSafeToSkipBarrier(TenuredCell* thing);
/* static */ MOZ_ALWAYS_INLINE void TenuredCell::writeBarrierPre(
TenuredCell* thing) {
MOZ_ASSERT(!CurrentThreadIsIonCompiling());
if (!thing) {
return;
}
#ifdef JS_GC_ZEAL
// When verifying pre-barriers we need to switch on all barriers, even
// those on the Atoms Zone. Normally, we never enter a parse task when
// collecting in the atoms zone, so we could simply filter out atoms below.
// Unfortunately, if we did that when verifying pre-barriers, we'd never be
// able to handle off-thread parse tasks at all, as we switch on the verifier
// any time we're not doing GC. This would cause us to deadlock, as off-thread
// parsing is meant to resume after GC work completes. Instead we filter out
// any off-thread barriers that reach us and assert that they would normally
// not be possible.
if (!CurrentThreadCanAccessRuntime(thing->runtimeFromAnyThread())) {
AssertSafeToSkipBarrier(thing);
return;
}
#endif
JS::shadow::Zone* shadowZone = thing->shadowZoneFromAnyThread();
if (shadowZone->needsIncrementalBarrier()) {
MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));
Cell* tmp = thing;
TraceManuallyBarrieredGenericPointerEdge(shadowZone->barrierTracer(), &tmp,
"pre barrier");
MOZ_ASSERT(tmp == thing);
}
}
static MOZ_ALWAYS_INLINE void AssertValidToSkipBarrier(TenuredCell* thing) {
MOZ_ASSERT(!IsInsideNursery(thing));
MOZ_ASSERT_IF(
thing,
MapAllocToTraceKind(thing->getAllocKind()) != JS::TraceKind::Object &&
MapAllocToTraceKind(thing->getAllocKind()) != JS::TraceKind::String);
}
/* static */ MOZ_ALWAYS_INLINE void TenuredCell::writeBarrierPost(
void* cellp, TenuredCell* prior, TenuredCell* next) {
AssertValidToSkipBarrier(next);
}
#ifdef DEBUG
/* static */ void Cell::assertThingIsNotGray(Cell* cell) {
JS::AssertCellIsNotGray(cell);
}
bool Cell::isAligned() const {
if (!isTenured()) {
return true;
}
return asTenured().isAligned();
}
bool TenuredCell::isAligned() const {
return Arena::isAligned(address(), arena()->getThingSize());
}
#endif
} /* namespace gc */
} /* namespace js */
#endif /* gc_Cell_h */