/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
* vim: set ts=8 sts=2 et sw=2 tw=80:
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/*
* Implementation of compacting GC.
*/
#include "mozilla/Maybe.h"
#include "debugger/DebugAPI.h"
#include "gc/ArenaList.h"
#include "gc/GCInternals.h"
#include "gc/GCLock.h"
#include "gc/ParallelWork.h"
#include "gc/Zone.h"
#include "jit/JitCode.h"
#include "jit/JitRuntime.h"
#include "jit/JitZone.h"
#include "js/GCAPI.h"
#include "vm/HelperThreads.h"
#include "vm/Realm.h"
#include "wasm/WasmGcObject.h"
#include "gc/Heap-inl.h"
#include "gc/Marking-inl.h"
#include "gc/PrivateIterators-inl.h"
#include "gc/StableCellHasher-inl.h"
#include "gc/TraceMethods-inl.h"
#include "vm/GeckoProfiler-inl.h"
using namespace js;
using namespace js::gc;
using mozilla::Maybe;
using JS::SliceBudget;
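// The atoms zone is never relocated; all other zones can be compacted.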
bool GCRuntime::canRelocateZone(Zone* zone) const {
return !zone->isAtomsZone();
}
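// Gather the zones that are eligible for compacting into zonesToMaybeCompact
// and reset the per-collection compaction state.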
void GCRuntime::beginCompactPhase() {
MOZ_ASSERT(!isBackgroundSweeping());
assertBackgroundSweepingFinished();
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT);
MOZ_ASSERT(zonesToMaybeCompact.ref().isEmpty());
for (GCZonesIter zone(this); !zone.done(); zone.next()) {
if (canRelocateZone(zone)) {
zonesToMaybeCompact.ref().append(zone);
}
}
startedCompacting = true;
zonesCompacted = 0;
#ifdef DEBUG
AutoLockGC lock(this);
MOZ_ASSERT(!relocatedArenasToRelease);
#endif
}
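// Compact as many zones as the slice budget allows: relocate each zone's
// arenas, update pointers within the zone, then update runtime-wide pointers
// and release (or, in debug builds, protect) the evacuated arenas.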
IncrementalProgress GCRuntime::compactPhase(JS::GCReason reason,
SliceBudget& sliceBudget,
AutoGCSession& session) {
assertBackgroundSweepingFinished();
MOZ_ASSERT(startedCompacting);
AutoMajorGCProfilerEntry s(this);
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT);
// TODO: JSScripts can move. If the sampler interrupts the GC in the
// middle of relocating an arena, invalid JSScript pointers may be
// accessed. Suppress all sampling until a finer-grained solution can be
// found. See bug 1295775.
AutoSuppressProfilerSampling suppressSampling(rt->mainContextFromOwnThread());
ZoneList relocatedZones;
Arena* relocatedArenas = nullptr;
while (!zonesToMaybeCompact.ref().isEmpty()) {
Zone* zone = zonesToMaybeCompact.ref().front();
zonesToMaybeCompact.ref().removeFront();
MOZ_ASSERT(nursery().isEmpty());
zone->changeGCState(Zone::Finished, Zone::Compact);
if (relocateArenas(zone, reason, relocatedArenas, sliceBudget)) {
updateZonePointersToRelocatedCells(zone);
relocatedZones.append(zone);
zonesCompacted++;
} else {
zone->changeGCState(Zone::Compact, Zone::Finished);
}
if (sliceBudget.isOverBudget()) {
break;
}
}
if (!relocatedZones.isEmpty()) {
updateRuntimePointersToRelocatedCells(session);
do {
Zone* zone = relocatedZones.front();
relocatedZones.removeFront();
zone->changeGCState(Zone::Compact, Zone::Finished);
} while (!relocatedZones.isEmpty());
}
clearRelocatedArenas(relocatedArenas, reason);
#ifdef DEBUG
protectOrReleaseRelocatedArenas(relocatedArenas, reason);
#else
releaseRelocatedArenas(relocatedArenas);
#endif
// Clear caches that can contain cell pointers.
rt->caches().purgeForCompaction();
#ifdef DEBUG
checkHashTablesAfterMovingGC();
#endif
return zonesToMaybeCompact.ref().isEmpty() ? Finished : NotFinished;
}
void GCRuntime::endCompactPhase() { startedCompacting = false; }
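// Relocating every arena, rather than a heuristically chosen subset, is only
// done for debugging (zeal) collections.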
static bool ShouldRelocateAllArenas(JS::GCReason reason) {
return reason == JS::GCReason::DEBUG_GC;
}
/*
* Choose which arenas to relocate all cells from.
*
* Returns a pair of arena pointers delimiting the range of arenas to relocate,
* suitable for passing to removeRange(), or null pointers if nothing can be
* relocated.
*/
std::pair<Arena*, Arena*> ArenaList::pickArenasToRelocate(
AllocKind kind, size_t& arenaTotalOut, size_t& relocTotalOut) {
// Relocate the greatest number of arenas such that the number of used cells
// in relocated arenas is less than or equal to the number of free cells in
// unrelocated arenas. In other words we only relocate cells we can move
// into existing arenas, and we choose the least full arenas to relocate.
//
// This is made easier by the fact that the start of the arena list has been
// sorted in descending order of number of used cells, so we will always
// relocate a sublist of the arena list. All we need to do is find the points
// at which to start and end relocating.
if (!hasNonFullArenas()) {
// All arenas are full so no compacting is possible.
return {nullptr, nullptr};
}
// Count non-full and full arenas and total used cells, and find the last
// non-full arena.
size_t fullArenaCount = 0; // Number of full arenas (not relocated).
size_t nonFullArenaCount = 0; // Number of non-full arenas to consider.
size_t totalUsedCells = 0; // Total used cells in non-full arenas.
Arena* lastNonFullArena = nullptr;
Iterator arena = iter();
for (; !arena.done(); arena.next()) {
if (arena->isFull()) {
break;
}
MOZ_ASSERT(!arena->isFull());
nonFullArenaCount++;
totalUsedCells += arena->countUsedCells();
lastNonFullArena = arena.get();
}
for (; !arena.done(); arena.next()) {
// It's likely that the final arena is not full but we ignore that.
fullArenaCount++;
}
size_t previousFreeCells = 0; // Total free cells before arena.
size_t followingUsedCells =
totalUsedCells; // Total used cells in following non-full arenas.
size_t relocCount = nonFullArenaCount; // Number of arenas to relocate.
Arena* prev = nullptr; // The previous arena considered.
const size_t cellsPerArena = Arena::thingsPerArena(kind);
// Examine the initial part of the list containing non-full arenas.
for (arena = iter(); prev != lastNonFullArena;
prev = arena.get(), arena.next()) {
if (followingUsedCells <= previousFreeCells) {
// We have found the point where cells in the following non-full arenas
// can be relocated into the free space in previous arenas. We're done.
break;
}
size_t freeCells = arena->countFreeCells();
MOZ_ASSERT(freeCells != 0);
size_t usedCells = cellsPerArena - freeCells;
followingUsedCells -= usedCells;
previousFreeCells += freeCells;
MOZ_ASSERT(relocCount != 0);
relocCount--;
}
MOZ_ASSERT((relocCount == 0) == (prev == lastNonFullArena));
arenaTotalOut += fullArenaCount + nonFullArenaCount;
relocTotalOut += relocCount;
if (relocCount == 0) {
return {nullptr, nullptr};
}
return {prev, lastNonFullArena};
}
#ifdef DEBUG
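// Debug helper: returns true if |ptr| lies within the |length| bytes starting
// at |start|.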
inline bool PtrIsInRange(const void* ptr, const void* start, size_t length) {
return uintptr_t(ptr) - uintptr_t(start) < length;
}
#endif
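// Move a single cell: allocate a replacement in the same zone, copy the
// contents across, fix up any interior pointers that refer back into the cell
// itself, and leave a forwarding pointer in the old location.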
static void RelocateCell(Zone* zone, TenuredCell* src, AllocKind thingKind,
size_t thingSize) {
JS::AutoSuppressGCAnalysis nogc;
// Allocate a new cell.
MOZ_ASSERT(zone == src->zone());
TenuredCell* dst =
reinterpret_cast<TenuredCell*>(AllocateTenuredCellInGC(zone, thingKind));
// Copy source cell contents to destination.
memcpy(dst, src, thingSize);
// Move any uid attached to the object.
gc::TransferUniqueId(dst, src);
if (IsObjectAllocKind(thingKind)) {
auto* srcObj = static_cast<JSObject*>(static_cast<Cell*>(src));
auto* dstObj = static_cast<JSObject*>(static_cast<Cell*>(dst));
if (srcObj->is<NativeObject>()) {
NativeObject* srcNative = &srcObj->as<NativeObject>();
NativeObject* dstNative = &dstObj->as<NativeObject>();
// Fixup the pointer to inline object elements if necessary.
if (srcNative->hasFixedElements()) {
uint32_t numShifted =
srcNative->getElementsHeader()->numShiftedElements();
dstNative->setFixedElements(numShifted);
}
} else if (srcObj->is<ProxyObject>()) {
if (srcObj->as<ProxyObject>().usingInlineValueArray()) {
dstObj->as<ProxyObject>().setInlineValueArray();
}
}
// Call object moved hook if present.
if (JSObjectMovedOp op = srcObj->getClass()->extObjectMovedOp()) {
op(dstObj, srcObj);
}
MOZ_ASSERT_IF(
dstObj->is<NativeObject>(),
!PtrIsInRange(
(const Value*)dstObj->as<NativeObject>().getDenseElements(), src,
thingSize));
}
// Copy the mark bits.
dst->copyMarkBitsFrom(src);
// Poison the source cell contents except for the forwarding flag and pointer,
// which will be stored in the first word. We can't do this for native objects
// with fixed elements because this would overwrite the element flags, and
// these are needed when updating COW elements referred to by other objects. We
// also skip poisoning for wasm array objects whose data is stored inline.
#ifdef DEBUG
JSObject* srcObj = IsObjectAllocKind(thingKind)
? static_cast<JSObject*>(static_cast<Cell*>(src))
: nullptr;
bool doNotPoison =
srcObj && ((srcObj->is<NativeObject>() &&
srcObj->as<NativeObject>().hasFixedElements()) ||
(srcObj->is<WasmArrayObject>() &&
srcObj->as<WasmArrayObject>().isDataInline()));
if (!doNotPoison) {
AlwaysPoison(reinterpret_cast<uint8_t*>(src) + sizeof(uintptr_t),
JS_MOVED_TENURED_PATTERN, thingSize - sizeof(uintptr_t),
MemCheckKind::MakeNoAccess);
}
#endif
// Mark source cell as forwarded and leave a pointer to the destination.
RelocationOverlay::forwardCell(src, dst);
}
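// Relocate every cell in |arena|, charging one step per cell against the slice
// budget. In debug builds, verify afterwards that each cell was forwarded with
// its mark bits preserved.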
static void RelocateArena(Arena* arena, SliceBudget& sliceBudget) {
MOZ_ASSERT(arena->allocated());
MOZ_ASSERT(!arena->onDelayedMarkingList());
MOZ_ASSERT(arena->bufferedCells()->isEmpty());
Zone* zone = arena->zone();
AllocKind thingKind = arena->getAllocKind();
size_t thingSize = arena->getThingSize();
for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
RelocateCell(zone, cell, thingKind, thingSize);
sliceBudget.step();
}
#ifdef DEBUG
for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
TenuredCell* src = cell;
MOZ_ASSERT(src->isForwarded());
TenuredCell* dest = Forwarded(src);
MOZ_ASSERT(src->isMarkedBlack() == dest->isMarkedBlack());
MOZ_ASSERT(src->isMarkedGray() == dest->isMarkedGray());
}
#endif
}
/*
* Relocate all arenas identified by pickArenasToRelocate: for each arena,
* relocate each cell within it, then add it to a list of relocated arenas.
*/
Arena* ArenaList::relocateArenas(Arena* toRelocate, Arena* relocated,
SliceBudget& sliceBudget,
gcstats::Statistics& stats) {
while (Arena* arena = toRelocate) {
toRelocate = arena->next;
RelocateArena(arena, sliceBudget);
// Prepend to list of relocated arenas
arena->next = relocated;
relocated = arena;
stats.count(gcstats::COUNT_ARENA_RELOCATED);
}
return relocated;
}
// Skip compacting zones unless we can free a certain proportion of their GC
// heap memory.
static const double MIN_ZONE_RECLAIM_PERCENT = 2.0;
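// Relocate a zone if doing so would reclaim at least MIN_ZONE_RECLAIM_PERCENT
// of its arenas, or unconditionally if the collection was triggered by OOM.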
static bool ShouldRelocateZone(size_t arenaCount, size_t relocCount,
JS::GCReason reason) {
if (relocCount == 0) {
return false;
}
if (IsOOMReason(reason)) {
return true;
}
double relocFraction = double(relocCount) / double(arenaCount);
return relocFraction * 100.0 >= MIN_ZONE_RECLAIM_PERCENT;
}
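// Compute the set of alloc kinds that participate in compacting.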
static AllocKinds CompactingAllocKinds() {
AllocKinds result;
for (AllocKind kind : AllAllocKinds()) {
if (IsCompactingKind(kind)) {
result += kind;
}
}
return result;
}
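// Pick and relocate arenas for each compacting alloc kind in this zone's arena
// lists. Returns false if the zone does not meet the relocation threshold and
// was left uncompacted.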
bool ArenaLists::relocateArenas(Arena*& relocatedListOut, JS::GCReason reason,
SliceBudget& sliceBudget,
gcstats::Statistics& stats) {
// This is only called from the main thread while we are doing a GC, so
// there is no need to lock.
MOZ_ASSERT(CurrentThreadCanAccessRuntime(runtime()));
MOZ_ASSERT(runtime()->gc.isHeapCompacting());
MOZ_ASSERT(!runtime()->gc.isBackgroundSweeping());
// Relocate all compatible kinds
AllocKinds allocKindsToRelocate = CompactingAllocKinds();
// Clear all the free lists.
clearFreeLists();
if (ShouldRelocateAllArenas(reason)) {
zone_->prepareForCompacting();
for (auto kind : allocKindsToRelocate) {
ArenaList& al = arenaList(kind);
Arena* allArenas = al.release();
relocatedListOut =
al.relocateArenas(allArenas, relocatedListOut, sliceBudget, stats);
}
} else {
size_t arenaCount = 0;
size_t relocCount = 0;
AllAllocKindArray<std::pair<Arena*, Arena*>> rangeToRelocate;
for (auto kind : allocKindsToRelocate) {
rangeToRelocate[kind] =
arenaList(kind).pickArenasToRelocate(kind, arenaCount, relocCount);
}
if (!ShouldRelocateZone(arenaCount, relocCount, reason)) {
return false;
}
zone_->prepareForCompacting();
for (auto kind : allocKindsToRelocate) {
if (rangeToRelocate[kind].first) {
ArenaList& al = arenaList(kind);
const auto& range = rangeToRelocate[kind];
Arena* arenas = al.removeRange(range.first, range.second);
relocatedListOut =
al.relocateArenas(arenas, relocatedListOut, sliceBudget, stats);
}
}
}
return true;
}
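// Relocate arenas for a single zone, cancelling off-thread Ion compilation
// first.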
bool GCRuntime::relocateArenas(Zone* zone, JS::GCReason reason,
Arena*& relocatedListOut,
SliceBudget& sliceBudget) {
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT_MOVE);
MOZ_ASSERT(!zone->isPreservingCode());
MOZ_ASSERT(canRelocateZone(zone));
js::CancelOffThreadIonCompile(rt, JS::Zone::Compact);
if (!zone->arenas.relocateArenas(relocatedListOut, reason, sliceBudget,
stats())) {
return false;
}
#ifdef DEBUG
// Check that we did as much compaction as we should have. There
// should always be less than one arena's worth of free cells.
for (auto kind : CompactingAllocKinds()) {
ArenaList& al = zone->arenas.arenaList(kind);
size_t freeCells = 0;
for (auto arena = al.iter(); !arena.done(); arena.next()) {
if (arena->isFull()) {
break;
}
freeCells += arena->countFreeCells();
}
MOZ_ASSERT(freeCells < Arena::thingsPerArena(kind));
}
#endif
return true;
}
MovingTracer::MovingTracer(JSRuntime* rt)
: GenericTracerImpl(rt, JS::TracerKind::Moving,
JS::WeakMapTraceAction::TraceKeysAndValues) {}
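// Tracer callback used while updating pointers: if the target cell has been
// relocated, replace the edge with its forwarded address.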
template <typename T>
inline void MovingTracer::onEdge(T** thingp, const char* name) {
T* thing = *thingp;
if (thing->runtimeFromAnyThread() == runtime() && IsForwarded(thing)) {
*thingp = Forwarded(thing);
}
}
void Zone::prepareForCompacting() {
JS::GCContext* gcx = runtimeFromMainThread()->gcContext();
discardJitCode(gcx);
}
void GCRuntime::sweepZoneAfterCompacting(MovingTracer* trc, Zone* zone) {
MOZ_ASSERT(zone->isGCCompacting());
zone->traceWeakMaps(trc);
zone->sweepObjectsWithWeakPointers(trc);
traceWeakFinalizationObserverEdges(trc, zone);
for (auto* cache : zone->weakCaches()) {
cache->traceWeak(trc, JS::detail::WeakCacheBase::DontLockStoreBuffer);
}
if (jit::JitZone* jitZone = zone->jitZone()) {
jitZone->traceWeak(trc, zone);
}
for (CompartmentsInZoneIter c(zone); !c.done(); c.next()) {
c->traceWeakNativeIterators(trc);
for (RealmsInCompartmentIter r(c); !r.done(); r.next()) {
r->traceWeakSavedStacks(trc);
r->traceWeakGlobalEdge(trc);
r->traceWeakDebugEnvironmentEdges(trc);
}
}
}
template <typename T>
static inline void UpdateCellPointers(MovingTracer* trc, T* cell) {
// We only update unmoved GC things or the new copy of moved GC things, never
// the old copy. Updating the old copy could clear the forwarded flag, which
// could lead to pointers to the old copy never being updated.
MOZ_ASSERT(!cell->isForwarded());
cell->fixupAfterMovingGC();
cell->traceChildren(trc);
}
template <typename T>
static void UpdateArenaPointersTyped(MovingTracer* trc, Arena* arena) {
for (ArenaCellIterUnderGC cell(arena); !cell.done(); cell.next()) {
UpdateCellPointers(trc, cell.as<T>());
}
}
static bool CanUpdateKindInBackground(AllocKind kind) {
// We try to update as many GC things in parallel as we can, but there are
// kinds for which this might not be safe:
// - we assume JSObjects that are foreground finalized are not safe to
// update in parallel
// - updating a SharedPropMap touches child maps in
// SharedPropMap::fixupAfterMovingGC
return js::gc::IsBackgroundFinalized(kind) && !IsShapeAllocKind(kind) &&
kind != AllocKind::BASE_SHAPE;
}
/*
* Update the internal pointers for all cells in an arena.
*/
static void UpdateArenaPointers(MovingTracer* trc, Arena* arena) {
AllocKind kind = arena->getAllocKind();
MOZ_ASSERT_IF(!CanUpdateKindInBackground(kind),
CurrentThreadCanAccessRuntime(trc->runtime()));
switch (kind) {
#define EXPAND_CASE(allocKind, traceKind, type, sizedType, bgFinal, nursery, \
compact) \
case AllocKind::allocKind: \
UpdateArenaPointersTyped<type>(trc, arena); \
return;
FOR_EACH_ALLOCKIND(EXPAND_CASE)
#undef EXPAND_CASE
default:
MOZ_CRASH("Invalid alloc kind for UpdateArenaPointers");
}
}
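// A contiguous range of arenas within an arena list, processed as a single
// unit of pointer-update work.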
struct ArenaListSegment {
Arena* begin;
Arena* end;
};
/*
* Update the internal pointers for all arenas in a segment of an arena list.
*
* Returns the number of steps to count against the slice budget.
*/
static size_t UpdateArenaListSegmentPointers(GCRuntime* gc,
const ArenaListSegment& arenas) {
MOZ_ASSERT(arenas.begin);
MovingTracer trc(gc->rt);
size_t count = 0;
Arena* arena = arenas.begin;
do {
UpdateArenaPointers(&trc, arena);
count++;
arena = arena->next;
} while (arena != arenas.end);
return count * 256;
}
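// Produces successive ArenaListSegments covering a zone's arenas for the
// requested alloc kinds, in chunks of at most MaxArenasToProcess arenas.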
class ArenasToUpdate {
// Maximum number of arenas to update in one block.
#ifdef DEBUG
static const unsigned MaxArenasToProcess = 16;
#else
static const unsigned MaxArenasToProcess = 256;
#endif
public:
ArenasToUpdate(Zone* zone, const AllocKinds& kinds)
: kinds(kinds), zone(zone) {
settle();
}
bool done() const {
MOZ_ASSERT_IF(!segmentBegin, endOfArenaList);
return !segmentBegin;
}
ArenaListSegment get() const {
MOZ_ASSERT(!done());
return {segmentBegin, segmentEnd};
}
void next();
private:
AllocKinds kinds; // Selects which thing kinds to update.
Zone* zone; // Zone to process.
AllocKind kind = AllocKind::FIRST; // Current alloc kind to process.
Arena* segmentBegin = nullptr;
Arena* segmentEnd = nullptr;
bool endOfArenaList = true;
static AllocKind nextAllocKind(AllocKind i) {
return AllocKind(uint8_t(i) + 1);
}
void settle();
void findSegmentEnd();
};
void ArenasToUpdate::settle() {
// Called when we have set |kind| to a new kind. Sets |segmentBegin| to the
// first arena of the next segment to process, or leaves it null if there are
// no more arenas to update.
MOZ_ASSERT(!segmentBegin);
MOZ_ASSERT(endOfArenaList);
for (; kind < AllocKind::LIMIT; kind = nextAllocKind(kind)) {
if (!kinds.contains(kind)) {
continue;
}
Arena* arena = zone->arenas.getFirstArena(kind);
if (arena) {
segmentBegin = arena;
endOfArenaList = false;
findSegmentEnd();
break;
}
}
}
void ArenasToUpdate::findSegmentEnd() {
// Take up to MaxArenasToProcess arenas from the list starting at
// |segmentBegin| and set |segmentEnd| and |endOfArenaList|.
MOZ_ASSERT(segmentBegin);
MOZ_ASSERT(!endOfArenaList);
Arena* arena = segmentBegin;
Arena* firstArena = zone->arenas.getFirstArena(kind);
for (size_t i = 0; i < MaxArenasToProcess; i++) {
arena = arena->next;
if (arena == firstArena) {
// We have reached the end of the circular linked list.
endOfArenaList = true;
break;
}
}
segmentEnd = arena;
}
void ArenasToUpdate::next() {
MOZ_ASSERT(!done());
if (!endOfArenaList) {
segmentBegin = segmentEnd;
findSegmentEnd();
return;
}
segmentBegin = nullptr;
kind = nextAllocKind(kind);
settle();
}
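// Filter |kinds| down to those that must be updated on the main thread.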
static AllocKinds ForegroundUpdateKinds(AllocKinds kinds) {
AllocKinds result;
for (AllocKind kind : kinds) {
if (!CanUpdateKindInBackground(kind)) {
result += kind;
}
}
return result;
}
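// Update cell pointers for one zone: kinds that are safe to process off the
// main thread are handed to parallel helper tasks, while the remaining kinds
// are updated here on the main thread.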
void GCRuntime::updateCellPointers(Zone* zone, AllocKinds kinds) {
AllocKinds fgKinds = ForegroundUpdateKinds(kinds);
AllocKinds bgKinds = kinds - fgKinds;
ArenasToUpdate fgArenas(zone, fgKinds);
ArenasToUpdate bgArenas(zone, bgKinds);
AutoLockHelperThreadState lock;
AutoRunParallelWork bgTasks(this, UpdateArenaListSegmentPointers,
gcstats::PhaseKind::COMPACT_UPDATE_CELLS,
GCUse::Unspecified, bgArenas,
SliceBudget::unlimited(), lock);
AutoUnlockHelperThreadState unlock(lock);
for (; !fgArenas.done(); fgArenas.next()) {
UpdateArenaListSegmentPointers(this, fgArenas.get());
}
}
// After cells have been relocated, any pointers to a cell's old location must
// be updated to point to the new location. This happens by iterating through
// all cells in the heap and tracing their children (non-recursively) to update
// them.
//
// This is complicated by the fact that updating a GC thing sometimes depends on
// making use of other GC things. After a moving GC these things may not be in
// a valid state since they may contain pointers which have not been updated
// yet.
//
// The main dependencies are:
//
// - Updating a JSObject makes use of its shape
// - Updating a typed object makes use of its type descriptor object
//
// This means we require at least three phases for update:
//
// 1) shapes
// 2) typed object type descriptor objects
// 3) all other objects
//
// Also, there can be data races calling IsForwarded() on the new location of a
// cell whose first word is being updated in parallel on another thread. The
// easiest way to avoid this is to not store a GC pointer in the first word of a
// cell. Otherwise this can be avoided by updating different kinds of cell in
// different phases.
//
// Since we want to minimize the number of phases, arrange kinds into three
// arbitrary phases.
static constexpr AllocKinds UpdatePhaseOne{AllocKind::SCRIPT,
AllocKind::BASE_SHAPE,
AllocKind::SHAPE,
AllocKind::STRING,
AllocKind::JITCODE,
AllocKind::REGEXP_SHARED,
AllocKind::SCOPE,
AllocKind::GETTER_SETTER,
AllocKind::COMPACT_PROP_MAP,
AllocKind::NORMAL_PROP_MAP,
AllocKind::DICT_PROP_MAP};
// UpdatePhaseTwo is typed object descriptor objects.
static constexpr AllocKinds UpdatePhaseThree{AllocKind::FUNCTION,
AllocKind::FUNCTION_EXTENDED,
AllocKind::OBJECT0,
AllocKind::OBJECT0_BACKGROUND,
AllocKind::OBJECT2,
AllocKind::OBJECT2_BACKGROUND,
AllocKind::ARRAYBUFFER4,
AllocKind::OBJECT4,
AllocKind::OBJECT4_BACKGROUND,
AllocKind::ARRAYBUFFER8,
AllocKind::OBJECT8,
AllocKind::OBJECT8_BACKGROUND,
AllocKind::ARRAYBUFFER12,
AllocKind::OBJECT12,
AllocKind::OBJECT12_BACKGROUND,
AllocKind::ARRAYBUFFER16,
AllocKind::OBJECT16,
AllocKind::OBJECT16_BACKGROUND};
void GCRuntime::updateAllCellPointers(MovingTracer* trc, Zone* zone) {
updateCellPointers(zone, UpdatePhaseOne);
updateCellPointers(zone, UpdatePhaseThree);
}
/*
* Update pointers to relocated cells in a single zone by doing a traversal of
* that zone's arenas and calling per-zone sweep hooks.
*
* The latter is necessary to update weak references which are not marked as
* part of the traversal.
*/
void GCRuntime::updateZonePointersToRelocatedCells(Zone* zone) {
MOZ_ASSERT(!rt->isBeingDestroyed());
MOZ_ASSERT(zone->isGCCompacting());
AutoTouchingGrayThings tgt;
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::COMPACT_UPDATE);
MovingTracer trc(rt);
zone->fixupAfterMovingGC();
zone->fixupScriptMapsAfterMovingGC(&trc);
// Fixup compartment global pointers as these get accessed during marking.
for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
comp->fixupAfterMovingGC(&trc);
}
zone->externalStringCache().purge();
zone->functionToStringCache().purge();
zone->shapeZone().purgeShapeCaches(rt->gcContext());
rt->caches().stringToAtomCache.purge();
// Iterate through all cells that can contain relocatable pointers to update
// them. Since updating each cell is independent we try to parallelize this
// as much as possible.
updateAllCellPointers(&trc, zone);
// Sweep everything to fix up weak pointers.
sweepZoneAfterCompacting(&trc, zone);
// Call callbacks to get the rest of the system to fixup other untraced
// pointers.
for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
callWeakPointerCompartmentCallbacks(&trc, comp);
}
}
/*
* Update runtime-wide pointers to relocated cells.
*/
void GCRuntime::updateRuntimePointersToRelocatedCells(AutoGCSession& session) {
MOZ_ASSERT(!rt->isBeingDestroyed());
gcstats::AutoPhase ap1(stats(), gcstats::PhaseKind::COMPACT_UPDATE);
MovingTracer trc(rt);
Zone::fixupAllCrossCompartmentWrappersAfterMovingGC(&trc);
rt->geckoProfiler().fixupStringsMapAfterMovingGC();
// Mark roots to update them.
traceRuntimeForMajorGC(&trc, session);
jit::UpdateJitActivationsForCompactingGC(rt);
{
gcstats::AutoPhase ap2(stats(), gcstats::PhaseKind::MARK_ROOTS);
DebugAPI::traceAllForMovingGC(&trc);
DebugAPI::traceCrossCompartmentEdges(&trc);
// Mark all gray roots.
traceEmbeddingGrayRoots(&trc);
Compartment::traceIncomingCrossCompartmentEdgesForZoneGC(
&trc, Compartment::GrayEdges);
}
// Sweep everything to fix up weak pointers.
jit::JitRuntime::TraceWeakJitcodeGlobalTable(rt, &trc);
for (JS::detail::WeakCacheBase* cache : rt->weakCaches()) {
cache->traceWeak(&trc, JS::detail::WeakCacheBase::DontLockStoreBuffer);
}
if (rt->hasJitRuntime() && rt->jitRuntime()->hasInterpreterEntryMap()) {
rt->jitRuntime()->getInterpreterEntryMap()->updateScriptsAfterMovingGC();
}
// Type inference may put more blocks here to free.
{
AutoLockHelperThreadState lock;
lifoBlocksToFree.ref().freeAll();
}
// Call callbacks to get the rest of the system to fixup other untraced
// pointers.
callWeakPointerZonesCallbacks(&trc);
}
void GCRuntime::clearRelocatedArenas(Arena* arenaList, JS::GCReason reason) {
AutoLockGC lock(this);
clearRelocatedArenasWithoutUnlocking(arenaList, reason, lock);
}
void GCRuntime::clearRelocatedArenasWithoutUnlocking(Arena* arenaList,
JS::GCReason reason,
const AutoLockGC& lock) {
// Clear the relocated arenas, now containing only forwarding pointers
while (arenaList) {
Arena* arena = arenaList;
arenaList = arenaList->next;
// Clear the mark bits
arena->unmarkAll();
// Mark arena as empty
arena->setAsFullyUnused();
#ifdef DEBUG
// The cell contents have been partially marked as no-access in RelocateCell,
// so we need to mark the region as undefined again so we can poison it.
SetMemCheckKind(reinterpret_cast<void*>(arena->thingsStart()),
arena->getThingsSpan(), MemCheckKind::MakeUndefined);
#endif
AlwaysPoison(reinterpret_cast<void*>(arena->thingsStart()),
JS_MOVED_TENURED_PATTERN, arena->getThingsSpan(),
MemCheckKind::MakeNoAccess);
// Don't count emptied arenas as being freed by the current GC:
// - if we purposely moved everything to new arenas, as that will already
// have allocated a similar number of arenas. (This only happens for
// collections triggered by GC zeal.)
// - if they were allocated since the start of the GC.
bool allArenasRelocated = ShouldRelocateAllArenas(reason);
bool updateRetainedSize = !allArenasRelocated && !arena->isNewlyCreated();
arena->zone()->gcHeapSize.removeBytes(ArenaSize, updateRetainedSize,
heapSize);
// Release the arena but don't return it to the chunk yet.
arena->release(this, &lock);
}
}
#ifdef DEBUG
// In debug mode we don't always release relocated arenas straight away.
// Sometimes we protect them instead and hold onto them until the next GC sweep
// phase to catch any stray pointers to them that didn't get forwarded.
static inline bool CanProtectArenas() {
// On some systems the page size is larger than the size of an arena so we
// can't change the mapping permissions per arena.
return SystemPageSize() <= ArenaSize;
}
static inline bool ShouldProtectRelocatedArenas(JS::GCReason reason) {
// For zeal mode collections we don't release the relocated arenas
// immediately. Instead we protect them and keep them around until the next
// collection so we can catch any stray accesses to them.
return reason == JS::GCReason::DEBUG_GC && CanProtectArenas();
}
void GCRuntime::protectOrReleaseRelocatedArenas(Arena* arenaList,
JS::GCReason reason) {
if (ShouldProtectRelocatedArenas(reason)) {
protectAndHoldArenas(arenaList);
return;
}
releaseRelocatedArenas(arenaList);
}
void GCRuntime::protectAndHoldArenas(Arena* arenaList) {
for (Arena* arena = arenaList; arena;) {
MOZ_ASSERT(!arena->allocated());
Arena* next = arena->next;
if (!next) {
// Prepend to hold list before we protect the memory.
AutoLockGC lock(this);
arena->next = relocatedArenasToRelease;
relocatedArenasToRelease = arenaList;
}
ProtectPages(arena, ArenaSize);
arena = next;
}
}
void GCRuntime::unprotectHeldRelocatedArenas(const AutoLockGC& lock) {
for (Arena* arena = relocatedArenasToRelease; arena; arena = arena->next) {
UnprotectPages(arena, ArenaSize);
MOZ_ASSERT(!arena->allocated());
}
}
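// Unprotect and release the relocated arenas held since a previous zeal
// collection.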
void GCRuntime::releaseHeldRelocatedArenas() {
AutoLockGC lock(this);
unprotectHeldRelocatedArenas(lock);
Arena* arenas = relocatedArenasToRelease;
relocatedArenasToRelease = nullptr;
releaseRelocatedArenasWithoutUnlocking(arenas, lock);
}
void GCRuntime::releaseHeldRelocatedArenasWithoutUnlocking(
const AutoLockGC& lock) {
unprotectHeldRelocatedArenas(lock);
releaseRelocatedArenasWithoutUnlocking(relocatedArenasToRelease, lock);
relocatedArenasToRelease = nullptr;
}
#endif
void GCRuntime::releaseRelocatedArenas(Arena* arenaList) {
AutoLockGC lock(this);
releaseRelocatedArenasWithoutUnlocking(arenaList, lock);
}
void GCRuntime::releaseRelocatedArenasWithoutUnlocking(Arena* arenaList,
const AutoLockGC& lock) {
// Release relocated arenas previously cleared with clearRelocatedArenas().
while (arenaList) {
Arena* arena = arenaList;
arenaList = arenaList->next;
// We already updated the memory accounting so just call
// Chunk::releaseArena.
arena->chunk()->releaseArena(this, arena, lock);
}
}