1 /*
2 * Copyright (C) 2008 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "Dalvik.h"
18 #include "alloc/clz.h"
19 #include "alloc/HeapBitmap.h"
20 #include "alloc/HeapInternal.h"
21 #include "alloc/HeapSource.h"
22 #include "alloc/MarkSweep.h"
23 #include "alloc/Visit.h"
24 #include <limits.h> // for ULONG_MAX
25 #include <sys/mman.h> // for madvise(), mmap()
26 #include <errno.h>
27
28 #define GC_LOG_TAG LOG_TAG "-gc"
29
30 #if LOG_NDEBUG
31 #define LOGV_GC(...) ((void)0)
32 #define LOGD_GC(...) ((void)0)
33 #else
34 #define LOGV_GC(...) LOG(LOG_VERBOSE, GC_LOG_TAG, __VA_ARGS__)
35 #define LOGD_GC(...) LOG(LOG_DEBUG, GC_LOG_TAG, __VA_ARGS__)
36 #endif
37
38 #define LOGI_GC(...) LOG(LOG_INFO, GC_LOG_TAG, __VA_ARGS__)
39 #define LOGW_GC(...) LOG(LOG_WARN, GC_LOG_TAG, __VA_ARGS__)
40 #define LOGE_GC(...) LOG(LOG_ERROR, GC_LOG_TAG, __VA_ARGS__)
41
42 #define LOG_SCAN(...) LOGV_GC("SCAN: " __VA_ARGS__)
43
44 #define ALIGN_UP(x, n) (((size_t)(x) + (n) - 1) & ~((n) - 1))
45 #define ALIGN_UP_TO_PAGE_SIZE(p) ALIGN_UP(p, SYSTEM_PAGE_SIZE)
46
47 /* Do not cast the result of this to a boolean; the only set bit
48 * may be > 1<<8.
49 */
isMarked(const void * obj,const GcMarkContext * ctx)50 static inline long isMarked(const void *obj, const GcMarkContext *ctx)
51 {
52 return dvmHeapBitmapIsObjectBitSet(ctx->bitmap, obj);
53 }
54
createMarkStack(GcMarkStack * stack)55 static bool createMarkStack(GcMarkStack *stack)
56 {
57 const Object **limit;
58 const char *name;
59 size_t size;
60
61 /* Create a stack big enough for the worst possible case,
62 * where the heap is perfectly full of the smallest object.
63 * TODO: be better about memory usage; use a smaller stack with
64 * overflow detection and recovery.
65 */
66 size = dvmHeapSourceGetIdealFootprint() * sizeof(Object*) /
67 (sizeof(Object) + HEAP_SOURCE_CHUNK_OVERHEAD);
68 size = ALIGN_UP_TO_PAGE_SIZE(size);
69 name = "dalvik-mark-stack";
70 limit = dvmAllocRegion(size, PROT_READ | PROT_WRITE, name);
71 if (limit == NULL) {
72 LOGE_GC("Could not mmap %zd-byte ashmem region '%s'", size, name);
73 return false;
74 }
75 stack->limit = limit;
76 stack->base = (const Object **)((uintptr_t)limit + size);
77 stack->top = stack->base;
78 return true;
79 }
80
/* Unmaps the mark stack's backing region and zeroes the descriptor. */
static void destroyMarkStack(GcMarkStack *stack)
{
    size_t length = (uintptr_t)stack->base - (uintptr_t)stack->limit;
    munmap((char *)stack->limit, length);
    memset(stack, 0, sizeof(*stack));
}
87
/* Pushes an object onto the mark stack. The stack grows downward
 * from base toward limit; no overflow check is done here — see
 * createMarkStack() for the worst-case sizing that makes overflow
 * impossible.
 */
#define MARK_STACK_PUSH(stack, obj) \
    do { \
        *--(stack).top = (obj); \
    } while (false)
92
dvmHeapBeginMarkStep(GcMode mode)93 bool dvmHeapBeginMarkStep(GcMode mode)
94 {
95 GcMarkContext *ctx = &gDvm.gcHeap->markContext;
96
97 if (!createMarkStack(&ctx->stack)) {
98 return false;
99 }
100 ctx->finger = NULL;
101 ctx->immuneLimit = dvmHeapSourceGetImmuneLimit(mode);
102 return true;
103 }
104
/* Sets the mark bit for |obj| and returns its previous value; a
 * zero result means the object had not been marked before.
 */
static long setAndReturnMarkBit(GcMarkContext *context, const void *obj)
{
    return dvmHeapBitmapSetAndReturnObjectBit(context->bitmap, obj);
}
109
/* Marks a non-NULL object.
 *
 * Objects below the immune limit are expected to already carry a
 * mark from a previous collection and are skipped. When checkFinger
 * is true and the newly marked object lies below the current finger,
 * the bitmap walk has already passed its address, so the object is
 * pushed onto the mark stack to guarantee it still gets scanned.
 */
static void markObjectNonNull(const Object *obj, GcMarkContext *ctx,
                              bool checkFinger)
{
    assert(ctx != NULL);
    assert(obj != NULL);
    assert(dvmIsValidObject(obj));

    if (obj < (Object *)ctx->immuneLimit) {
        assert(isMarked(obj, ctx));
        return;
    }
    /* setAndReturnMarkBit() yields the previous bit value. */
    if (!setAndReturnMarkBit(ctx, obj)) {
        /* This object was not previously marked.
         */
        if (checkFinger && (void *)obj < ctx->finger) {
            /* This object will need to go on the mark stack.
             */
            MARK_STACK_PUSH(ctx->stack, obj);
        }

#if WITH_HPROF
        if (gDvm.gcHeap->hprofContext != NULL) {
            hprofMarkRootObject(gDvm.gcHeap->hprofContext, obj, 0);
        }
#endif
    }
}
137
138 /* Used to mark objects when recursing. Recursion is done by moving
139 * the finger across the bitmaps in address order and marking child
140 * objects. Any newly-marked objects whose addresses are lower than
141 * the finger won't be visited by the bitmap scan, so those objects
142 * need to be added to the mark stack.
143 */
markObject(const Object * obj,GcMarkContext * ctx)144 static void markObject(const Object *obj, GcMarkContext *ctx)
145 {
146 if (obj != NULL) {
147 markObjectNonNull(obj, ctx, true);
148 }
149 }
150
/* If the object hasn't already been marked, mark it and
 * schedule it to be scanned for references.
 *
 * obj may not be NULL. The macro dvmMarkObject() should
 * be used in situations where a reference may be NULL.
 *
 * This function may only be called when marking the root
 * set. When recursing, use the internal markObject().
 *
 * Root marking passes checkFinger=false: during root marking the
 * bitmap walk has not started, so nothing needs to go on the stack.
 */
void dvmMarkObjectNonNull(const Object *obj)
{
    assert(obj != NULL);
    markObjectNonNull(obj, &gDvm.gcHeap->markContext, false);
}
165
/* Mark the set of root objects.
 *
 * Things we need to scan:
 * - System classes defined by root classloader
 * - For each thread:
 * - Interpreted stack, from top to "curFrame"
 * - Dalvik registers (args + local vars)
 * - JNI local references
 * - Automatic VM local references (TrackedAlloc)
 * - Associated Thread/VMThread object
 * - ThreadGroups (could track & start with these instead of working
 * upward from Threads)
 * - Exception currently being thrown, if present
 * - JNI global references
 * - Interned string table
 * - Primitive classes
 * - Special objects
 * - gDvm.outOfMemoryObj
 * - Objects allocated with ALLOC_NO_GC
 * - Objects pending finalization (but not yet finalized)
 * - Objects in debugger object registry
 *
 * Don't need:
 * - Native stack (for in-progress stuff in the VM)
 * - The TrackedAlloc stuff watches all native VM references.
 */
void dvmHeapMarkRootSet()
{
    GcHeap *gcHeap = gDvm.gcHeap;

    /* Each HPROF_SET_GC_SCAN_STATE call tags the objects marked in
     * the following section with a root type for heap dumps.
     */
    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_STICKY_CLASS, 0);

    LOG_SCAN("immune objects");
    dvmMarkImmuneObjects(gcHeap->markContext.immuneLimit);

    LOG_SCAN("root class loader\n");
    dvmGcScanRootClassLoader();
    LOG_SCAN("primitive classes\n");
    dvmGcScanPrimitiveClasses();

    /* dvmGcScanRootThreadGroups() sets a bunch of
     * different scan states internally.
     */
    HPROF_CLEAR_GC_SCAN_STATE();

    LOG_SCAN("root thread groups\n");
    dvmGcScanRootThreadGroups();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_INTERNED_STRING, 0);

    LOG_SCAN("interned strings\n");
    dvmGcScanInternedStrings();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_JNI_GLOBAL, 0);

    LOG_SCAN("JNI global refs\n");
    dvmGcMarkJniGlobalRefs();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_REFERENCE_CLEANUP, 0);

    LOG_SCAN("pending reference operations\n");
    dvmHeapMarkLargeTableRefs(gcHeap->referenceOperations);

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_FINALIZING, 0);

    LOG_SCAN("pending finalizations\n");
    dvmHeapMarkLargeTableRefs(gcHeap->pendingFinalizationRefs);

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_DEBUGGER, 0);

    LOG_SCAN("debugger refs\n");
    dvmGcMarkDebuggerRefs();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_VM_INTERNAL, 0);

    /* Mark any special objects we have sitting around.
     */
    LOG_SCAN("special objects\n");
    dvmMarkObjectNonNull(gDvm.outOfMemoryObj);
    dvmMarkObjectNonNull(gDvm.internalErrorObj);
    dvmMarkObjectNonNull(gDvm.noClassDefFoundErrorObj);
    //TODO: scan object references sitting in gDvm; use pointer begin & end

    HPROF_CLEAR_GC_SCAN_STATE();
}
251
252 /*
253 * Callback applied to root references. If the root location contains
254 * a white reference it is pushed on the mark stack and grayed.
255 */
markObjectVisitor(void * addr,void * arg)256 static void markObjectVisitor(void *addr, void *arg)
257 {
258 Object *obj;
259
260 assert(addr != NULL);
261 assert(arg != NULL);
262 obj = *(Object **)addr;
263 if (obj != NULL) {
264 markObjectNonNull(obj, arg, true);
265 }
266 }
267
268 /*
269 * Grays all references in the roots.
270 */
dvmHeapReMarkRootSet(void)271 void dvmHeapReMarkRootSet(void)
272 {
273 GcMarkContext *ctx = &gDvm.gcHeap->markContext;
274 assert(ctx->finger == (void *)ULONG_MAX);
275 dvmVisitRoots(markObjectVisitor, ctx);
276 }
277
278 /*
279 * Nothing past this point is allowed to use dvmMarkObject() or
280 * dvmMarkObjectNonNull(), which are for root-marking only.
281 * Scanning/recursion must use markObject(), which takes the finger
282 * into account.
283 */
284 #undef dvmMarkObject
285 #define dvmMarkObject __dont_use_dvmMarkObject__
286 #define dvmMarkObjectNonNull __dont_use_dvmMarkObjectNonNull__
287
/*
 * Scans instance fields.
 *
 * In the common case the class summarizes its reference fields in
 * the refOffsets bit vector: each set bit encodes (via its position
 * from the high end) the offset of one reference field. The
 * sentinel CLASS_WALK_SUPER means the offsets could not be encoded,
 * so the superclass chain is walked and each class's ifields list
 * is consulted directly.
 */
static void scanInstanceFields(const Object *obj, GcMarkContext *ctx)
{
    assert(obj != NULL);
    assert(obj->clazz != NULL);
    assert(ctx != NULL);

    if (obj->clazz->refOffsets != CLASS_WALK_SUPER) {
        unsigned int refOffsets = obj->clazz->refOffsets;
        while (refOffsets != 0) {
            /* Peel off the highest set bit and map it back to a
             * field byte offset.
             */
            const int rshift = CLZ(refOffsets);
            refOffsets &= ~(CLASS_HIGH_BIT >> rshift);
            markObject(dvmGetFieldObject((Object*)obj,
                                          CLASS_OFFSET_FROM_CLZ(rshift)), ctx);
        }
    } else {
        ClassObject *clazz;
        int i;
        for (clazz = obj->clazz; clazz != NULL; clazz = clazz->super) {
            InstField *field = clazz->ifields;
            /* The loop assumes the first ifieldRefCount entries of
             * ifields are the reference-typed fields.
             */
            for (i = 0; i < clazz->ifieldRefCount; ++i, ++field) {
                void *addr = BYTE_OFFSET((Object *)obj, field->byteOffset);
                markObject(((JValue *)addr)->l, ctx);
            }
        }
    }
}
317
318 /*
319 * Scans the header, static field references, and interface
320 * pointers of a class object.
321 */
scanClassObject(const ClassObject * obj,GcMarkContext * ctx)322 static void scanClassObject(const ClassObject *obj, GcMarkContext *ctx)
323 {
324 int i;
325
326 assert(obj != NULL);
327 assert(obj->obj.clazz == gDvm.classJavaLangClass);
328 assert(ctx != NULL);
329
330 markObject((Object *)obj->obj.clazz, ctx);
331 if (IS_CLASS_FLAG_SET(obj, CLASS_ISARRAY)) {
332 markObject((Object *)obj->elementClass, ctx);
333 }
334 /* Do super and the interfaces contain Objects and not dex idx values? */
335 if (obj->status > CLASS_IDX) {
336 markObject((Object *)obj->super, ctx);
337 }
338 markObject(obj->classLoader, ctx);
339 /* Scan static field references. */
340 for (i = 0; i < obj->sfieldCount; ++i) {
341 char ch = obj->sfields[i].field.signature[0];
342 if (ch == '[' || ch == 'L') {
343 markObject(obj->sfields[i].value.l, ctx);
344 }
345 }
346 /* Scan the instance fields. */
347 scanInstanceFields((const Object *)obj, ctx);
348 /* Scan interface references. */
349 if (obj->status > CLASS_IDX) {
350 for (i = 0; i < obj->interfaceCount; ++i) {
351 markObject((Object *)obj->interfaces[i], ctx);
352 }
353 }
354 }
355
356 /*
357 * Scans the header of all array objects. If the array object is
358 * specialized to a reference type, scans the array data as well.
359 */
scanArrayObject(const ArrayObject * obj,GcMarkContext * ctx)360 static void scanArrayObject(const ArrayObject *obj, GcMarkContext *ctx)
361 {
362 size_t i;
363
364 assert(obj != NULL);
365 assert(obj->obj.clazz != NULL);
366 assert(ctx != NULL);
367 /* Scan the class object reference. */
368 markObject((Object *)obj->obj.clazz, ctx);
369 if (IS_CLASS_FLAG_SET(obj->obj.clazz, CLASS_ISOBJECTARRAY)) {
370 /* Scan the array contents. */
371 Object **contents = (Object **)obj->contents;
372 for (i = 0; i < obj->length; ++i) {
373 markObject(contents[i], ctx);
374 }
375 }
376 }
377
378 /*
379 * Returns class flags relating to Reference subclasses.
380 */
referenceClassFlags(const Object * obj)381 static int referenceClassFlags(const Object *obj)
382 {
383 int flags = CLASS_ISREFERENCE |
384 CLASS_ISWEAKREFERENCE |
385 CLASS_ISPHANTOMREFERENCE;
386 return GET_CLASS_FLAG_GROUP(obj->clazz, flags);
387 }
388
389 /*
390 * Returns true if the object derives from SoftReference.
391 */
isSoftReference(const Object * obj)392 static bool isSoftReference(const Object *obj)
393 {
394 return referenceClassFlags(obj) == CLASS_ISREFERENCE;
395 }
396
397 /*
398 * Returns true if the object derives from WeakReference.
399 */
isWeakReference(const Object * obj)400 static bool isWeakReference(const Object *obj)
401 {
402 return referenceClassFlags(obj) & CLASS_ISWEAKREFERENCE;
403 }
404
405 /*
406 * Returns true if the object derives from PhantomReference.
407 */
isPhantomReference(const Object * obj)408 static bool isPhantomReference(const Object *obj)
409 {
410 return referenceClassFlags(obj) & CLASS_ISPHANTOMREFERENCE;
411 }
412
413 /*
414 * Adds a reference to the tail of a circular queue of references.
415 */
enqueuePendingReference(Object * ref,Object ** list)416 static void enqueuePendingReference(Object *ref, Object **list)
417 {
418 size_t offset;
419
420 assert(ref != NULL);
421 assert(list != NULL);
422 offset = gDvm.offJavaLangRefReference_pendingNext;
423 if (*list == NULL) {
424 dvmSetFieldObject(ref, offset, ref);
425 *list = ref;
426 } else {
427 Object *head = dvmGetFieldObject(*list, offset);
428 dvmSetFieldObject(ref, offset, head);
429 dvmSetFieldObject(*list, offset, ref);
430 }
431 }
432
433 /*
434 * Removes the reference at the head of a circular queue of
435 * references.
436 */
dequeuePendingReference(Object ** list)437 static Object *dequeuePendingReference(Object **list)
438 {
439 Object *ref, *head;
440 size_t offset;
441
442 assert(list != NULL);
443 assert(*list != NULL);
444 offset = gDvm.offJavaLangRefReference_pendingNext;
445 head = dvmGetFieldObject(*list, offset);
446 if (*list == head) {
447 ref = *list;
448 *list = NULL;
449 } else {
450 Object *next = dvmGetFieldObject(head, offset);
451 dvmSetFieldObject(*list, offset, next);
452 ref = head;
453 }
454 dvmSetFieldObject(ref, offset, NULL);
455 return ref;
456 }
457
/*
 * Process the "referent" field in a java.lang.ref.Reference. If the
 * referent has not yet been marked, put it on the appropriate list in
 * the gcHeap for later processing.
 */
static void delayReferenceReferent(Object *obj, GcMarkContext *ctx)
{
    GcHeap *gcHeap = gDvm.gcHeap;
    Object *pending, *referent;
    size_t pendingNextOffset, referentOffset;

    assert(obj != NULL);
    assert(obj->clazz != NULL);
    assert(IS_CLASS_FLAG_SET(obj->clazz, CLASS_ISREFERENCE));
    assert(ctx != NULL);
    pendingNextOffset = gDvm.offJavaLangRefReference_pendingNext;
    referentOffset = gDvm.offJavaLangRefReference_referent;
    pending = dvmGetFieldObject(obj, pendingNextOffset);
    referent = dvmGetFieldObject(obj, referentOffset);
    /* A non-NULL pendingNext means this reference is already on a
     * pending list; a NULL or already-marked referent needs no
     * further processing.
     */
    if (pending == NULL && referent != NULL && !isMarked(referent, ctx)) {
        Object **list = NULL;
        /* Pick the list matching the reference's strength. */
        if (isSoftReference(obj)) {
            list = &gcHeap->softReferences;
        } else if (isWeakReference(obj)) {
            list = &gcHeap->weakReferences;
        } else if (isPhantomReference(obj)) {
            list = &gcHeap->phantomReferences;
        }
        assert(list != NULL);
        enqueuePendingReference(obj, list);
    }
}
490
491 /*
492 * Scans the header and field references of a data object.
493 */
scanDataObject(DataObject * obj,GcMarkContext * ctx)494 static void scanDataObject(DataObject *obj, GcMarkContext *ctx)
495 {
496 assert(obj != NULL);
497 assert(obj->obj.clazz != NULL);
498 assert(ctx != NULL);
499 /* Scan the class object. */
500 markObject((Object *)obj->obj.clazz, ctx);
501 /* Scan the instance fields. */
502 scanInstanceFields((const Object *)obj, ctx);
503 if (IS_CLASS_FLAG_SET(obj->obj.clazz, CLASS_ISREFERENCE)) {
504 delayReferenceReferent((Object *)obj, ctx);
505 }
506 }
507
508 /*
509 * Scans an object reference. Determines the type of the reference
510 * and dispatches to a specialized scanning routine.
511 */
scanObject(const Object * obj,GcMarkContext * ctx)512 static void scanObject(const Object *obj, GcMarkContext *ctx)
513 {
514 assert(obj != NULL);
515 assert(ctx != NULL);
516 assert(obj->clazz != NULL);
517 #if WITH_HPROF
518 if (gDvm.gcHeap->hprofContext != NULL) {
519 hprofDumpHeapObject(gDvm.gcHeap->hprofContext, obj);
520 }
521 #endif
522 /* Dispatch a type-specific scan routine. */
523 if (obj->clazz == gDvm.classJavaLangClass) {
524 scanClassObject((ClassObject *)obj, ctx);
525 } else if (IS_CLASS_FLAG_SET(obj->clazz, CLASS_ISARRAY)) {
526 scanArrayObject((ArrayObject *)obj, ctx);
527 } else {
528 scanDataObject((DataObject *)obj, ctx);
529 }
530 }
531
532 static void
processMarkStack(GcMarkContext * ctx)533 processMarkStack(GcMarkContext *ctx)
534 {
535 const Object **const base = ctx->stack.base;
536
537 /* Scan anything that's on the mark stack.
538 * We can't use the bitmaps anymore, so use
539 * a finger that points past the end of them.
540 */
541 ctx->finger = (void *)ULONG_MAX;
542 while (ctx->stack.top != base) {
543 scanObject(*ctx->stack.top++, ctx);
544 }
545 }
546
objectSize(const Object * obj)547 static size_t objectSize(const Object *obj)
548 {
549 assert(dvmIsValidObject(obj));
550 assert(dvmIsValidObject((Object *)obj->clazz));
551 if (IS_CLASS_FLAG_SET(obj->clazz, CLASS_ISARRAY)) {
552 return dvmArrayObjectSize((ArrayObject *)obj);
553 } else if (obj->clazz == gDvm.classJavaLangClass) {
554 return dvmClassObjectSize((ClassObject *)obj);
555 } else {
556 return obj->clazz->objectSize;
557 }
558 }
559
560 /*
561 * Scans forward to the header of the next marked object between start
562 * and limit. Returns NULL if no marked objects are in that region.
563 */
nextGrayObject(u1 * base,u1 * limit,HeapBitmap * markBits)564 static Object *nextGrayObject(u1 *base, u1 *limit, HeapBitmap *markBits)
565 {
566 u1 *ptr;
567
568 assert(base < limit);
569 assert(limit - base <= GC_CARD_SIZE);
570 for (ptr = base; ptr < limit; ptr += HB_OBJECT_ALIGNMENT) {
571 if (dvmHeapBitmapIsObjectBitSet(markBits, ptr))
572 return (Object *)ptr;
573 }
574 return NULL;
575 }
576
577 /*
578 * Scan the card table looking for objects that have been grayed by
579 * the mutator.
580 */
scanGrayObjects(GcMarkContext * ctx)581 static void scanGrayObjects(GcMarkContext *ctx)
582 {
583 GcHeap *h = gDvm.gcHeap;
584 HeapBitmap *markBits, *liveBits;
585 u1 *card, *baseCard, *limitCard;
586 size_t footprint;
587
588 markBits = ctx->bitmap;
589 liveBits = dvmHeapSourceGetLiveBits();
590 footprint = dvmHeapSourceGetValue(HS_FOOTPRINT, NULL, 0);
591 baseCard = &h->cardTableBase[0];
592 limitCard = dvmCardFromAddr((u1 *)dvmHeapSourceGetBase() + footprint);
593 assert(limitCard <= &h->cardTableBase[h->cardTableLength]);
594 for (card = baseCard; card != limitCard; ++card) {
595 if (*card == GC_CARD_DIRTY) {
596 /*
597 * The card is dirty. Scan all of the objects that
598 * intersect with the card address.
599 */
600 u1 *addr = dvmAddrFromCard(card);
601 /*
602 * Scan through all black objects that start on the
603 * current card.
604 */
605 u1 *limit = addr + GC_CARD_SIZE;
606 u1 *next = addr;
607 while (next < limit) {
608 Object *obj = nextGrayObject(next, limit, markBits);
609 if (obj == NULL)
610 break;
611 scanObject(obj, ctx);
612 next = (u1*)obj + ALIGN_UP(objectSize(obj), HB_OBJECT_ALIGNMENT);
613 }
614 }
615 }
616 }
617
618 /*
619 * Callback for scanning each object in the bitmap. The finger is set
620 * to the address corresponding to the lowest address in the next word
621 * of bits in the bitmap.
622 */
scanBitmapCallback(void * addr,void * finger,void * arg)623 static void scanBitmapCallback(void *addr, void *finger, void *arg)
624 {
625 GcMarkContext *ctx = arg;
626 ctx->finger = (void *)finger;
627 scanObject(addr, ctx);
628 }
629
630 /* Given bitmaps with the root set marked, find and mark all
631 * reachable objects. When this returns, the entire set of
632 * live objects will be marked and the mark stack will be empty.
633 */
dvmHeapScanMarkedObjects(void)634 void dvmHeapScanMarkedObjects(void)
635 {
636 GcMarkContext *ctx = &gDvm.gcHeap->markContext;
637
638 assert(ctx->finger == NULL);
639
640 /* The bitmaps currently have bits set for the root set.
641 * Walk across the bitmaps and scan each object.
642 */
643 dvmHeapBitmapScanWalk(ctx->bitmap, scanBitmapCallback, ctx);
644
645 /* We've walked the mark bitmaps. Scan anything that's
646 * left on the mark stack.
647 */
648 processMarkStack(ctx);
649
650 LOG_SCAN("done with marked objects\n");
651 }
652
dvmHeapReScanMarkedObjects(void)653 void dvmHeapReScanMarkedObjects(void)
654 {
655 GcMarkContext *ctx = &gDvm.gcHeap->markContext;
656
657 /*
658 * The finger must have been set to the maximum value to ensure
659 * that gray objects will be pushed onto the mark stack.
660 */
661 assert(ctx->finger == (void *)ULONG_MAX);
662 scanGrayObjects(ctx);
663 processMarkStack(ctx);
664 }
665
666 /*
667 * Clear the referent field.
668 */
clearReference(Object * reference)669 static void clearReference(Object *reference)
670 {
671 size_t offset = gDvm.offJavaLangRefReference_referent;
672 dvmSetFieldObject(reference, offset, NULL);
673 }
674
675 /*
676 * Returns true if the reference was registered with a reference queue
677 * and has not yet been enqueued.
678 */
isEnqueuable(const Object * reference)679 static bool isEnqueuable(const Object *reference)
680 {
681 Object *queue = dvmGetFieldObject(reference,
682 gDvm.offJavaLangRefReference_queue);
683 Object *queueNext = dvmGetFieldObject(reference,
684 gDvm.offJavaLangRefReference_queueNext);
685 return queue != NULL && queueNext == NULL;
686 }
687
688 /*
689 * Schedules a reference to be appended to its reference queue.
690 */
enqueueReference(Object * ref)691 static void enqueueReference(Object *ref)
692 {
693 assert(ref != NULL);
694 assert(dvmGetFieldObject(ref, gDvm.offJavaLangRefReference_queue) != NULL);
695 assert(dvmGetFieldObject(ref, gDvm.offJavaLangRefReference_queueNext) == NULL);
696 if (!dvmHeapAddRefToLargeTable(&gDvm.gcHeap->referenceOperations, ref)) {
697 LOGE_HEAP("enqueueReference(): no room for any more "
698 "reference operations\n");
699 dvmAbort();
700 }
701 }
702
/*
 * Walks the reference list marking any references subject to the
 * reference clearing policy. References with a black referent are
 * removed from the list. References with white referents biased
 * toward saving are blackened and also removed from the list.
 */
void dvmHandleSoftRefs(Object **list)
{
    GcMarkContext *ctx;
    Object *ref, *referent;
    Object *clear;          /* rebuilt queue of references to clear */
    size_t referentOffset;
    size_t counter;         /* alternates the save/clear decision */
    bool marked;

    ctx = &gDvm.gcHeap->markContext;
    referentOffset = gDvm.offJavaLangRefReference_referent;
    clear = NULL;
    counter = 0;
    while (*list != NULL) {
        ref = dequeuePendingReference(list);
        referent = dvmGetFieldObject(ref, referentOffset);
        if (referent == NULL) {
            /* Referent was cleared by the user during marking. */
            continue;
        }
        marked = isMarked(referent, ctx);
        if (!marked && ((++counter) & 1)) {
            /* Referent is white and biased toward saving, mark it.
             * (The counter parity preserves every other white
             * soft referent.)
             */
            markObject(referent, ctx);
            marked = true;
        }
        if (!marked) {
            /* Referent is white, queue it for clearing. */
            enqueuePendingReference(ref, &clear);
        }
    }
    /* Hand the to-be-cleared queue back to the caller. */
    *list = clear;
    /*
     * Restart the mark with the newly black references added to the
     * root set.
     */
    processMarkStack(ctx);
}
747
/*
 * Unlink the reference list clearing references objects with white
 * referents. Cleared references registered to a reference queue are
 * scheduled for appending by the heap worker thread.
 */
void dvmClearWhiteRefs(Object **list)
{
    GcMarkContext *ctx;
    Object *ref, *referent;
    size_t referentOffset;
    bool doSignal;          /* true once any reference was enqueued */

    ctx = &gDvm.gcHeap->markContext;
    referentOffset = gDvm.offJavaLangRefReference_referent;
    doSignal = false;
    /* Drain the pending queue; dequeuePendingReference unlinks each
     * entry, so the loop empties *list.
     */
    while (*list != NULL) {
        ref = dequeuePendingReference(list);
        referent = dvmGetFieldObject(ref, referentOffset);
        if (referent != NULL && !isMarked(referent, ctx)) {
            /* Referent is white, clear it. */
            clearReference(ref);
            if (isEnqueuable(ref)) {
                enqueueReference(ref);
                doSignal = true;
            }
        }
    }
    /*
     * If we cleared a reference with a reference queue we must notify
     * the heap worker to append the reference.
     */
    if (doSignal) {
        dvmSignalHeapWorker(false);
    }
    assert(*list == NULL);
}
784
/* Find unreachable objects that need to be finalized,
 * and schedule them for finalization.
 *
 * Unmarked entries are moved from finalizableRefs to the
 * pendingFinalizationRefs table, then re-marked (along with
 * everything they reach) so they survive until their finalizers
 * have run.
 */
void dvmHeapScheduleFinalizations()
{
    HeapRefTable newPendingRefs;
    LargeHeapRefTable *finRefs = gDvm.gcHeap->finalizableRefs;
    Object **ref;
    Object **lastRef;
    size_t totalPendCount;
    GcMarkContext *ctx = &gDvm.gcHeap->markContext;

    /*
     * All reachable objects have been marked.
     * Any unmarked finalizable objects need to be finalized.
     */

    /* Create a table that the new pending refs will
     * be added to.
     */
    if (!dvmHeapInitHeapRefTable(&newPendingRefs)) {
        //TODO: mark all finalizable refs and hope that
        // we can schedule them next time. Watch out,
        // because we may be expecting to free up space
        // by calling finalizers.
        LOGE_GC("dvmHeapScheduleFinalizations(): no room for "
                "pending finalizations\n");
        dvmAbort();
    }

    /* Walk through finalizableRefs and move any unmarked references
     * to the list of new pending refs.
     */
    totalPendCount = 0;
    while (finRefs != NULL) {
        Object **gapRef;
        size_t newPendCount = 0;

        /* gapRef trails ref, compacting surviving (marked) entries
         * over the holes left by moved ones.
         */
        gapRef = ref = finRefs->refs.table;
        lastRef = finRefs->refs.nextEntry;
        while (ref < lastRef) {
            if (!isMarked(*ref, ctx)) {
                if (!dvmHeapAddToHeapRefTable(&newPendingRefs, *ref)) {
                    //TODO: add the current table and allocate
                    // a new, smaller one.
                    LOGE_GC("dvmHeapScheduleFinalizations(): "
                            "no room for any more pending finalizations: %zd\n",
                            dvmHeapNumHeapRefTableEntries(&newPendingRefs));
                    dvmAbort();
                }
                newPendCount++;
            } else {
                /* This ref is marked, so will remain on finalizableRefs.
                 */
                if (newPendCount > 0) {
                    /* Copy it up to fill the holes.
                     */
                    *gapRef++ = *ref;
                } else {
                    /* No holes yet; don't bother copying.
                     */
                    gapRef++;
                }
            }
            ref++;
        }
        finRefs->refs.nextEntry = gapRef;
        //TODO: if the table is empty when we're done, free it.
        totalPendCount += newPendCount;
        finRefs = finRefs->next;
    }
    LOGD_GC("dvmHeapScheduleFinalizations(): %zd finalizers triggered.\n",
            totalPendCount);
    if (totalPendCount == 0) {
        /* No objects required finalization.
         * Free the empty temporary table.
         */
        dvmClearReferenceTable(&newPendingRefs);
        return;
    }

    /* Add the new pending refs to the main list.
     */
    if (!dvmHeapAddTableToLargeTable(&gDvm.gcHeap->pendingFinalizationRefs,
                &newPendingRefs))
    {
        LOGE_GC("dvmHeapScheduleFinalizations(): can't insert new "
                "pending finalizations\n");
        dvmAbort();
    }

    //TODO: try compacting the main list with a memcpy loop

    /* Mark the refs we just moved; we don't want them or their
     * children to get swept yet.
     */
    ref = newPendingRefs.table;
    lastRef = newPendingRefs.nextEntry;
    assert(ref < lastRef);
    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_FINALIZING, 0);
    while (ref < lastRef) {
        assert(*ref != NULL);
        markObject(*ref, ctx);
        ref++;
    }
    HPROF_CLEAR_GC_SCAN_STATE();
    processMarkStack(ctx);
    dvmSignalHeapWorker(false);
}
894
dvmHeapFinishMarkStep()895 void dvmHeapFinishMarkStep()
896 {
897 GcMarkContext *ctx;
898
899 ctx = &gDvm.gcHeap->markContext;
900
901 /* The mark bits are now not needed.
902 */
903 dvmHeapSourceZeroMarkBitmap();
904
905 /* Clean up everything else associated with the marking process.
906 */
907 destroyMarkStack(&ctx->stack);
908
909 ctx->finger = NULL;
910 }
911
/* Accumulates sweep statistics and records whether the sweep runs
 * concurrently with the mutator (which requires taking the heap
 * lock around each batch of frees).
 */
typedef struct {
    size_t numObjects;  /* total objects freed so far */
    size_t numBytes;    /* total bytes reclaimed so far */
    bool isConcurrent;  /* true if mutator may be running */
} SweepContext;
917
sweepBitmapCallback(size_t numPtrs,void ** ptrs,void * arg)918 static void sweepBitmapCallback(size_t numPtrs, void **ptrs, void *arg)
919 {
920 SweepContext *ctx = arg;
921
922 if (ctx->isConcurrent) {
923 dvmLockHeap();
924 }
925 ctx->numBytes += dvmHeapSourceFreeList(numPtrs, ptrs);
926 ctx->numObjects += numPtrs;
927 if (ctx->isConcurrent) {
928 dvmUnlockHeap();
929 }
930 }
931
932 /*
933 * Returns true if the given object is unmarked. This assumes that
934 * the bitmaps have not yet been swapped.
935 */
isUnmarkedObject(void * object)936 static int isUnmarkedObject(void *object)
937 {
938 return !isMarked((void *)((uintptr_t)object & ~(HB_OBJECT_ALIGNMENT-1)),
939 &gDvm.gcHeap->markContext);
940 }
941
942 /*
943 * Process all the internal system structures that behave like
944 * weakly-held objects.
945 */
dvmHeapSweepSystemWeaks(void)946 void dvmHeapSweepSystemWeaks(void)
947 {
948 dvmGcDetachDeadInternedStrings(isUnmarkedObject);
949 dvmSweepMonitorList(&gDvm.monitorList, isUnmarkedObject);
950 }
951
/*
 * Walk through the list of objects that haven't been marked and free
 * them. Assumes the bitmaps have been swapped.
 */
void dvmHeapSweepUnmarkedObjects(GcMode mode, bool isConcurrent,
                                 size_t *numObjects, size_t *numBytes)
{
    HeapBitmap currMark[HEAP_SOURCE_MAX_HEAP_COUNT];
    HeapBitmap currLive[HEAP_SOURCE_MAX_HEAP_COUNT];
    SweepContext ctx;
    size_t numBitmaps, numSweepBitmaps;
    size_t i;

    numBitmaps = dvmHeapSourceGetNumHeaps();
    dvmHeapSourceGetObjectBitmaps(currLive, currMark, numBitmaps);
    if (mode == GC_PARTIAL) {
        /* A partial GC only sweeps the active (first) heap; the
         * immune region must line up with the first live bitmap.
         */
        numSweepBitmaps = 1;
        assert((uintptr_t)gDvm.gcHeap->markContext.immuneLimit == currLive[0].base);
    } else {
        numSweepBitmaps = numBitmaps;
    }
    ctx.numObjects = ctx.numBytes = 0;
    ctx.isConcurrent = isConcurrent;
    for (i = 0; i < numSweepBitmaps; i++) {
        /* Because the bitmaps were swapped before this call, the
         * current "mark" bitmap holds the previous cycle's live set
         * and vice versa — hence the crossed assignments.
         */
        HeapBitmap* prevLive = &currMark[i];
        HeapBitmap* prevMark = &currLive[i];
        dvmHeapBitmapSweepWalk(prevLive, prevMark, sweepBitmapCallback, &ctx);
    }
    *numObjects = ctx.numObjects;
    *numBytes = ctx.numBytes;
    /* Fold the freed totals into the allocation profiler, if on. */
    if (gDvm.allocProf.enabled) {
        gDvm.allocProf.freeCount += ctx.numObjects;
        gDvm.allocProf.freeSize += ctx.numBytes;
    }
}
987