/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "Dalvik.h"
#include "alloc/clz.h"
#include "alloc/HeapBitmap.h"
#include "alloc/HeapInternal.h"
#include "alloc/HeapSource.h"
#include "alloc/MarkSweep.h"
#include <limits.h>     // for ULONG_MAX
#include <sys/mman.h>   // for madvise(), mmap()
#include <cutils/ashmem.h>
#include <errno.h>

#define GC_DEBUG_PARANOID   2
#define GC_DEBUG_BASIC      1
#define GC_DEBUG_OFF        0
#define GC_DEBUG(l)         (GC_DEBUG_LEVEL >= (l))
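/* e.g. GC_DEBUG(GC_DEBUG_BASIC) is true whenever the compiled-in level
 * is BASIC or PARANOID, so debug checks can be gated by severity.
 */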

#if 1
#define GC_DEBUG_LEVEL      GC_DEBUG_PARANOID
#else
#define GC_DEBUG_LEVEL      GC_DEBUG_OFF
#endif

#define VERBOSE_GC          0

#define GC_LOG_TAG      LOG_TAG "-gc"

#if LOG_NDEBUG
#define LOGV_GC(...)    ((void)0)
#define LOGD_GC(...)    ((void)0)
#else
#define LOGV_GC(...)    LOG(LOG_VERBOSE, GC_LOG_TAG, __VA_ARGS__)
#define LOGD_GC(...)    LOG(LOG_DEBUG, GC_LOG_TAG, __VA_ARGS__)
#endif

#if VERBOSE_GC
#define LOGVV_GC(...)   LOGV_GC(__VA_ARGS__)
#else
#define LOGVV_GC(...)   ((void)0)
#endif

#define LOGI_GC(...)    LOG(LOG_INFO, GC_LOG_TAG, __VA_ARGS__)
#define LOGW_GC(...)    LOG(LOG_WARN, GC_LOG_TAG, __VA_ARGS__)
#define LOGE_GC(...)    LOG(LOG_ERROR, GC_LOG_TAG, __VA_ARGS__)

#define LOG_SCAN(...)   LOGV_GC("SCAN: " __VA_ARGS__)
#define LOG_MARK(...)   LOGV_GC("MARK: " __VA_ARGS__)
#define LOG_SWEEP(...)  LOGV_GC("SWEEP: " __VA_ARGS__)
#define LOG_REF(...)    LOGV_GC("REF: " __VA_ARGS__)

#define LOGV_SCAN(...)  LOGVV_GC("SCAN: " __VA_ARGS__)
#define LOGV_MARK(...)  LOGVV_GC("MARK: " __VA_ARGS__)
#define LOGV_SWEEP(...) LOGVV_GC("SWEEP: " __VA_ARGS__)
#define LOGV_REF(...)   LOGVV_GC("REF: " __VA_ARGS__)

#if WITH_OBJECT_HEADERS
u2 gGeneration = 0;
static const Object *gMarkParent = NULL;
#endif

#ifndef PAGE_SIZE
#define PAGE_SIZE 4096
#endif
#define ALIGN_UP_TO_PAGE_SIZE(p) \
    (((size_t)(p) + (PAGE_SIZE - 1)) & ~(PAGE_SIZE - 1))
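/* e.g. with a 4096-byte page, ALIGN_UP_TO_PAGE_SIZE(1) == 4096 and
 * ALIGN_UP_TO_PAGE_SIZE(4096) == 4096; already-aligned values are
 * left unchanged.
 */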

/* Do not cast the result of this to a boolean; the only set bit
 * may be > 1<<8.
 */
static inline long isMarked(const DvmHeapChunk *hc, const GcMarkContext *ctx)
        __attribute__((always_inline));
static inline long isMarked(const DvmHeapChunk *hc, const GcMarkContext *ctx)
{
    return dvmHeapBitmapIsObjectBitSetInList(ctx->bitmaps, ctx->numBitmaps, hc);
}

static bool
createMarkStack(GcMarkStack *stack)
{
    const Object **limit;
    size_t size;
    int fd, err;

    /* Create a stack big enough for the worst possible case,
     * where the heap is perfectly full of the smallest object.
     * TODO: be better about memory usage; use a smaller stack with
     *       overflow detection and recovery.
     */
    size = dvmHeapSourceGetIdealFootprint() * sizeof(Object*) /
            (sizeof(Object) + HEAP_SOURCE_CHUNK_OVERHEAD);
    size = ALIGN_UP_TO_PAGE_SIZE(size);
    fd = ashmem_create_region("dalvik-heap-markstack", size);
    if (fd < 0) {
        LOGE_GC("Could not create %zu-byte ashmem mark stack: %s\n",
                size, strerror(errno));
        return false;
    }
    limit = (const Object **)mmap(NULL, size, PROT_READ | PROT_WRITE,
            MAP_PRIVATE, fd, 0);
    err = errno;
    close(fd);
    if (limit == MAP_FAILED) {
        LOGE_GC("Could not mmap %zu-byte ashmem mark stack: %s\n",
                size, strerror(err));
        return false;
    }

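    /* The mark stack grows downward from the high end of the mapping:
     * "limit" is the lowest address, "base" is one past the highest
     * slot, and "top" moves from base toward limit as objects are
     * pushed (see MARK_STACK_PUSH below).
     */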
    memset(stack, 0, sizeof(*stack));
    stack->limit = limit;
    stack->base = (const Object **)((uintptr_t)limit + size);
    stack->top = stack->base;

    return true;
}

static void
destroyMarkStack(GcMarkStack *stack)
{
    munmap((char *)stack->limit,
            (uintptr_t)stack->base - (uintptr_t)stack->limit);
    memset(stack, 0, sizeof(*stack));
}

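/* Push an object onto the mark stack. There is no overflow check here;
 * createMarkStack() sizes the stack for the worst case, so "top" can
 * never be pushed past "limit".
 */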
#define MARK_STACK_PUSH(stack, obj) \
    do { \
        *--(stack).top = (obj); \
    } while (false)

bool
dvmHeapBeginMarkStep()
{
    GcMarkContext *mc = &gDvm.gcHeap->markContext;
    HeapBitmap objectBitmaps[HEAP_SOURCE_MAX_HEAP_COUNT];
    size_t numBitmaps;

    if (!createMarkStack(&mc->stack)) {
        return false;
    }

    numBitmaps = dvmHeapSourceGetObjectBitmaps(objectBitmaps,
            HEAP_SOURCE_MAX_HEAP_COUNT);
    if (numBitmaps == 0) {
        return false;
    }

    /* Create mark bitmaps that cover the same ranges as the
     * current object bitmaps.
     */
    if (!dvmHeapBitmapInitListFromTemplates(mc->bitmaps, objectBitmaps,
            numBitmaps, "mark"))
    {
        return false;
    }

    mc->numBitmaps = numBitmaps;
    mc->finger = NULL;

#if WITH_OBJECT_HEADERS
    gGeneration++;
#endif

    return true;
}

static long setAndReturnMarkBit(GcMarkContext *ctx, const DvmHeapChunk *hc)
        __attribute__((always_inline));
static long
setAndReturnMarkBit(GcMarkContext *ctx, const DvmHeapChunk *hc)
{
    return dvmHeapBitmapSetAndReturnObjectBitInList(ctx->bitmaps,
            ctx->numBitmaps, hc);
}

static void _markObjectNonNullCommon(const Object *obj, GcMarkContext *ctx,
        bool checkFinger, bool forceStack)
        __attribute__((always_inline));
static void
_markObjectNonNullCommon(const Object *obj, GcMarkContext *ctx,
        bool checkFinger, bool forceStack)
{
    DvmHeapChunk *hc;

    assert(obj != NULL);

#if GC_DEBUG(GC_DEBUG_PARANOID)
    //TODO: make sure we're locked
    assert(obj != (Object *)gDvm.unlinkedJavaLangClass);
    assert(dvmIsValidObject(obj));
#endif

    hc = ptr2chunk(obj);
    if (!setAndReturnMarkBit(ctx, hc)) {
        /* This object was not previously marked.
         */
        if (forceStack || (checkFinger && (void *)hc < ctx->finger)) {
            /* This object will need to go on the mark stack.
             */
            MARK_STACK_PUSH(ctx->stack, obj);
        }

#if WITH_OBJECT_HEADERS
        if (hc->scanGeneration != hc->markGeneration) {
            LOGE("markObject(0x%08x): wasn't scanned last time\n", (uint)obj);
            dvmAbort();
        }
        if (hc->markGeneration == gGeneration) {
            LOGE("markObject(0x%08x): already marked this generation\n",
                    (uint)obj);
            dvmAbort();
        }
        hc->oldMarkGeneration = hc->markGeneration;
        hc->markGeneration = gGeneration;
        hc->markFingerOld = hc->markFinger;
        hc->markFinger = ctx->finger;
        if (gMarkParent != NULL) {
            hc->parentOld = hc->parent;
            hc->parent = gMarkParent;
        } else {
            hc->parent = (const Object *)((uintptr_t)hc->parent | 1);
        }
        hc->markCount++;
#endif
#if WITH_HPROF
        if (gDvm.gcHeap->hprofContext != NULL) {
            hprofMarkRootObject(gDvm.gcHeap->hprofContext, obj, 0);
        }
#endif
#if DVM_TRACK_HEAP_MARKING
        gDvm.gcHeap->markCount++;
        gDvm.gcHeap->markSize += dvmHeapSourceChunkSize((void *)hc) +
                HEAP_SOURCE_CHUNK_OVERHEAD;
#endif

        /* obj->clazz can be NULL if we catch an object between
         * dvmMalloc() and DVM_OBJECT_INIT(). This is ok.
         */
        LOGV_MARK("0x%08x %s\n", (uint)obj,
                obj->clazz == NULL ? "<null class>" : obj->clazz->name);
    }
}

/* Used to mark objects when recursing. Recursion is done by moving
 * the finger across the bitmaps in address order and marking child
 * objects. Any newly-marked objects whose addresses are lower than
 * the finger won't be visited by the bitmap scan, so those objects
 * need to be added to the mark stack.
 */
static void
markObjectNonNull(const Object *obj, GcMarkContext *ctx)
{
    _markObjectNonNullCommon(obj, ctx, true, false);
}

#define markObject(obj, ctx) \
    do { \
        Object *MO_obj_ = (Object *)(obj); \
        if (MO_obj_ != NULL) { \
            markObjectNonNull(MO_obj_, (ctx)); \
        } \
    } while (false)

/* If the object hasn't already been marked, mark it and
 * schedule it to be scanned for references.
 *
 * obj may not be NULL. The macro dvmMarkObject() should
 * be used in situations where a reference may be NULL.
 *
 * This function may only be called when marking the root
 * set. When recursing, use the internal markObject[NonNull]().
 */
void
dvmMarkObjectNonNull(const Object *obj)
{
    _markObjectNonNullCommon(obj, &gDvm.gcHeap->markContext, false, false);
}

/* Mark the set of root objects.
 *
 * Things we need to scan:
 * - System classes defined by root classloader
 * - For each thread:
 *   - Interpreted stack, from top to "curFrame"
 *     - Dalvik registers (args + local vars)
 *   - JNI local references
 *   - Automatic VM local references (TrackedAlloc)
 *   - Associated Thread/VMThread object
 *   - ThreadGroups (could track & start with these instead of working
 *     upward from Threads)
 *   - Exception currently being thrown, if present
 * - JNI global references
 * - Interned string table
 * - Primitive classes
 * - Special objects
 *   - gDvm.outOfMemoryObj
 * - Objects allocated with ALLOC_NO_GC
 * - Objects pending finalization (but not yet finalized)
 * - Objects in debugger object registry
 *
 * Don't need:
 * - Native stack (for in-progress stuff in the VM)
 *   - The TrackedAlloc stuff watches all native VM references.
 */
void dvmHeapMarkRootSet()
{
    HeapRefTable *refs;
    GcHeap *gcHeap;
    Object **op;

    gcHeap = gDvm.gcHeap;

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_STICKY_CLASS, 0);

    LOG_SCAN("root class loader\n");
    dvmGcScanRootClassLoader();
    LOG_SCAN("primitive classes\n");
    dvmGcScanPrimitiveClasses();

    /* dvmGcScanRootThreadGroups() sets a bunch of
     * different scan states internally.
     */
    HPROF_CLEAR_GC_SCAN_STATE();

    LOG_SCAN("root thread groups\n");
    dvmGcScanRootThreadGroups();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_INTERNED_STRING, 0);

    LOG_SCAN("interned strings\n");
    dvmGcScanInternedStrings();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_JNI_GLOBAL, 0);

    LOG_SCAN("JNI global refs\n");
    dvmGcMarkJniGlobalRefs();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_REFERENCE_CLEANUP, 0);

    LOG_SCAN("pending reference operations\n");
    dvmHeapMarkLargeTableRefs(gcHeap->referenceOperations, true);

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_FINALIZING, 0);

    LOG_SCAN("pending finalizations\n");
    dvmHeapMarkLargeTableRefs(gcHeap->pendingFinalizationRefs, false);

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_DEBUGGER, 0);

    LOG_SCAN("debugger refs\n");
    dvmGcMarkDebuggerRefs();

    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_VM_INTERNAL, 0);

    /* Mark all ALLOC_NO_GC objects.
     */
    LOG_SCAN("ALLOC_NO_GC objects\n");
    refs = &gcHeap->nonCollectableRefs;
    op = refs->table;
    while ((uintptr_t)op < (uintptr_t)refs->nextEntry) {
        dvmMarkObjectNonNull(*(op++));
    }

    /* Mark any special objects we have sitting around.
     */
    LOG_SCAN("special objects\n");
    dvmMarkObjectNonNull(gDvm.outOfMemoryObj);
    dvmMarkObjectNonNull(gDvm.internalErrorObj);
    dvmMarkObjectNonNull(gDvm.noClassDefFoundErrorObj);
    //TODO: scan object references sitting in gDvm; use pointer begin & end

    HPROF_CLEAR_GC_SCAN_STATE();
}

/*
 * Nothing past this point is allowed to use dvmMarkObject*().
 * Scanning/recursion must use markObject*(), which takes the
 * finger into account.
 */
#define dvmMarkObjectNonNull __dont_use_dvmMarkObjectNonNull__


/* Mark all of a ClassObject's interfaces.
 */
static void markInterfaces(const ClassObject *clazz, GcMarkContext *ctx)
{
    ClassObject **interfaces;
    int interfaceCount;
    int i;

    /* Mark all interfaces.
     */
    interfaces = clazz->interfaces;
    interfaceCount = clazz->interfaceCount;
    for (i = 0; i < interfaceCount; i++) {
        markObjectNonNull((Object *)*interfaces, ctx);
        interfaces++;
    }
}

/* Mark all objects referred to by a ClassObject's static fields.
 */
static void scanStaticFields(const ClassObject *clazz, GcMarkContext *ctx)
{
    StaticField *f;
    int i;

    //TODO: Optimize this with a bit vector or something
    f = clazz->sfields;
    for (i = 0; i < clazz->sfieldCount; i++) {
        char c = f->field.signature[0];
        if (c == '[' || c == 'L') {
            /* It's an array or class reference.
             */
            markObject((Object *)f->value.l, ctx);
        }
        f++;
    }
}

/* Mark all objects referred to by a DataObject's instance fields.
 */
static void scanInstanceFields(const DataObject *obj, ClassObject *clazz,
        GcMarkContext *ctx)
{
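    /* NOTE: the refOffsets bit-vector fast path below is disabled by
     * the "false &&"; every instance currently takes the
     * superclass-walking path in the else branch.
     */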
    if (false && clazz->refOffsets != CLASS_WALK_SUPER) {
        unsigned int refOffsets = clazz->refOffsets;
        while (refOffsets != 0) {
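            /* Each set bit in refOffsets encodes one reference field:
             * CLZ finds the highest remaining bit, which is then cleared
             * and translated back into a byte offset within the object.
             */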
            const int rshift = CLZ(refOffsets);
            refOffsets &= ~(CLASS_HIGH_BIT >> rshift);
            markObject(dvmGetFieldObject((Object*)obj,
                    CLASS_OFFSET_FROM_CLZ(rshift)), ctx);
        }
    } else {
        while (clazz != NULL) {
            InstField *f;
            int i;

            /* All of the fields that contain object references
             * are guaranteed to be at the beginning of the ifields list.
             */
            f = clazz->ifields;
            for (i = 0; i < clazz->ifieldRefCount; i++) {
                /* Mark the array or object reference.
                 * May be NULL.
                 *
                 * Note that, per the comment on struct InstField,
                 * f->byteOffset is the offset from the beginning of
                 * obj, not the offset into obj->instanceData.
                 */
                markObject(dvmGetFieldObject((Object*)obj, f->byteOffset), ctx);
                f++;
            }

            /* This will be NULL when we hit java.lang.Object
             */
            clazz = clazz->super;
        }
    }
}

/* Mark all objects referred to by the array's contents.
 */
static void scanObjectArray(const ArrayObject *array, GcMarkContext *ctx)
{
    Object **contents;
    u4 length;
    u4 i;

    contents = (Object **)array->contents;
    length = array->length;

    for (i = 0; i < length; i++) {
        markObject(*contents, ctx); // may be NULL
        contents++;
    }
}

/* Mark all objects referred to by the ClassObject.
 */
static void scanClassObject(const ClassObject *clazz, GcMarkContext *ctx)
{
    LOGV_SCAN("---------> %s\n", clazz->name);

    if (IS_CLASS_FLAG_SET(clazz, CLASS_ISARRAY)) {
        /* We're an array; mark the class object of the contents
         * of the array.
         *
         * Note that we won't necessarily reach the array's element
         * class by scanning the array contents; the array may be
         * zero-length, or may only contain null objects.
         */
        markObjectNonNull((Object *)clazz->elementClass, ctx);
    }

    /* We scan these explicitly in case the only remaining
     * reference to a particular class object is via a data
     * object; we may not be guaranteed to reach all
     * live class objects via a classloader.
     */
    markObject((Object *)clazz->super, ctx);  // may be NULL (java.lang.Object)
    markObject(clazz->classLoader, ctx);      // may be NULL

    scanStaticFields(clazz, ctx);
    markInterfaces(clazz, ctx);
}

/* Mark all objects that obj refers to.
 *
 * Called on every object in markList.
 */
static void scanObject(const Object *obj, GcMarkContext *ctx)
{
    ClassObject *clazz;

    assert(dvmIsValidObject(obj));
    LOGV_SCAN("0x%08x %s\n", (uint)obj, obj->clazz->name);

#if WITH_HPROF
    if (gDvm.gcHeap->hprofContext != NULL) {
        hprofDumpHeapObject(gDvm.gcHeap->hprofContext, obj);
    }
#endif

#if WITH_OBJECT_HEADERS
    if (ptr2chunk(obj)->scanGeneration == gGeneration) {
        LOGE("object 0x%08x was already scanned this generation\n",
                (uintptr_t)obj);
        dvmAbort();
    }
    ptr2chunk(obj)->oldScanGeneration = ptr2chunk(obj)->scanGeneration;
    ptr2chunk(obj)->scanGeneration = gGeneration;
    ptr2chunk(obj)->scanCount++;
#endif

    /* Get and mark the class object for this particular instance.
     */
    clazz = obj->clazz;
    if (clazz == NULL) {
        /* This can happen if we catch an object between
         * dvmMalloc() and DVM_OBJECT_INIT(). The object
         * won't contain any references yet, so we can
         * just skip it.
         */
        return;
    } else if (clazz == gDvm.unlinkedJavaLangClass) {
        /* This class hasn't been linked yet. We're guaranteed
         * that the object doesn't contain any references that
         * aren't already tracked, so we can skip scanning it.
         *
         * NOTE: unlinkedJavaLangClass is not on the heap, so
         * it's very important that we don't try marking it.
         */
        return;
    }

#if WITH_OBJECT_HEADERS
    gMarkParent = obj;
#endif

    assert(dvmIsValidObject((Object *)clazz));
    markObjectNonNull((Object *)clazz, ctx);

    /* Mark any references in this object.
     */
    if (IS_CLASS_FLAG_SET(clazz, CLASS_ISARRAY)) {
        /* It's an array object.
         */
        if (IS_CLASS_FLAG_SET(clazz, CLASS_ISOBJECTARRAY)) {
            /* It's an array of object references.
             */
            scanObjectArray((ArrayObject *)obj, ctx);
        }
        // else there's nothing else to scan
    } else {
        /* It's a DataObject-compatible object.
         */
        scanInstanceFields((DataObject *)obj, clazz, ctx);

        if (IS_CLASS_FLAG_SET(clazz, CLASS_ISREFERENCE)) {
            GcHeap *gcHeap = gDvm.gcHeap;
            Object *referent;

            /* It's a subclass of java/lang/ref/Reference.
             * The fields in this class have been arranged
             * such that scanInstanceFields() did not actually
             * mark the "referent" field; we need to handle
             * it specially.
             *
             * If the referent already has a strong mark (isMarked(referent)),
             * we don't care about its reference status.
             */
            referent = dvmGetFieldObject(obj,
                    gDvm.offJavaLangRefReference_referent);
            if (referent != NULL &&
                    !isMarked(ptr2chunk(referent), &gcHeap->markContext))
            {
                u4 refFlags;

                if (gcHeap->markAllReferents) {
                    LOG_REF("Hard-marking a reference\n");

                    /* Don't bother with normal reference-following
                     * behavior, just mark the referent. This should
                     * only be used when following objects that just
                     * became scheduled for finalization.
                     */
                    markObjectNonNull(referent, ctx);
                    goto skip_reference;
                }

                /* See if this reference was handled by a previous GC.
                 */
                if (dvmGetFieldObject(obj,
                        gDvm.offJavaLangRefReference_vmData) ==
                        SCHEDULED_REFERENCE_MAGIC)
                {
                    LOG_REF("Skipping scheduled reference\n");

                    /* Don't reschedule it, but make sure that its
                     * referent doesn't get collected (in case it's
                     * a PhantomReference and wasn't cleared automatically).
                     */
                    //TODO: Mark these after handling all new refs of
                    //      this strength, in case the new refs refer
                    //      to the same referent. Not a very common
                    //      case, though.
                    markObjectNonNull(referent, ctx);
                    goto skip_reference;
                }

                /* Find out what kind of reference is pointing
                 * to referent.
                 */
                refFlags = GET_CLASS_FLAG_GROUP(clazz,
                        CLASS_ISREFERENCE |
                        CLASS_ISWEAKREFERENCE |
                        CLASS_ISPHANTOMREFERENCE);

                /* We use the vmData field of Reference objects
                 * as a next pointer in a singly-linked list.
                 * That way, we don't need to allocate any memory
                 * while we're doing a GC.
                 */
#define ADD_REF_TO_LIST(list, ref) \
                do { \
                    Object *ARTL_ref_ = (/*de-const*/Object *)(ref); \
                    dvmSetFieldObject(ARTL_ref_, \
                            gDvm.offJavaLangRefReference_vmData, list); \
                    list = ARTL_ref_; \
                } while (false)

                /* At this stage, we just keep track of all of
                 * the live references that we've seen. Later,
                 * we'll walk through each of these lists and
                 * deal with the referents.
                 */
                if (refFlags == CLASS_ISREFERENCE) {
                    /* It's a soft reference. Depending on the state,
                     * we'll attempt to collect all of them, some of
                     * them, or none of them.
                     */
                    if (gcHeap->softReferenceCollectionState ==
                            SR_COLLECT_NONE)
                    {
                sr_collect_none:
                        markObjectNonNull(referent, ctx);
                    } else if (gcHeap->softReferenceCollectionState ==
                            SR_COLLECT_ALL)
                    {
                sr_collect_all:
                        ADD_REF_TO_LIST(gcHeap->softReferences, obj);
                    } else {
                        /* We'll only try to collect half of the
                         * referents.
                         */
                        if (gcHeap->softReferenceColor++ & 1) {
                            goto sr_collect_none;
                        }
                        goto sr_collect_all;
                    }
                } else {
                    /* It's a weak or phantom reference.
                     * Clearing CLASS_ISREFERENCE will reveal which.
                     */
                    refFlags &= ~CLASS_ISREFERENCE;
                    if (refFlags == CLASS_ISWEAKREFERENCE) {
                        ADD_REF_TO_LIST(gcHeap->weakReferences, obj);
                    } else if (refFlags == CLASS_ISPHANTOMREFERENCE) {
                        ADD_REF_TO_LIST(gcHeap->phantomReferences, obj);
                    } else {
                        assert(!"Unknown reference type");
                    }
                }
#undef ADD_REF_TO_LIST
            }
        }

    skip_reference:
        /* If this is a class object, mark various other things that
         * its internals point to.
         *
         * All class objects are instances of java.lang.Class,
         * including the java.lang.Class class object.
         */
        if (clazz == gDvm.classJavaLangClass) {
            scanClassObject((ClassObject *)obj, ctx);
        }
    }

#if WITH_OBJECT_HEADERS
    gMarkParent = NULL;
#endif
}

static void
processMarkStack(GcMarkContext *ctx)
{
    const Object **const base = ctx->stack.base;

    /* Scan anything that's on the mark stack.
     * We can't use the bitmaps anymore, so use
     * a finger that points past the end of them.
     */
    ctx->finger = (void *)ULONG_MAX;
    while (ctx->stack.top != base) {
        scanObject(*ctx->stack.top++, ctx);
    }
}

#ifndef NDEBUG
static uintptr_t gLastFinger = 0;
#endif

static bool
scanBitmapCallback(size_t numPtrs, void **ptrs, const void *finger, void *arg)
{
    GcMarkContext *ctx = (GcMarkContext *)arg;
    size_t i;

#ifndef NDEBUG
    assert((uintptr_t)finger >= gLastFinger);
    gLastFinger = (uintptr_t)finger;
#endif

    ctx->finger = finger;
    for (i = 0; i < numPtrs; i++) {
        /* The pointers we're getting back are DvmHeapChunks,
         * not Objects.
         */
        scanObject(chunk2ptr(*ptrs++), ctx);
    }

    return true;
}

/* Given bitmaps with the root set marked, find and mark all
 * reachable objects. When this returns, the entire set of
 * live objects will be marked and the mark stack will be empty.
 */
void dvmHeapScanMarkedObjects()
{
    GcMarkContext *ctx = &gDvm.gcHeap->markContext;

    assert(ctx->finger == NULL);

    /* The bitmaps currently have bits set for the root set.
     * Walk across the bitmaps and scan each object.
     */
#ifndef NDEBUG
    gLastFinger = 0;
#endif
    dvmHeapBitmapWalkList(ctx->bitmaps, ctx->numBitmaps,
            scanBitmapCallback, ctx);

    /* We've walked the mark bitmaps. Scan anything that's
     * left on the mark stack.
     */
    processMarkStack(ctx);

    LOG_SCAN("done with marked objects\n");
}

/** @return true if we need to schedule a call to clear().
 */
static bool clearReference(Object *reference)
{
    /* This is what the default implementation of Reference.clear()
     * does. We're required to clear all references to a given
     * referent atomically, so we can't pop in and out of interp
     * code each time.
     *
     * Also, someone may have subclassed one of the basic Reference
     * types, overriding clear(). We can't trust the clear()
     * implementation to call super.clear(); we cannot let clear()
     * resurrect the referent. If we clear it here, we can safely
     * call any overriding implementations.
     */
    dvmSetFieldObject(reference,
            gDvm.offJavaLangRefReference_referent, NULL);

#if FANCY_REFERENCE_SUBCLASS
    /* See if clear() has actually been overridden. If so,
     * we need to schedule a call to it before calling enqueue().
     */
    if (reference->clazz->vtable[gDvm.voffJavaLangRefReference_clear]->clazz !=
            gDvm.classJavaLangRefReference)
    {
        /* clear() has been overridden; return true to indicate
         * that we need to schedule a call to the real clear()
         * implementation.
         */
        return true;
    }
#endif

    return false;
}

/** @return true if we need to schedule a call to enqueue().
 */
static bool enqueueReference(Object *reference)
{
#if FANCY_REFERENCE_SUBCLASS
    /* See if this reference class has overridden enqueue();
     * if not, we can take a shortcut.
     */
    if (reference->clazz->vtable[gDvm.voffJavaLangRefReference_enqueue]->clazz
            == gDvm.classJavaLangRefReference)
#endif
    {
        Object *queue = dvmGetFieldObject(reference,
                gDvm.offJavaLangRefReference_queue);
        Object *queueNext = dvmGetFieldObject(reference,
                gDvm.offJavaLangRefReference_queueNext);
        if (queue == NULL || queueNext != NULL) {
            /* There is no queue, or the reference has already
             * been enqueued. The Reference.enqueue() method
             * will do nothing even if we call it.
             */
            return false;
        }
    }

    /* We need to call enqueue(), but if we called it from
     * here we'd probably deadlock. Schedule a call.
     */
    return true;
}

/* All objects for stronger reference levels have been
 * marked before this is called.
 */
void dvmHeapHandleReferences(Object *refListHead, enum RefType refType)
{
    Object *reference;
    GcMarkContext *markContext = &gDvm.gcHeap->markContext;
    const int offVmData = gDvm.offJavaLangRefReference_vmData;
    const int offReferent = gDvm.offJavaLangRefReference_referent;
    bool workRequired = false;

    size_t numCleared = 0;
    size_t numEnqueued = 0;
    reference = refListHead;
    while (reference != NULL) {
        Object *next;
        Object *referent;

        /* Pull the interesting fields out of the Reference object.
         */
        next = dvmGetFieldObject(reference, offVmData);
        referent = dvmGetFieldObject(reference, offReferent);

        //TODO: when handling REF_PHANTOM, unlink any references
        //      that fail this initial if(). We need to re-walk
        //      the list, and it would be nice to avoid the extra
        //      work.
        if (referent != NULL && !isMarked(ptr2chunk(referent), markContext)) {
            bool schedClear, schedEnqueue;

            /* This is the strongest reference that refers to referent.
             * Do the right thing.
             */
            switch (refType) {
            case REF_SOFT:
            case REF_WEAK:
                schedClear = clearReference(reference);
                schedEnqueue = enqueueReference(reference);
                break;
            case REF_PHANTOM:
                /* PhantomReferences are not cleared automatically.
                 * Until someone clears it (or the reference itself
                 * is collected), the referent must remain alive.
                 *
                 * It's necessary to fully mark the referent because
                 * it will still be present during the next GC, and
                 * all objects that it points to must be valid.
                 * (The referent will be marked outside of this loop,
                 * after handling all references of this strength, in
                 * case multiple references point to the same object.)
                 */
                schedClear = false;

                /* A PhantomReference is only useful with a
                 * queue, but since it's possible to create one
                 * without a queue, we need to check.
                 */
                schedEnqueue = enqueueReference(reference);
                break;
            default:
                assert(!"Bad reference type");
                schedClear = false;
                schedEnqueue = false;
                break;
            }
            numCleared += schedClear ? 1 : 0;
            numEnqueued += schedEnqueue ? 1 : 0;

            if (schedClear || schedEnqueue) {
                uintptr_t workBits;

                /* Stuff the clear/enqueue bits in the bottom of
                 * the pointer. Assumes that objects are 8-byte
                 * aligned.
                 *
                 * Note that we are adding the *Reference* (which
                 * is by definition already marked at this point) to
                 * this list; we're not adding the referent (which
                 * has already been cleared).
                 */
                assert(((intptr_t)reference & 3) == 0);
                assert(((WORKER_CLEAR | WORKER_ENQUEUE) & ~3) == 0);
                workBits = (schedClear ? WORKER_CLEAR : 0) |
                           (schedEnqueue ? WORKER_ENQUEUE : 0);
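                /* e.g. if both operations are scheduled, workBits ==
                 * (WORKER_CLEAR | WORKER_ENQUEUE), which the assert
                 * above guarantees fits in the pointer's low two bits.
                 */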
                if (!dvmHeapAddRefToLargeTable(
                        &gDvm.gcHeap->referenceOperations,
                        (Object *)((uintptr_t)reference | workBits)))
                {
                    LOGE_HEAP("dvmMalloc(): no room for any more "
                            "reference operations\n");
                    dvmAbort();
                }
                workRequired = true;
            }

            if (refType != REF_PHANTOM) {
                /* Let later GCs know not to reschedule this reference.
                 */
                dvmSetFieldObject(reference, offVmData,
                        SCHEDULED_REFERENCE_MAGIC);
            } // else this is handled later for REF_PHANTOM

        } // else there was a stronger reference to the referent.

        reference = next;
    }
#define refType2str(r) \
    ((r) == REF_SOFT ? "soft" : ( \
     (r) == REF_WEAK ? "weak" : ( \
     (r) == REF_PHANTOM ? "phantom" : "UNKNOWN" )))
    LOGD_HEAP("dvmHeapHandleReferences(): cleared %zd, enqueued %zd %s "
            "references\n", numCleared, numEnqueued, refType2str(refType));

    /* Walk through the reference list again, and mark any non-clear/marked
     * referents. Only PhantomReferences can have non-clear referents
     * at this point.
     */
    if (refType == REF_PHANTOM) {
        bool scanRequired = false;

        HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_REFERENCE_CLEANUP, 0);
        reference = refListHead;
        while (reference != NULL) {
            Object *next;
            Object *referent;

            /* Pull the interesting fields out of the Reference object.
             */
            next = dvmGetFieldObject(reference, offVmData);
            referent = dvmGetFieldObject(reference, offReferent);

            if (referent != NULL && !isMarked(ptr2chunk(referent), markContext)) {
                markObjectNonNull(referent, markContext);
                scanRequired = true;

                /* Let later GCs know not to reschedule this reference.
                 */
                dvmSetFieldObject(reference, offVmData,
                        SCHEDULED_REFERENCE_MAGIC);
            }

            reference = next;
        }
        HPROF_CLEAR_GC_SCAN_STATE();

        if (scanRequired) {
            processMarkStack(markContext);
        }
    }

    if (workRequired) {
        dvmSignalHeapWorker(false);
    }
}


/* Find unreachable objects that need to be finalized,
 * and schedule them for finalization.
 */
void dvmHeapScheduleFinalizations()
{
    HeapRefTable newPendingRefs;
    LargeHeapRefTable *finRefs = gDvm.gcHeap->finalizableRefs;
    Object **ref;
    Object **lastRef;
    size_t totalPendCount;
    GcMarkContext *markContext = &gDvm.gcHeap->markContext;

    /*
     * All reachable objects have been marked.
     * Any unmarked finalizable objects need to be finalized.
     */

    /* Create a table that the new pending refs will
     * be added to.
     */
    if (!dvmHeapInitHeapRefTable(&newPendingRefs, 128)) {
        //TODO: mark all finalizable refs and hope that
        //      we can schedule them next time. Watch out,
        //      because we may be expecting to free up space
        //      by calling finalizers.
        LOGE_GC("dvmHeapScheduleFinalizations(): no room for "
                "pending finalizations\n");
        dvmAbort();
    }

    /* Walk through finalizableRefs and move any unmarked references
     * to the list of new pending refs.
     */
    totalPendCount = 0;
    while (finRefs != NULL) {
        Object **gapRef;
        size_t newPendCount = 0;

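        /* Walk the table, compacting it in place: unmarked refs move
         * to newPendingRefs while marked refs slide down over the gaps
         * left behind, e.g. [A, b, C, d] becomes [A, C] with b and d
         * now pending (lowercase = unmarked).
         */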
        gapRef = ref = finRefs->refs.table;
        lastRef = finRefs->refs.nextEntry;
        while (ref < lastRef) {
            DvmHeapChunk *hc;

            hc = ptr2chunk(*ref);
            if (!isMarked(hc, markContext)) {
                if (!dvmHeapAddToHeapRefTable(&newPendingRefs, *ref)) {
                    //TODO: add the current table and allocate
                    //      a new, smaller one.
                    LOGE_GC("dvmHeapScheduleFinalizations(): "
                            "no room for any more pending finalizations: %zd\n",
                            dvmHeapNumHeapRefTableEntries(&newPendingRefs));
                    dvmAbort();
                }
                newPendCount++;
            } else {
                /* This ref is marked, so will remain on finalizableRefs.
                 */
                if (newPendCount > 0) {
                    /* Copy it up to fill the holes.
                     */
                    *gapRef++ = *ref;
                } else {
                    /* No holes yet; don't bother copying.
                     */
                    gapRef++;
                }
            }
            ref++;
        }
        finRefs->refs.nextEntry = gapRef;
        //TODO: if the table is empty when we're done, free it.
        totalPendCount += newPendCount;
        finRefs = finRefs->next;
    }
    LOGD_GC("dvmHeapScheduleFinalizations(): %zd finalizers triggered.\n",
            totalPendCount);
    if (totalPendCount == 0) {
        /* No objects required finalization.
         * Free the empty temporary table.
         */
        dvmClearReferenceTable(&newPendingRefs);
        return;
    }

    /* Add the new pending refs to the main list.
     */
    if (!dvmHeapAddTableToLargeTable(&gDvm.gcHeap->pendingFinalizationRefs,
            &newPendingRefs))
    {
        LOGE_GC("dvmHeapScheduleFinalizations(): can't insert new "
                "pending finalizations\n");
        dvmAbort();
    }

    //TODO: try compacting the main list with a memcpy loop

    /* Mark the refs we just moved; we don't want them or their
     * children to get swept yet.
     */
    ref = newPendingRefs.table;
    lastRef = newPendingRefs.nextEntry;
    assert(ref < lastRef);
    HPROF_SET_GC_SCAN_STATE(HPROF_ROOT_FINALIZING, 0);
    while (ref < lastRef) {
        markObjectNonNull(*ref, markContext);
        ref++;
    }
    HPROF_CLEAR_GC_SCAN_STATE();

    /* Set markAllReferents so that we don't collect referents whose
     * only references are in final-reachable objects.
     * TODO: eventually provide normal reference behavior by properly
     *       marking these references.
     */
    gDvm.gcHeap->markAllReferents = true;
    processMarkStack(markContext);
    gDvm.gcHeap->markAllReferents = false;

    dvmSignalHeapWorker(false);
}

void dvmHeapFinishMarkStep()
{
    HeapBitmap *markBitmap;
    HeapBitmap objectBitmap;
    GcMarkContext *markContext;

    markContext = &gDvm.gcHeap->markContext;

    /* The sweep step freed every object that appeared in the
     * HeapSource bitmaps that didn't appear in the mark bitmaps.
     * The new state of the HeapSource is exactly the final
     * mark bitmaps, so swap them in.
     *
     * The old bitmaps will be swapped into the context so that
     * we can clean them up.
     */
    dvmHeapSourceReplaceObjectBitmaps(markContext->bitmaps,
            markContext->numBitmaps);

    /* Clean up the old HeapSource bitmaps and anything else associated
     * with the marking process.
     */
    dvmHeapBitmapDeleteList(markContext->bitmaps, markContext->numBitmaps);
    destroyMarkStack(&markContext->stack);

    memset(markContext, 0, sizeof(*markContext));
}

#if WITH_HPROF && WITH_HPROF_UNREACHABLE
static bool
hprofUnreachableBitmapCallback(size_t numPtrs, void **ptrs,
        const void *finger, void *arg)
{
    hprof_context_t *hctx = (hprof_context_t *)arg;
    size_t i;

    for (i = 0; i < numPtrs; i++) {
        Object *obj;

        /* The pointers we're getting back are DvmHeapChunks, not
         * Objects.
         */
        obj = (Object *)chunk2ptr(*ptrs++);

        hprofMarkRootObject(hctx, obj, 0);
        hprofDumpHeapObject(hctx, obj);
    }

    return true;
}

static void
hprofDumpUnmarkedObjects(const HeapBitmap markBitmaps[],
        const HeapBitmap objectBitmaps[], size_t numBitmaps)
{
    hprof_context_t *hctx = gDvm.gcHeap->hprofContext;
    if (hctx == NULL) {
        return;
    }

    LOGI("hprof: dumping unreachable objects\n");

    HPROF_SET_GC_SCAN_STATE(HPROF_UNREACHABLE, 0);

    dvmHeapBitmapXorWalkLists(markBitmaps, objectBitmaps, numBitmaps,
            hprofUnreachableBitmapCallback, hctx);

    HPROF_CLEAR_GC_SCAN_STATE();
}
#endif

static bool
sweepBitmapCallback(size_t numPtrs, void **ptrs, const void *finger, void *arg)
{
    const ClassObject *const classJavaLangClass = gDvm.classJavaLangClass;
    size_t i;
    void **origPtrs = ptrs;

    for (i = 0; i < numPtrs; i++) {
        DvmHeapChunk *hc;
        Object *obj;

        /* The pointers we're getting back are DvmHeapChunks, not
         * Objects.
         */
        hc = (DvmHeapChunk *)*ptrs++;
        obj = (Object *)chunk2ptr(hc);

#if WITH_OBJECT_HEADERS
        if (hc->markGeneration == gGeneration) {
            LOGE("sweeping marked object: 0x%08x\n", (uint)obj);
            dvmAbort();
        }
#endif

        /* Free the monitor associated with the object.
         */
        dvmFreeObjectMonitor(obj);

        /* NOTE: Dereferencing clazz is dangerous. If obj was the last
         * one to reference its class object, the class object could be
         * on the sweep list, and could already have been swept, leaving
         * us with a stale pointer.
         */
        LOGV_SWEEP("FREE: 0x%08x %s\n", (uint)obj, obj->clazz->name);

        /* This assumes that java.lang.Class will never go away.
         * If it can, and we were the last reference to it, it
         * could have already been swept. However, even in that case,
         * gDvm.classJavaLangClass should still have a useful
         * value.
         */
        if (obj->clazz == classJavaLangClass) {
            LOGV_SWEEP("---------------> %s\n", ((ClassObject *)obj)->name);
            /* dvmFreeClassInnards() may have already been called,
             * but it's safe to call on the same ClassObject twice.
             */
            dvmFreeClassInnards((ClassObject *)obj);
        }

#if 0
        /* Overwrite the to-be-freed object to make stale references
         * more obvious.
         */
        {
            int chunklen;
            ClassObject *clazz = obj->clazz;
#if WITH_OBJECT_HEADERS
            DvmHeapChunk chunk = *hc;
            chunk.header = ~OBJECT_HEADER | 1;
#endif
            chunklen = dvmHeapSourceChunkSize(hc);
            memset(hc, 0xa5, chunklen);
            obj->clazz = (ClassObject *)((uintptr_t)clazz ^ 0xffffffff);
#if WITH_OBJECT_HEADERS
            *hc = chunk;
#endif
        }
#endif
    }
    // TODO: dvmHeapSourceFreeList has a loop, just like the above
    //       does. Consider collapsing the two loops to save overhead.
    dvmHeapSourceFreeList(numPtrs, origPtrs);

    return true;
}

/* A function suitable for passing to dvmHashForeachRemove()
 * to clear out any unmarked objects. Clears the low bits
 * of the pointer because the intern table may set them.
 */
static int isUnmarkedObject(void *object)
{
    return !isMarked(ptr2chunk((uintptr_t)object & ~(HB_OBJECT_ALIGNMENT-1)),
            &gDvm.gcHeap->markContext);
}

/* Walk through the list of objects that haven't been
 * marked and free them.
 */
void
dvmHeapSweepUnmarkedObjects(int *numFreed, size_t *sizeFreed)
{
    const HeapBitmap *markBitmaps;
    const GcMarkContext *markContext;
    HeapBitmap objectBitmaps[HEAP_SOURCE_MAX_HEAP_COUNT];
    size_t origObjectsAllocated;
    size_t origBytesAllocated;
    size_t numBitmaps;

    /* All reachable objects have been marked.
     * Detach any unreachable interned strings before
     * we sweep.
     */
    dvmGcDetachDeadInternedStrings(isUnmarkedObject);

    /* Free any known objects that are not marked.
     */
    origObjectsAllocated = dvmHeapSourceGetValue(HS_OBJECTS_ALLOCATED, NULL, 0);
    origBytesAllocated = dvmHeapSourceGetValue(HS_BYTES_ALLOCATED, NULL, 0);

    markContext = &gDvm.gcHeap->markContext;
    markBitmaps = markContext->bitmaps;
    numBitmaps = dvmHeapSourceGetObjectBitmaps(objectBitmaps,
            HEAP_SOURCE_MAX_HEAP_COUNT);
#ifndef NDEBUG
    if (numBitmaps != markContext->numBitmaps) {
        LOGE("heap bitmap count mismatch: %zd != %zd\n",
                numBitmaps, markContext->numBitmaps);
        dvmAbort();
    }
#endif

#if WITH_HPROF && WITH_HPROF_UNREACHABLE
    hprofDumpUnmarkedObjects(markBitmaps, objectBitmaps, numBitmaps);
#endif

    dvmHeapBitmapXorWalkLists(markBitmaps, objectBitmaps, numBitmaps,
            sweepBitmapCallback, NULL);

    *numFreed = origObjectsAllocated -
            dvmHeapSourceGetValue(HS_OBJECTS_ALLOCATED, NULL, 0);
    *sizeFreed = origBytesAllocated -
            dvmHeapSourceGetValue(HS_BYTES_ALLOCATED, NULL, 0);

#ifdef WITH_PROFILER
    if (gDvm.allocProf.enabled) {
        gDvm.allocProf.freeCount += *numFreed;
        gDvm.allocProf.freeSize += *sizeFreed;
    }
#endif
}