
Searched refs:to_ref (Results 1 – 5 of 5) sorted by relevance

/art/runtime/gc/collector/
concurrent_copying.cc
200 mirror::Object* to_ref = Mark(self, from_ref); in MarkHeapReference() local
201 if (from_ref != to_ref) { in MarkHeapReference()
207 } while (!field->CasWeakRelaxed(from_ref, to_ref)); in MarkHeapReference()
519 mirror::Object* to_ref = concurrent_copying_->Mark(self, ref); in VisitRoots() local
520 if (to_ref != ref) { in VisitRoots()
521 *root = to_ref; in VisitRoots()
536 mirror::Object* to_ref = concurrent_copying_->Mark(self, ref); in VisitRoots() local
537 if (to_ref != ref) { in VisitRoots()
538 root->Assign(to_ref); in VisitRoots()
1845 void ConcurrentCopying::PushOntoMarkStack(Thread* const self, mirror::Object* to_ref) { in PushOntoMarkStack() argument
[all …]
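
The matches at lines 200-207 show the heart of MarkHeapReference(): mark the referent, and if marking produced a different (forwarded) address, retry a weak relaxed CAS until the field holds the up-to-date reference. Below is a minimal, self-contained sketch of that retry pattern using std::atomic in place of ART's mirror types and field accessors; Object, Mark(), and MarkHeapReferenceSketch() here are simplified stand-ins, not the real ART API.

#include <atomic>
#include <cstdio>

struct Object {};  // stand-in for mirror::Object

// Stand-in for the collector's Mark(): returns the reference the mutator
// should use (the to-space copy once the object has been moved). Here it
// simply returns its argument.
Object* Mark(Object* obj) { return obj; }

// Sketch of the retry loop in MarkHeapReference() (lines 200-207 above):
// re-read the field, mark the referent, and CAS the forwarded pointer in
// until the field no longer holds a stale from-space reference.
void MarkHeapReferenceSketch(std::atomic<Object*>* field) {
  Object* from_ref;
  Object* to_ref;
  do {
    from_ref = field->load(std::memory_order_relaxed);
    to_ref = Mark(from_ref);
    if (from_ref == to_ref) {
      return;  // Already up to date; no write needed.
    }
    // A weak CAS may fail spuriously or because another thread updated the
    // field first; either way, loop and redo the work on the fresh value.
  } while (!field->compare_exchange_weak(from_ref, to_ref,
                                         std::memory_order_relaxed));
}

int main() {
  Object obj;
  std::atomic<Object*> field{&obj};
  MarkHeapReferenceSketch(&field);
  std::printf("field now holds %p\n", static_cast<void*>(field.load()));
  return 0;
}
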
concurrent_copying-inl.h
162 mirror::Object* to_ref = GetFwdPtr(from_ref); in Mark() local
163 if (to_ref == nullptr) { in Mark()
165 to_ref = Copy(self, from_ref, holder, offset); in Mark()
169 DCHECK(region_space_->IsInToSpace(to_ref) || heap_->non_moving_space_->HasAddress(to_ref)) in Mark()
170 << "from_ref=" << from_ref << " to_ref=" << to_ref; in Mark()
171 return to_ref; in Mark()
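
The concurrent_copying-inl.h hit shows Mark()'s slow path: look up the forwarding pointer on the from-space object and, if none has been installed yet, copy the object and use the new to-space address. The sketch below illustrates that forwarding-pointer-or-copy idiom with a plain struct member standing in for however GetFwdPtr() actually stores the forwarding address; the types and helpers are illustrative assumptions, not the real implementation.

#include <cstdio>

// Illustrative from-space object: a forwarding slot plus some payload.
struct Obj {
  Obj* fwd;
  int payload;
};

// Stand-in for GetFwdPtr(from_ref): the to-space address installed in the
// forwarding slot, or nullptr if the object has not been copied yet.
Obj* GetFwdPtr(Obj* from_ref) { return from_ref->fwd; }

// Stand-in for Copy(): allocate a to-space copy and publish it through the
// forwarding slot. (Single-threaded sketch; the real collector resolves
// races between copying threads atomically.)
Obj* Copy(Obj* from_ref) {
  Obj* to_ref = new Obj;
  to_ref->fwd = nullptr;
  to_ref->payload = from_ref->payload;
  from_ref->fwd = to_ref;
  return to_ref;
}

// Sketch of the forwarding-pointer-or-copy idiom in Mark()
// (concurrent_copying-inl.h:162-171 above).
Obj* Mark(Obj* from_ref) {
  Obj* to_ref = GetFwdPtr(from_ref);
  if (to_ref == nullptr) {
    to_ref = Copy(from_ref);  // Not copied yet: copy now, use the new address.
  }
  return to_ref;
}

int main() {
  Obj from;
  from.fwd = nullptr;
  from.payload = 42;
  Obj* to = Mark(&from);
  std::printf("payload after copy: %d\n", to->payload);
  delete to;
  return 0;
}
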
concurrent_copying.h
184 void Scan(mirror::Object* to_ref, size_t obj_size = 0) REQUIRES_SHARED(Locks::mutator_lock_)
216 void ProcessMarkStackRef(mirror::Object* to_ref) REQUIRES_SHARED(Locks::mutator_lock_)
mark_compact.cc
1561 mirror::Object* to_ref = reinterpret_cast<mirror::Object*>(start_addr - offset_within_obj); in CompactPage() local
1564 to_ref, in CompactPage()
1571 to_ref, in CompactPage()
1583 << " to_addr:" << to_ref in CompactPage()
/art/runtime/entrypoints/quick/
quick_jni_entrypoints.cc
62 mirror::Object* to_ref = ReadBarrier::BarrierForRoot(declaring_class); in artJniReadBarrier() local
63 declaring_class->Assign(to_ref); in artJniReadBarrier()
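
The entrypoint hit shows the same root-fixup idea as the VisitRoots() matches above: run the read barrier on a GC root and store the (possibly forwarded) result back into the root slot. A minimal sketch follows; BarrierForRoot() and FixupRoot() here are illustrative stand-ins, not the actual ReadBarrier API (and the real artJniReadBarrier() assigns unconditionally, whereas the VisitRoots() hits write back only when the reference changed).

#include <cstdio>

struct Object {};  // stand-in for mirror::Object

// Stand-in for ReadBarrier::BarrierForRoot(): given the current value of a
// GC root, return the reference the mutator should use from now on. Here
// it is the identity function.
Object* BarrierForRoot(Object* root_value) { return root_value; }

// Sketch of the root-fixup step seen in the VisitRoots() hits (lines
// 519-521 and 536-538 above): refresh the root through the read barrier
// and write back only if the reference changed.
void FixupRoot(Object** root) {
  Object* ref = *root;
  Object* to_ref = BarrierForRoot(ref);
  if (to_ref != ref) {
    *root = to_ref;  // Install the forwarded reference into the root slot.
  }
}

int main() {
  Object obj;
  Object* root = &obj;
  FixupRoot(&root);
  std::printf("root after fixup: %p\n", static_cast<void*>(root));
  return 0;
}
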