Lines Matching refs:Page
98 ASSERT((OffsetFrom(address) & Page::kPageAlignmentMask) == 0)
107 ASSERT((0 < size) && (size <= Page::kMaxNonCodeHeapObjectSize))
110 ASSERT((Page::kObjectStartOffset <= offset) \
111 && (offset <= Page::kPageSize))
648 class Page : public MemoryChunk {
654 INLINE(static Page* FromAddress(Address a)) { in INLINE()
655 return reinterpret_cast<Page*>(OffsetFrom(a) & ~kPageAlignmentMask); in INLINE()
662 INLINE(static Page* FromAllocationTop(Address top)) { in INLINE()
663 Page* p = FromAddress(top - kPointerSize); in INLINE()
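The FromAddress and FromAllocationTop accessors above work because pages are power-of-two aligned, so clearing the low bits of any interior address yields the start of the owning page. A minimal standalone sketch of that masking trick, assuming an illustrative page size rather than V8's actual Page::kPageSize, with PageSketch as a hypothetical stand-in for the real Page class:

#include <cstdint>

constexpr uintptr_t kPageSize = uintptr_t{1} << 20;   // assumed power-of-two page size
constexpr uintptr_t kPageAlignmentMask = kPageSize - 1;

struct PageSketch;  // hypothetical stand-in for v8::internal::Page

static PageSketch* PageFromAddress(uintptr_t a) {
  // Any address inside a page maps to the same page start.
  return reinterpret_cast<PageSketch*>(a & ~kPageAlignmentMask);
}

static PageSketch* PageFromAllocationTop(uintptr_t top) {
  // An allocation top may sit exactly on the next page boundary; stepping
  // back one word keeps it attributed to the page the objects live on.
  return PageFromAddress(top - sizeof(void*));
}

That one-word back-off is presumably also why the VerifyPagedAllocation line further down can compare FromAllocationTop(top) with FromAllocationTop(limit) directly.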
668 inline Page* next_page();
669 inline Page* prev_page();
670 inline void set_next_page(Page* page);
671 inline void set_prev_page(Page* page);
706 static inline Page* Initialize(Heap* heap,
731 STATIC_CHECK(sizeof(Page) <= MemoryChunk::kHeaderSize);
852 ASSERT(size >= static_cast<size_t>(Page::kPageSize)); in FreeBlock()
857 ASSERT(size >= static_cast<size_t>(Page::kPageSize)); in FreeBlock()
911 return (OffsetFrom(addr) & Page::kPageAlignmentMask) >> kRegionSizeLog2; in RegionNumber()
915 Page* page = Page::FromAddress(addr); in Update()
928 static const int kSize = Page::kPageSize / kRegionSize;
930 STATIC_ASSERT(Page::kPageSize % kRegionSize == 0);
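The RegionNumber, kSize, and STATIC_ASSERT lines above describe a per-page table of fixed-size regions: the offset of an address within its page, shifted right by the region-size log, selects a table entry. A hedged sketch of that computation, with page and region sizes as illustrative assumptions rather than V8's constants:

#include <cstdint>

constexpr uintptr_t kPageSize = uintptr_t{1} << 20;        // assumed page size
constexpr uintptr_t kPageAlignmentMask = kPageSize - 1;
constexpr int kRegionSizeLog2 = 13;                        // assumed 8 KB regions
constexpr uintptr_t kRegionSize = uintptr_t{1} << kRegionSizeLog2;
constexpr int kRegionsPerPage =                            // mirrors kSize above
    static_cast<int>(kPageSize / kRegionSize);
static_assert(kPageSize % kRegionSize == 0, "regions must tile the page exactly");

static int RegionNumber(uintptr_t addr) {
  // Offset within the page, then the index of the region that offset falls in.
  return static_cast<int>((addr & kPageAlignmentMask) >> kRegionSizeLog2);
}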
953 Page* AllocatePage(PagedSpace* owner, Executability executable);
978 return (Available() / Page::kPageSize) * Page::kMaxNonCodeHeapObjectSize; in MaxAvailable()
1079 Page* InitializePagesInChunk(int chunk_id, int pages_in_chunk,
1118 HeapObjectIterator(Page* page, HeapObjectCallback size_func);
1168 inline Page* next();
1172 Page* prev_page_; // Previous page returned.
1175 Page* next_page_;
1195 return (Page::FromAllocationTop(top) == Page::FromAllocationTop(limit)) in VerifyPagedAllocation()
1392 void CountFreeListItems(Page* p, SizeStats* sizes);
1394 intptr_t EvictFreeListItems(Page* p);
1399 static const int kMaxBlockSize = Page::kMaxNonCodeHeapObjectSize;
1526 Page::FromAddress(top) == Page::FromAddress(limit - 1)); in SetTop()
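The SetTop assertion above, together with the earlier VerifyPagedAllocation check, guards the invariant that a linear allocation area [top, limit) never straddles a page. A small self-contained sketch of why the comparison uses limit - 1, reusing the same illustrative page constants as the sketches above:

#include <cstdint>

constexpr uintptr_t kPageSize = uintptr_t{1} << 20;   // assumed
constexpr uintptr_t kPageAlignmentMask = kPageSize - 1;

// limit may legally point one past the last byte of the page, so the page is
// taken from the last byte actually covered by the area, not from limit itself.
static bool AllocationAreaOnOnePage(uintptr_t top, uintptr_t limit) {
  return (top & ~kPageAlignmentMask) == ((limit - 1) & ~kPageAlignmentMask);
}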
1540 void ReleasePage(Page* page);
1546 Page* anchor() { return &anchor_; } in anchor()
1573 static bool ShouldBeSweptLazily(Page* p) { in ShouldBeSweptLazily()
1575 !p->IsFlagSet(Page::RESCAN_ON_EVACUATION) && in ShouldBeSweptLazily()
1579 void SetPagesToSweep(Page* first) { in SetPagesToSweep()
1589 void IncreaseUnsweptFreeBytes(Page* p) { in IncreaseUnsweptFreeBytes()
1594 void DecreaseUnsweptFreeBytes(Page* p) { in DecreaseUnsweptFreeBytes()
1605 Page* FirstPage() { return anchor_.next_page(); } in FirstPage()
1606 Page* LastPage() { return anchor_.prev_page(); } in LastPage()
1608 void CountFreeListItems(Page* p, FreeList::SizeStats* sizes) { in CountFreeListItems()
1634 Page anchor_;
1652 Page* first_unswept_page_;
1728 static const int kAreaSize = Page::kNonCodeObjectAreaSize;
1753 return (reinterpret_cast<intptr_t>(addr) & Page::kPageAlignmentMask) in IsAtStart()
1758 return (reinterpret_cast<intptr_t>(addr) & Page::kPageAlignmentMask) == 0; in IsAtEnd()
1769 ~Page::kPageAlignmentMask); in FromAddress()
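The NewSpacePage predicates above apply the same alignment arithmetic to semispace pages. The IsAtStart continuation is not shown in the listing, but it presumably compares the in-page offset against the object-start offset (just past the page header), while IsAtEnd treats an address exactly on a page boundary as one past the last byte of the page it terminates. A sketch with both constants as assumptions, not V8's values:

#include <cstdint>

constexpr uintptr_t kPageSize = uintptr_t{1} << 20;     // assumed
constexpr uintptr_t kPageAlignmentMask = kPageSize - 1;
constexpr uintptr_t kObjectStartOffset = 256;           // assumed header size

static bool IsAtStartOfPage(uintptr_t addr) {
  return (addr & kPageAlignmentMask) == kObjectStartOffset;
}

static bool IsAtEndOfPage(uintptr_t addr) {
  return (addr & kPageAlignmentMask) == 0;
}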
2113 return (to_space_.Capacity() / Page::kPageSize) * NewSpacePage::kAreaSize; in EffectiveCapacity()
2329 virtual Address PageAllocationLimit(Page* page) { in PageAllocationLimit()
2359 page_extra_ = Page::kNonCodeObjectAreaSize % object_size_in_bytes; in FixedSpace()
2363 virtual Address PageAllocationLimit(Page* page) { in PageAllocationLimit()
2415 static const int kMapsPerPage = Page::kNonCodeObjectAreaSize / Map::kSize;
2476 if (chunk_size <= (Page::kPageSize + Page::kObjectStartOffset)) return 0; in ObjectSizeFor()
2477 return chunk_size - Page::kPageSize - Page::kObjectStartOffset; in ObjectSizeFor()
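The ObjectSizeFor fragment on the last two lines computes the usable size of a chunk: a chunk no larger than one page plus the object-start offset cannot hold an object at all, and otherwise one page plus the object-start offset is reserved out of the chunk. A worked sketch with assumed stand-in constants:

#include <cstdint>

constexpr uintptr_t kPageSize = uintptr_t{1} << 20;     // assumed
constexpr uintptr_t kObjectStartOffset = 256;           // assumed

static uintptr_t ObjectSizeForChunk(uintptr_t chunk_size) {
  if (chunk_size <= kPageSize + kObjectStartOffset) return 0;
  return chunk_size - kPageSize - kObjectStartOffset;
}

// Example: with these assumed constants, a chunk of 2 * kPageSize bytes can
// hold an object of 2 * kPageSize - kPageSize - 256 = kPageSize - 256 bytes.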