/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_H_
#define ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_H_

#include "space.h"

#include "base/mutex.h"

namespace art {

namespace mirror {
class Object;
}

namespace gc {

namespace collector {
class MarkSweep;
}  // namespace collector

namespace space {

// A bump pointer space allocates by incrementing a pointer; it doesn't provide a free
// implementation, as it's intended to be evacuated.
class BumpPointerSpace final : public ContinuousMemMapAllocSpace {
 public:
  typedef void(*WalkCallback)(void *start, void *end, size_t num_bytes, void* callback_arg);

  SpaceType GetType() const override {
    return kSpaceTypeBumpPointerSpace;
  }

  // Create a bump pointer space with the requested sizes. The requested base address is not
  // guaranteed to be granted; if it is required, the caller should call Begin on the returned
  // space to confirm the request was granted.
  static BumpPointerSpace* Create(const std::string& name, size_t capacity);
  static BumpPointerSpace* CreateFromMemMap(const std::string& name, MemMap&& mem_map);

  // Allocate num_bytes; returns null if the space is full.
  mirror::Object* Alloc(Thread* self, size_t num_bytes, size_t* bytes_allocated,
                        size_t* usable_size, size_t* bytes_tl_bulk_allocated) override;
  // Thread-unsafe allocation for when mutators are suspended, used by the semispace collector.
  mirror::Object* AllocThreadUnsafe(Thread* self, size_t num_bytes, size_t* bytes_allocated,
                                    size_t* usable_size, size_t* bytes_tl_bulk_allocated)
      override REQUIRES(Locks::mutator_lock_);

  mirror::Object* AllocNonvirtual(size_t num_bytes);
  mirror::Object* AllocNonvirtualWithoutAccounting(size_t num_bytes);

  // Return the storage space required by obj.
  size_t AllocationSize(mirror::Object* obj, size_t* usable_size) override
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return AllocationSizeNonvirtual(obj, usable_size);
  }

  // No-ops unless we support free lists.
  size_t Free(Thread*, mirror::Object*) override {
    return 0;
  }

  size_t FreeList(Thread*, size_t, mirror::Object**) override {
    return 0;
  }

  size_t AllocationSizeNonvirtual(mirror::Object* obj, size_t* usable_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Removes the fork time growth limit on capacity, allowing the application to allocate up to the
  // maximum reserved size of the heap.
  void ClearGrowthLimit() {
    growth_end_ = Limit();
  }

  // Override capacity so that we only return the possibly limited capacity
  size_t Capacity() const override {
    return growth_end_ - begin_;
  }

  // The total amount of memory reserved for the space.
  size_t NonGrowthLimitCapacity() const override {
    return GetMemMap()->Size();
  }

  accounting::ContinuousSpaceBitmap* GetLiveBitmap() const override {
    return nullptr;
  }

  accounting::ContinuousSpaceBitmap* GetMarkBitmap() const override {
    return nullptr;
  }

  // Reset the space to empty.
  void Clear() override REQUIRES(!block_lock_);

  void Dump(std::ostream& os) const override;

  size_t RevokeThreadLocalBuffers(Thread* thread) override REQUIRES(!block_lock_);
  size_t RevokeAllThreadLocalBuffers() override
      REQUIRES(!Locks::runtime_shutdown_lock_, !Locks::thread_list_lock_, !block_lock_);
  void AssertThreadLocalBuffersAreRevoked(Thread* thread) REQUIRES(!block_lock_);
  void AssertAllThreadLocalBuffersAreRevoked()
      REQUIRES(!Locks::runtime_shutdown_lock_, !Locks::thread_list_lock_, !block_lock_);

  uint64_t GetBytesAllocated() override REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!*Locks::runtime_shutdown_lock_, !*Locks::thread_list_lock_, !block_lock_);
  uint64_t GetObjectsAllocated() override REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!*Locks::runtime_shutdown_lock_, !*Locks::thread_list_lock_, !block_lock_);

  bool IsEmpty() const {
    return Begin() == End();
  }

  bool CanMoveObjects() const override {
    return true;
  }

  bool Contains(const mirror::Object* obj) const override {
    const uint8_t* byte_obj = reinterpret_cast<const uint8_t*>(obj);
    return byte_obj >= Begin() && byte_obj < End();
  }

  // TODO: Change this? Mainly used for compacting to a particular region of memory.
  BumpPointerSpace(const std::string& name, uint8_t* begin, uint8_t* limit);

  // Return the object which comes after obj, while ensuring alignment.
  static mirror::Object* GetNextObject(mirror::Object* obj)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Allocate a new TLAB; returns false if the allocation failed.
  bool AllocNewTlab(Thread* self, size_t bytes) REQUIRES(!block_lock_);

  BumpPointerSpace* AsBumpPointerSpace() override {
    return this;
  }

  // Go through all of the blocks and visit the continuous objects.
  template <typename Visitor>
  ALWAYS_INLINE void Walk(Visitor&& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!block_lock_);

  accounting::ContinuousSpaceBitmap::SweepCallback* GetSweepCallback() override;

  // Record objects / bytes freed.
  void RecordFree(int32_t objects, int32_t bytes) {
    objects_allocated_.fetch_sub(objects, std::memory_order_relaxed);
    bytes_allocated_.fetch_sub(bytes, std::memory_order_relaxed);
  }

  void LogFragmentationAllocFailure(std::ostream& os, size_t failed_alloc_bytes) override
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Object alignment within the space.
  static constexpr size_t kAlignment = 8;

 protected:
  BumpPointerSpace(const std::string& name, MemMap&& mem_map);

  // Allocate a raw block of bytes.
  uint8_t* AllocBlock(size_t bytes) REQUIRES(block_lock_);
  void RevokeThreadLocalBuffersLocked(Thread* thread) REQUIRES(block_lock_);

  // The main block is an unbounded block where objects go when there are no other blocks. This
  // enables us to maintain tightly packed objects when thread-local buffers are not being used
  // for allocation. The main block starts at the space Begin().
  void UpdateMainBlock() REQUIRES(block_lock_);

  uint8_t* growth_end_;
  AtomicInteger objects_allocated_;  // Accumulated from revoked thread local regions.
  AtomicInteger bytes_allocated_;  // Accumulated from revoked thread local regions.
  Mutex block_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  // The objects at the start of the space are stored in the main block. The main block doesn't
  // have a header; this lets us walk empty spaces which are mprotected.
  size_t main_block_size_ GUARDED_BY(block_lock_);
  // The number of blocks in the space; if it is 0, the space has one long continuous block
  // which doesn't have an updated header.
  size_t num_blocks_ GUARDED_BY(block_lock_);

 private:
  struct BlockHeader {
    size_t size_;  // Size of the block in bytes; does not include the header.
    size_t unused_;  // Ensures alignment of kAlignment.
  };

  static_assert(sizeof(BlockHeader) % kAlignment == 0,
                "continuous block must be kAlignment aligned");

  friend class collector::MarkSweep;
  DISALLOW_COPY_AND_ASSIGN(BumpPointerSpace);
};

}  // namespace space
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_SPACE_BUMP_POINTER_SPACE_H_
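To make the allocation model above concrete, here is a minimal, self-contained sketch of a bump-pointer allocator. It is not ART code: ToyBumpPointerSpace and every other name in it are hypothetical stand-ins, and it only illustrates the core idea behind Alloc() and the growth limit declared in this header, namely rounding the request up to kAlignment and atomically advancing a single end pointer, failing (rather than freeing) once the limit is reached.

// Illustrative sketch only; not part of the ART sources.
#include <atomic>
#include <cstddef>
#include <cstdint>

class ToyBumpPointerSpace {
 public:
  static constexpr size_t kAlignment = 8;

  ToyBumpPointerSpace(uint8_t* begin, size_t capacity)
      : begin_(begin), end_(begin), growth_end_(begin + capacity) {}

  // Bump-pointer allocation: returns nullptr when the aligned request
  // does not fit before growth_end_. There is no Free(); a space like
  // this is reclaimed by evacuating live objects elsewhere.
  void* Alloc(size_t num_bytes) {
    const size_t aligned = (num_bytes + kAlignment - 1) & ~(kAlignment - 1);
    uint8_t* old_end = end_.load(std::memory_order_relaxed);
    uint8_t* new_end;
    do {
      new_end = old_end + aligned;
      if (new_end > growth_end_) {
        return nullptr;  // Space is full.
      }
    } while (!end_.compare_exchange_weak(old_end, new_end,
                                         std::memory_order_relaxed));
    return old_end;
  }

  size_t BytesUsed() const {
    return static_cast<size_t>(end_.load(std::memory_order_relaxed) - begin_);
  }

 private:
  uint8_t* const begin_;
  std::atomic<uint8_t*> end_;      // The bump pointer.
  uint8_t* const growth_end_;      // Analogous to the growth limit above.
};

A thread-local buffer path, as suggested by AllocNewTlab() above, would carve out a per-thread chunk with a single bump of the shared pointer and then serve individual object allocations from that chunk without further synchronization; the shared counters would be updated only when the buffer is revoked, which is consistent with the "accumulated from revoked thread local regions" comments on objects_allocated_ and bytes_allocated_.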