• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright 2015 Google Inc.
3  *
4  * Use of this source code is governed by a BSD-style license that can be
5  * found in the LICENSE file.
6  */
7 
// This is a GPU-backend specific test. It relies on static initializers to work
9 
10 #include "SkTypes.h"
11 
12 #if SK_SUPPORT_GPU && SK_ALLOW_STATIC_GLOBAL_INITIALIZERS && defined(SK_VULKAN)
13 
14 #include "GrContextFactory.h"
15 #include "GrTest.h"
16 #include "Test.h"
17 #include "vk/GrVkGpu.h"
18 
19 using sk_gpu_test::GrContextFactory;
20 
subheap_test(skiatest::Reporter * reporter,GrContext * context)21 void subheap_test(skiatest::Reporter* reporter, GrContext* context) {
22     GrVkGpu* gpu = static_cast<GrVkGpu*>(context->getGpu());
23 
24     // memtype doesn't matter, we're just testing the suballocation algorithm so we'll use 0
25     GrVkSubHeap heap(gpu, 0, 0, 64 * 1024, 32);
26     GrVkAlloc alloc0, alloc1, alloc2, alloc3;
27     // test full allocation and free
28     REPORTER_ASSERT(reporter, heap.alloc(64 * 1024, &alloc0));
29     REPORTER_ASSERT(reporter, alloc0.fOffset == 0);
30     REPORTER_ASSERT(reporter, alloc0.fSize == 64 * 1024);
31     REPORTER_ASSERT(reporter, heap.freeSize() == 0 && heap.largestBlockSize() == 0);
32     heap.free(alloc0);
33     REPORTER_ASSERT(reporter, heap.freeSize() == 64*1024 && heap.largestBlockSize() == 64 * 1024);
34 
35     // now let's suballoc some memory
36     REPORTER_ASSERT(reporter, heap.alloc(16 * 1024, &alloc0));
37     REPORTER_ASSERT(reporter, heap.alloc(23 * 1024, &alloc1));
38     REPORTER_ASSERT(reporter, heap.alloc(18 * 1024, &alloc2));
39     REPORTER_ASSERT(reporter, heap.freeSize() == 7 * 1024 && heap.largestBlockSize() == 7 * 1024);
40     // free lone block
41     heap.free(alloc1);
42     REPORTER_ASSERT(reporter, heap.freeSize() == 30 * 1024 && heap.largestBlockSize() == 23 * 1024);
43     // allocate into smallest free block
44     REPORTER_ASSERT(reporter, heap.alloc(6 * 1024, &alloc3));
45     REPORTER_ASSERT(reporter, heap.freeSize() == 24 * 1024 && heap.largestBlockSize() == 23 * 1024);
46     // allocate into exact size free block
47     REPORTER_ASSERT(reporter, heap.alloc(23 * 1024, &alloc1));
48     REPORTER_ASSERT(reporter, heap.freeSize() == 1 * 1024 && heap.largestBlockSize() == 1 * 1024);
49     // free lone block
50     heap.free(alloc2);
51     REPORTER_ASSERT(reporter, heap.freeSize() == 19 * 1024 && heap.largestBlockSize() == 18 * 1024);
52     // free and merge with preceding block and following
53     heap.free(alloc3);
54     REPORTER_ASSERT(reporter, heap.freeSize() == 25 * 1024 && heap.largestBlockSize() == 25 * 1024);
55     // free and merge with following block
56     heap.free(alloc1);
57     REPORTER_ASSERT(reporter, heap.freeSize() == 48 * 1024 && heap.largestBlockSize() == 48 * 1024);
58     // free starting block and merge with following
59     heap.free(alloc0);
60     REPORTER_ASSERT(reporter, heap.freeSize() == 64 * 1024 && heap.largestBlockSize() == 64 * 1024);
61 
62     // realloc
63     REPORTER_ASSERT(reporter, heap.alloc(4 * 1024, &alloc0));
64     REPORTER_ASSERT(reporter, heap.alloc(35 * 1024, &alloc1));
65     REPORTER_ASSERT(reporter, heap.alloc(10 * 1024, &alloc2));
66     REPORTER_ASSERT(reporter, heap.freeSize() == 15 * 1024 && heap.largestBlockSize() == 15 * 1024);
67     // free starting block and merge with following
68     heap.free(alloc0);
69     REPORTER_ASSERT(reporter, heap.freeSize() == 19 * 1024 && heap.largestBlockSize() == 15 * 1024);
70     // free block and merge with preceding
71     heap.free(alloc1);
72     REPORTER_ASSERT(reporter, heap.freeSize() == 54 * 1024 && heap.largestBlockSize() == 39 * 1024);
73     // free block and merge with preceding and following
74     heap.free(alloc2);
75     REPORTER_ASSERT(reporter, heap.freeSize() == 64 * 1024 && heap.largestBlockSize() == 64 * 1024);
76 
77     // fragment
78     REPORTER_ASSERT(reporter, heap.alloc(19 * 1024, &alloc0));
79     REPORTER_ASSERT(reporter, heap.alloc(5 * 1024, &alloc1));
80     REPORTER_ASSERT(reporter, heap.alloc(15 * 1024, &alloc2));
81     REPORTER_ASSERT(reporter, heap.alloc(3 * 1024, &alloc3));
82     REPORTER_ASSERT(reporter, heap.freeSize() == 22 * 1024 && heap.largestBlockSize() == 22 * 1024);
83     heap.free(alloc0);
84     REPORTER_ASSERT(reporter, heap.freeSize() == 41 * 1024 && heap.largestBlockSize() == 22 * 1024);
85     heap.free(alloc2);
86     REPORTER_ASSERT(reporter, heap.freeSize() == 56 * 1024 && heap.largestBlockSize() == 22 * 1024);
87     REPORTER_ASSERT(reporter, !heap.alloc(40 * 1024, &alloc0));
88     heap.free(alloc3);
89     REPORTER_ASSERT(reporter, heap.freeSize() == 59 * 1024 && heap.largestBlockSize() == 40 * 1024);
90     REPORTER_ASSERT(reporter, heap.alloc(40 * 1024, &alloc0));
91     REPORTER_ASSERT(reporter, heap.freeSize() == 19 * 1024 && heap.largestBlockSize() == 19 * 1024);
92     heap.free(alloc1);
93     REPORTER_ASSERT(reporter, heap.freeSize() == 24 * 1024 && heap.largestBlockSize() == 24 * 1024);
94     heap.free(alloc0);
95     REPORTER_ASSERT(reporter, heap.freeSize() == 64 * 1024 && heap.largestBlockSize() == 64 * 1024);
96 
97     // unaligned sizes
98     REPORTER_ASSERT(reporter, heap.alloc(19 * 1024 - 31, &alloc0));
99     REPORTER_ASSERT(reporter, heap.alloc(5 * 1024 - 5, &alloc1));
100     REPORTER_ASSERT(reporter, heap.alloc(15 * 1024 - 19, &alloc2));
101     REPORTER_ASSERT(reporter, heap.alloc(3 * 1024 - 3, &alloc3));
102     REPORTER_ASSERT(reporter, heap.freeSize() == 22 * 1024 && heap.largestBlockSize() == 22 * 1024);
103     heap.free(alloc0);
104     REPORTER_ASSERT(reporter, heap.freeSize() == 41 * 1024 && heap.largestBlockSize() == 22 * 1024);
105     heap.free(alloc2);
106     REPORTER_ASSERT(reporter, heap.freeSize() == 56 * 1024 && heap.largestBlockSize() == 22 * 1024);
107     REPORTER_ASSERT(reporter, !heap.alloc(40 * 1024, &alloc0));
108     heap.free(alloc3);
109     REPORTER_ASSERT(reporter, heap.freeSize() == 59 * 1024 && heap.largestBlockSize() == 40 * 1024);
110     REPORTER_ASSERT(reporter, heap.alloc(40 * 1024, &alloc0));
111     REPORTER_ASSERT(reporter, heap.freeSize() == 19 * 1024 && heap.largestBlockSize() == 19 * 1024);
112     heap.free(alloc1);
113     REPORTER_ASSERT(reporter, heap.freeSize() == 24 * 1024 && heap.largestBlockSize() == 24 * 1024);
114     heap.free(alloc0);
115     REPORTER_ASSERT(reporter, heap.freeSize() == 64 * 1024 && heap.largestBlockSize() == 64 * 1024);
116 }
117 
suballoc_test(skiatest::Reporter * reporter,GrContext * context)118 void suballoc_test(skiatest::Reporter* reporter, GrContext* context) {
119     GrVkGpu* gpu = static_cast<GrVkGpu*>(context->getGpu());
120 
121     // memtype/heap index don't matter, we're just testing the allocation algorithm so we'll use 0
122     GrVkHeap heap(gpu, GrVkHeap::kSubAlloc_Strategy, 64 * 1024);
123     GrVkAlloc alloc0, alloc1, alloc2, alloc3;
124     const VkDeviceSize kAlignment = 16;
125     const uint32_t kMemType = 0;
126     const uint32_t kHeapIndex = 0;
127 
128     REPORTER_ASSERT(reporter, heap.allocSize() == 0 && heap.usedSize() == 0);
129 
130     // fragment allocations so we need to grow heap
131     REPORTER_ASSERT(reporter, heap.alloc(19 * 1024 - 3, kAlignment, kMemType, kHeapIndex, &alloc0));
132     REPORTER_ASSERT(reporter, heap.alloc(5 * 1024 - 9, kAlignment, kMemType, kHeapIndex, &alloc1));
133     REPORTER_ASSERT(reporter, heap.alloc(15 * 1024 - 15, kAlignment, kMemType, kHeapIndex, &alloc2));
134     REPORTER_ASSERT(reporter, heap.alloc(3 * 1024 - 6, kAlignment, kMemType, kHeapIndex, &alloc3));
135     REPORTER_ASSERT(reporter, heap.allocSize() == 64 * 1024 && heap.usedSize() == 42 * 1024);
136     heap.free(alloc0);
137     REPORTER_ASSERT(reporter, heap.allocSize() == 64 * 1024 && heap.usedSize() == 23 * 1024);
138     heap.free(alloc2);
139     REPORTER_ASSERT(reporter, heap.allocSize() == 64 * 1024 && heap.usedSize() == 8 * 1024);
140     // we expect the heap to grow here
141     REPORTER_ASSERT(reporter, heap.alloc(40 * 1024, kAlignment, kMemType, kHeapIndex, &alloc0));
142     REPORTER_ASSERT(reporter, heap.allocSize() == 128 * 1024 && heap.usedSize() == 48 * 1024);
143     heap.free(alloc3);
144     REPORTER_ASSERT(reporter, heap.allocSize() == 128 * 1024 && heap.usedSize() == 45 * 1024);
145     // heap should not grow here (first subheap has exactly enough room)
146     REPORTER_ASSERT(reporter, heap.alloc(40 * 1024, kAlignment, kMemType, kHeapIndex, &alloc3));
147     REPORTER_ASSERT(reporter, heap.allocSize() == 128 * 1024 && heap.usedSize() == 85 * 1024);
148     // heap should not grow here (second subheap has room)
149     REPORTER_ASSERT(reporter, heap.alloc(22 * 1024, kAlignment, kMemType, kHeapIndex, &alloc2));
150     REPORTER_ASSERT(reporter, heap.allocSize() == 128 * 1024 && heap.usedSize() == 107 * 1024);
151     heap.free(alloc1);
152     REPORTER_ASSERT(reporter, heap.allocSize() == 128 * 1024 && heap.usedSize() == 102 * 1024);
153     heap.free(alloc0);
154     REPORTER_ASSERT(reporter, heap.allocSize() == 128 * 1024 && heap.usedSize() == 62 * 1024);
155     heap.free(alloc2);
156     REPORTER_ASSERT(reporter, heap.allocSize() == 128 * 1024 && heap.usedSize() == 40 * 1024);
157     heap.free(alloc3);
158     REPORTER_ASSERT(reporter, heap.allocSize() == 128 * 1024 && heap.usedSize() == 0 * 1024);
159     // heap should not grow here (allocating more than subheap size)
160     REPORTER_ASSERT(reporter, heap.alloc(128 * 1024, kAlignment, kMemType, kHeapIndex, &alloc0));
161     REPORTER_ASSERT(reporter, 0 == alloc0.fSize);
162     REPORTER_ASSERT(reporter, heap.allocSize() == 128 * 1024 && heap.usedSize() == 0 * 1024);
163     heap.free(alloc0);
164     REPORTER_ASSERT(reporter, heap.alloc(24 * 1024, kAlignment, kMemType, kHeapIndex, &alloc0));
165     REPORTER_ASSERT(reporter, heap.allocSize() == 128 * 1024 && heap.usedSize() == 24 * 1024);
166     // heap should alloc a new subheap because the memory type is different
167     REPORTER_ASSERT(reporter, heap.alloc(24 * 1024, kAlignment, kMemType+1, kHeapIndex, &alloc1));
168     REPORTER_ASSERT(reporter, heap.allocSize() == 192 * 1024 && heap.usedSize() == 48 * 1024);
169     // heap should alloc a new subheap because the alignment is different
170     REPORTER_ASSERT(reporter, heap.alloc(24 * 1024, 128, kMemType, kHeapIndex, &alloc2));
171     REPORTER_ASSERT(reporter, heap.allocSize() == 256 * 1024 && heap.usedSize() == 72 * 1024);
172     heap.free(alloc2);
173     heap.free(alloc0);
174     heap.free(alloc1);
175     REPORTER_ASSERT(reporter, heap.allocSize() == 256 * 1024 && heap.usedSize() == 0 * 1024);
176 }
177 
singlealloc_test(skiatest::Reporter * reporter,GrContext * context)178 void singlealloc_test(skiatest::Reporter* reporter, GrContext* context) {
179     GrVkGpu* gpu = static_cast<GrVkGpu*>(context->getGpu());
180 
181     // memtype/heap index don't matter, we're just testing the allocation algorithm so we'll use 0
182     GrVkHeap heap(gpu, GrVkHeap::kSingleAlloc_Strategy, 64 * 1024);
183     GrVkAlloc alloc0, alloc1, alloc2, alloc3;
184     const VkDeviceSize kAlignment = 64;
185     const uint32_t kMemType = 0;
186     const uint32_t kHeapIndex = 0;
187 
188     REPORTER_ASSERT(reporter, heap.allocSize() == 0 && heap.usedSize() == 0);
189 
190     // make a few allocations
191     REPORTER_ASSERT(reporter, heap.alloc(49 * 1024 - 3, kAlignment, kMemType, kHeapIndex, &alloc0));
192     REPORTER_ASSERT(reporter, heap.alloc(5 * 1024 - 37, kAlignment, kMemType, kHeapIndex, &alloc1));
193     REPORTER_ASSERT(reporter, heap.alloc(15 * 1024 - 11, kAlignment, kMemType, kHeapIndex, &alloc2));
194     REPORTER_ASSERT(reporter, heap.alloc(3 * 1024 - 29, kAlignment, kMemType, kHeapIndex, &alloc3));
195     REPORTER_ASSERT(reporter, heap.allocSize() == 72 * 1024 && heap.usedSize() == 72 * 1024);
196     heap.free(alloc0);
197     REPORTER_ASSERT(reporter, heap.allocSize() == 72 * 1024 && heap.usedSize() == 23 * 1024);
198     heap.free(alloc2);
199     REPORTER_ASSERT(reporter, heap.allocSize() == 72 * 1024 && heap.usedSize() == 8 * 1024);
200     // heap should not grow here (first subheap has room)
201     REPORTER_ASSERT(reporter, heap.alloc(40 * 1024, kAlignment, kMemType, kHeapIndex, &alloc0));
202     REPORTER_ASSERT(reporter, heap.allocSize() == 72 * 1024 && heap.usedSize() == 48 * 1024);
203     heap.free(alloc3);
204     REPORTER_ASSERT(reporter, heap.allocSize() == 72 * 1024 && heap.usedSize() == 45 * 1024);
205     // check for exact fit -- heap should not grow here (third subheap has room)
206     REPORTER_ASSERT(reporter, heap.alloc(15 * 1024 - 63, kAlignment, kMemType, kHeapIndex, &alloc2));
207     REPORTER_ASSERT(reporter, heap.allocSize() == 72 * 1024 && heap.usedSize() == 60 * 1024);
208     heap.free(alloc2);
209     REPORTER_ASSERT(reporter, heap.allocSize() == 72 * 1024 && heap.usedSize() == 45 * 1024);
210     // heap should grow here (no subheap has room)
211     REPORTER_ASSERT(reporter, heap.alloc(40 * 1024, kAlignment, kMemType, kHeapIndex, &alloc3));
212     REPORTER_ASSERT(reporter, heap.allocSize() == 112 * 1024 && heap.usedSize() == 85 * 1024);
213     heap.free(alloc1);
214     REPORTER_ASSERT(reporter, heap.allocSize() == 112 * 1024 && heap.usedSize() == 80 * 1024);
215     heap.free(alloc0);
216     REPORTER_ASSERT(reporter, heap.allocSize() == 112 * 1024 && heap.usedSize() == 40 * 1024);
217     heap.free(alloc3);
218     REPORTER_ASSERT(reporter, heap.allocSize() == 112 * 1024 && heap.usedSize() == 0 * 1024);
219     REPORTER_ASSERT(reporter, heap.alloc(24 * 1024, kAlignment, kMemType, kHeapIndex, &alloc0));
220     REPORTER_ASSERT(reporter, heap.allocSize() == 112 * 1024 && heap.usedSize() == 24 * 1024);
221     // heap should alloc a new subheap because the memory type is different
222     REPORTER_ASSERT(reporter, heap.alloc(24 * 1024, kAlignment, kMemType + 1, kHeapIndex, &alloc1));
223     REPORTER_ASSERT(reporter, heap.allocSize() == 136 * 1024 && heap.usedSize() == 48 * 1024);
224     // heap should alloc a new subheap because the alignment is different
225     REPORTER_ASSERT(reporter, heap.alloc(24 * 1024, 128, kMemType, kHeapIndex, &alloc2));
226     REPORTER_ASSERT(reporter, heap.allocSize() == 160 * 1024 && heap.usedSize() == 72 * 1024);
227     heap.free(alloc1);
228     heap.free(alloc2);
229     heap.free(alloc0);
230     REPORTER_ASSERT(reporter, heap.allocSize() == 160 * 1024 && heap.usedSize() == 0 * 1024);
231 }
232 
// Entry point: run each heap-strategy test against the same Vulkan context.
DEF_GPUTEST_FOR_VULKAN_CONTEXT(VkHeapTests, reporter, ctxInfo) {
    GrContext* context = ctxInfo.grContext();
    subheap_test(reporter, context);
    suballoc_test(reporter, context);
    singlealloc_test(reporter, context);
}
238 
239 #endif
240