/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrBatchAtlas.h"
#include "GrBatchFlushState.h"
#include "GrRectanizer.h"
#include "GrTracing.h"
#include "GrVertexBuffer.h"

////////////////////////////////////////////////////////////////////////////////

GrBatchAtlas::BatchPlot::BatchPlot(int index, uint64_t genID, int offX, int offY, int width,
                                   int height, GrPixelConfig config)
    : fLastUpload(0)
    , fLastUse(0)
    , fIndex(index)
    , fGenID(genID)
    , fID(CreateId(fIndex, fGenID))
    , fData(nullptr)
    , fWidth(width)
    , fHeight(height)
    , fX(offX)
    , fY(offY)
    , fRects(nullptr)
    , fOffset(SkIPoint16::Make(fX * fWidth, fY * fHeight))
    , fConfig(config)
    , fBytesPerPixel(GrBytesPerPixel(config))
#ifdef SK_DEBUG
    , fDirty(false)
#endif
{
    fDirtyRect.setEmpty();
}

GrBatchAtlas::BatchPlot::~BatchPlot() {
    sk_free(fData);
    delete fRects;
}

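// Copies |image| (width x height, tightly packed) into this plot's CPU-side backing store,
// records the covered region as dirty, and returns the resulting atlas-space location in |loc|.
// Fails if the rectanizer cannot find room for the sub-image.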
bool GrBatchAtlas::BatchPlot::addSubImage(int width, int height, const void* image,
                                          SkIPoint16* loc) {
    SkASSERT(width <= fWidth && height <= fHeight);

    if (!fRects) {
        fRects = GrRectanizer::Factory(fWidth, fHeight);
    }

    if (!fRects->addRect(width, height, loc)) {
        return false;
    }

    if (!fData) {
        fData = reinterpret_cast<unsigned char*>(sk_calloc_throw(fBytesPerPixel * fWidth *
                                                                  fHeight));
    }
    size_t rowBytes = width * fBytesPerPixel;
    const unsigned char* imagePtr = (const unsigned char*)image;
    // point ourselves at the right starting spot
    unsigned char* dataPtr = fData;
    dataPtr += fBytesPerPixel * fWidth * loc->fY;
    dataPtr += fBytesPerPixel * loc->fX;
    // copy into the data buffer
    for (int i = 0; i < height; ++i) {
        memcpy(dataPtr, imagePtr, rowBytes);
        dataPtr += fBytesPerPixel * fWidth;
        imagePtr += rowBytes;
    }

    fDirtyRect.join(loc->fX, loc->fY, loc->fX + width, loc->fY + height);

    loc->fX += fOffset.fX;
    loc->fY += fOffset.fY;
    SkDEBUGCODE(fDirty = true;)

    return true;
}

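// Pushes the dirty region of the plot's CPU-side data to its backing texture via the uploader.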
void GrBatchAtlas::BatchPlot::uploadToTexture(GrBatchUploader::TextureUploader* uploader,
                                              GrTexture* texture) {
    // We should only be issuing uploads if we are in fact dirty
    SkASSERT(fDirty && fData && texture);
    TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("skia.gpu"), "GrBatchPlot::uploadToTexture");
    size_t rowBytes = fBytesPerPixel * fWidth;
    const unsigned char* dataPtr = fData;
    dataPtr += rowBytes * fDirtyRect.fTop;
    dataPtr += fBytesPerPixel * fDirtyRect.fLeft;
    uploader->writeTexturePixels(texture,
                                 fOffset.fX + fDirtyRect.fLeft, fOffset.fY + fDirtyRect.fTop,
                                 fDirtyRect.width(), fDirtyRect.height(),
                                 fConfig, dataPtr, rowBytes);
    fDirtyRect.setEmpty();
    SkDEBUGCODE(fDirty = false;)
}

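// Clears the plot for reuse: resets the rectanizer, bumps the generation (invalidating any
// previously issued IDs for this plot), and zeroes the CPU-side backing data.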
void GrBatchAtlas::BatchPlot::resetRects() {
    if (fRects) {
        fRects->reset();
    }

    fGenID++;
    fID = CreateId(fIndex, fGenID);

    // zero out the plot
    if (fData) {
        sk_bzero(fData, fBytesPerPixel * fWidth * fHeight);
    }

    fDirtyRect.setEmpty();
    SkDEBUGCODE(fDirty = false;)
}

////////////////////////////////////////////////////////////////////////////////

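// GrPlotUploader holds a ref on a plot and, when the flush state invokes upload(), writes the
// plot's dirty pixels into the atlas texture.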
class GrPlotUploader : public GrBatchUploader {
public:
    GrPlotUploader(GrBatchAtlas::BatchPlot* plot, GrTexture* texture)
        : INHERITED(plot->lastUploadToken())
        , fPlot(SkRef(plot))
        , fTexture(texture) {
        SkASSERT(plot);
    }

    void upload(TextureUploader* uploader) override {
        fPlot->uploadToTexture(uploader, fTexture);
    }

private:
    SkAutoTUnref<GrBatchAtlas::BatchPlot> fPlot;
    GrTexture* fTexture;

    typedef GrBatchUploader INHERITED;
};

///////////////////////////////////////////////////////////////////////////////

GrBatchAtlas::GrBatchAtlas(GrTexture* texture, int numPlotsX, int numPlotsY)
    : fTexture(texture)
    , fAtlasGeneration(kInvalidAtlasGeneration + 1) {

    int plotWidth = texture->width() / numPlotsX;
    int plotHeight = texture->height() / numPlotsY;
    SkASSERT(numPlotsX * numPlotsY <= BulkUseTokenUpdater::kMaxPlots);
    SkASSERT(plotWidth * numPlotsX == texture->width());
    SkASSERT(plotHeight * numPlotsY == texture->height());

    SkDEBUGCODE(fNumPlots = numPlotsX * numPlotsY;)

    // We currently do not support compressed atlases...
    SkASSERT(!GrPixelConfigIsCompressed(texture->desc().fConfig));

    // set up allocated plots
    fPlotArray = new SkAutoTUnref<BatchPlot>[numPlotsX * numPlotsY];

    SkAutoTUnref<BatchPlot>* currPlot = fPlotArray;
    for (int y = numPlotsY - 1, r = 0; y >= 0; --y, ++r) {
        for (int x = numPlotsX - 1, c = 0; x >= 0; --x, ++c) {
            uint32_t index = r * numPlotsX + c;
            currPlot->reset(new BatchPlot(index, 1, x, y, plotWidth, plotHeight,
                                          texture->desc().fConfig));

            // build LRU list
            fPlotList.addToHead(currPlot->get());
            ++currPlot;
        }
    }
}

GrBatchAtlas::~GrBatchAtlas() {
    SkSafeUnref(fTexture);
    delete[] fPlotArray;
}

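// Notifies every registered eviction callback that the plot identified by |id| is being recycled.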
void GrBatchAtlas::processEviction(AtlasID id) {
    for (int i = 0; i < fEvictionCallbacks.count(); i++) {
        (*fEvictionCallbacks[i].fFunc)(id, fEvictionCallbacks[i].fData);
    }
}

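// Marks |plot| most-recently-used and assigns its ID to |id|. If the plot's last scheduled
// upload has already been flushed, a new ASAP upload is scheduled; otherwise the still-pending
// upload will pick up this update.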
inline void GrBatchAtlas::updatePlot(GrDrawBatch::Target* target, AtlasID* id, BatchPlot* plot) {
    this->makeMRU(plot);

    // If our most recent upload has already occurred then we have to insert a new
    // upload. Otherwise, we already have a scheduled upload that hasn't yet occurred.
    // This new update will piggyback on that previously scheduled update.
    if (target->hasTokenBeenFlushed(plot->lastUploadToken())) {
        plot->setLastUploadToken(target->asapToken());
        SkAutoTUnref<GrPlotUploader> uploader(new GrPlotUploader(plot, fTexture));
        target->upload(uploader);
    }
    *id = plot->id();
}

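// Attempts to add |image| to the atlas. The search proceeds in three stages: (1) try each
// existing plot in most-recently-used order; (2) if none has room, recycle the least recently
// used plot, provided its last use has already been flushed to the GPU; (3) otherwise, if the
// LRU plot is not referenced by the current token, hand it off to the batch target and replace
// it with a fresh clone. If even that is not possible, return false so the caller can flush.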
bool GrBatchAtlas::addToAtlas(AtlasID* id, GrDrawBatch::Target* batchTarget,
                              int width, int height, const void* image, SkIPoint16* loc) {
    // We should already have a texture, TODO clean this up
    SkASSERT(fTexture);

    // now look through all allocated plots for one we can share, in most recently used order
    GrBatchPlotList::Iter plotIter;
    plotIter.init(fPlotList, GrBatchPlotList::Iter::kHead_IterStart);
    BatchPlot* plot;
    while ((plot = plotIter.get())) {
        SkASSERT(GrBytesPerPixel(fTexture->desc().fConfig) == plot->bpp());
        if (plot->addSubImage(width, height, image, loc)) {
            this->updatePlot(batchTarget, id, plot);
            return true;
        }
        plotIter.next();
    }

    // If the above fails, then see if the least recently used plot has already been flushed to
    // the GPU
    plot = fPlotList.tail();
    SkASSERT(plot);
    if (batchTarget->hasTokenBeenFlushed(plot->lastUseToken())) {
        this->processEviction(plot->id());
        plot->resetRects();
        SkASSERT(GrBytesPerPixel(fTexture->desc().fConfig) == plot->bpp());
        SkDEBUGCODE(bool verify = )plot->addSubImage(width, height, image, loc);
        SkASSERT(verify);
        this->updatePlot(batchTarget, id, plot);
        fAtlasGeneration++;
        return true;
    }

    // The least recently used plot hasn't been flushed to the GPU yet. However, if we have
    // flushed it to the batch target then we can reuse it. Our last use token is guaranteed to
    // be less than or equal to the current token. If it is less than the current token, then we
    // can spin off the plot (i.e. let the batch target manage it) and create a new plot in its
    // place in our array. If it is equal to the current token, then the caller has to flush
    // draws to the batch target so we can spin off the plot.
    if (plot->lastUseToken() == batchTarget->currentToken()) {
        return false;
    }

    SkASSERT(plot->lastUseToken() < batchTarget->currentToken());
    SkASSERT(!batchTarget->hasTokenBeenFlushed(batchTarget->currentToken()));

    SkASSERT(!plot->unique());  // The GrPlotUploader should have a ref too

    this->processEviction(plot->id());
    fPlotList.remove(plot);
    SkAutoTUnref<BatchPlot>& newPlot = fPlotArray[plot->index()];
    newPlot.reset(plot->clone());

    fPlotList.addToHead(newPlot.get());
    SkASSERT(GrBytesPerPixel(fTexture->desc().fConfig) == newPlot->bpp());
    SkDEBUGCODE(bool verify = )newPlot->addSubImage(width, height, image, loc);
    SkASSERT(verify);

    // Note that this plot will be uploaded inline with the draws whereas the
    // one it displaced most likely was uploaded ASAP.
    newPlot->setLastUploadToken(batchTarget->currentToken());
    SkAutoTUnref<GrPlotUploader> uploader(new GrPlotUploader(newPlot, fTexture));
    batchTarget->upload(uploader);
    *id = newPlot->id();

    fAtlasGeneration++;
    return true;
}