// NOTE(review): removed code-browser navigation artifacts ("Home", "Line#",
// "Scopes#", "Navigate#", "Raw", "Download") that were scraped into this file
// and are not part of the source.
1/*
2 * Copyright 2018 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8#include "include/private/GrTypesPriv.h"
9#include "src/gpu/GrGpuResourcePriv.h"
10#include "src/gpu/mtl/GrMtlBuffer.h"
11#include "src/gpu/mtl/GrMtlCommandBuffer.h"
12#include "src/gpu/mtl/GrMtlGpu.h"
13
14#if !__has_feature(objc_arc)
15#error This file must be compiled with Arc. Use -fobjc-arc flag
16#endif
17
18#ifdef SK_DEBUG
19#define VALIDATE() this->validate()
20#else
21#define VALIDATE() do {} while(false)
22#endif
23
// Factory for GrMtlBuffer. Constructs the buffer and, when initial contents
// are supplied, uploads them immediately; a failed upload yields nullptr.
sk_sp<GrMtlBuffer> GrMtlBuffer::Make(GrMtlGpu* gpu, size_t size, GrGpuBufferType intendedType,
                                     GrAccessPattern accessPattern, const void* data) {
    sk_sp<GrMtlBuffer> result(new GrMtlBuffer(gpu, size, intendedType, accessPattern));
    if (!data) {
        return result;
    }
    // Seed the buffer with the caller's data; bail on upload failure.
    return result->onUpdateData(data, size) ? result : nullptr;
}
32
// Constructor. Static (non-dynamic) buffers get a private-storage MTLBuffer
// up front; dynamic buffers defer allocation until they are mapped.
GrMtlBuffer::GrMtlBuffer(GrMtlGpu* gpu, size_t size, GrGpuBufferType intendedType,
                         GrAccessPattern accessPattern)
        : INHERITED(gpu, size, intendedType, accessPattern)
        , fIsDynamic(accessPattern != kStatic_GrAccessPattern)
        , fOffset(0) {
    if (!fIsDynamic) {
        // TODO: newBufferWithBytes: used to work with StorageModePrivate buffers -- seems like
        // a bug that it no longer does. If that changes we could use that to pre-load the buffer.
        if (size > 0) {
            fMtlBuffer = [gpu->device() newBufferWithLength: size
                                                    options: MTLResourceStorageModePrivate];
        } else {
            fMtlBuffer = nil;
        }
    }
    // Dynamic buffers are allocated when mapped (see internalMap).
    this->registerWithCache(SkBudgeted::kYes);
    VALIDATE();
}
49
// Destructor. By this point onRelease()/onAbandon() must already have run and
// dropped all Metal references; the asserts verify nothing leaked past them.
GrMtlBuffer::~GrMtlBuffer() {
    SkASSERT(fMtlBuffer == nil);
    SkASSERT(fMappedBuffer == nil);
    SkASSERT(fMapPtr == nullptr);
}
55
// Uploads srcInBytes bytes from src into the buffer via a map/copy/unmap
// round trip. Returns false if the copy cannot be performed.
bool GrMtlBuffer::onUpdateData(const void* src, size_t srcInBytes) {
    // A static buffer needs pre-existing storage big enough for the copy;
    // a dynamic buffer allocates in internalMap() below.
    if (!fIsDynamic && (fMtlBuffer == nil || srcInBytes > fMtlBuffer.length)) {
        return false;
    }
    VALIDATE();

    this->internalMap(srcInBytes);
    if (!fMapPtr) {
        return false;
    }
    SkASSERT(fMappedBuffer);
    // For the static path the transfer buffer was sized exactly for this copy.
    SkASSERT(fIsDynamic || srcInBytes == fMappedBuffer.length);

    memcpy(fMapPtr, src, srcInBytes);
    this->internalUnmap(srcInBytes);

    VALIDATE();
    return true;
}
81
// Returns the owning GrMtlGpu. Only valid while the resource is alive; the
// base-class getGpu() is known to be a GrMtlGpu on this backend.
inline GrMtlGpu* GrMtlBuffer::mtlGpu() const {
    SkASSERT(!this->wasDestroyed());
    return static_cast<GrMtlGpu*>(this->getGpu());
}
86
// Called when the context is abandoned: drop every Metal reference without
// any GPU interaction (under ARC, nil-ing the ivars releases the objects).
void GrMtlBuffer::onAbandon() {
    fMapPtr = nullptr;
    fMappedBuffer = nil;
    fMtlBuffer = nil;
    VALIDATE();
    INHERITED::onAbandon();
}
94
// Called on normal resource release. Drops every Metal reference (ARC frees
// the objects); skipped when the resource was already destroyed/abandoned.
void GrMtlBuffer::onRelease() {
    if (!this->wasDestroyed()) {
        VALIDATE();
        fMapPtr = nullptr;
        fMappedBuffer = nil;
        fMtlBuffer = nil;
        VALIDATE();
    }
    INHERITED::onRelease();
}
105
// Maps sizeInBytes of the buffer for CPU writes, setting fMapPtr/fMappedBuffer.
// Dynamic path: borrows a slice of the resource provider's shared dynamic
// buffer (CPU-visible) at fOffset. Static path: allocates a temporary shared-
// storage transfer buffer; internalUnmap() later blits it into fMtlBuffer.
// No-ops (leaving fMapPtr null) if the resource was already destroyed.
void GrMtlBuffer::internalMap(size_t sizeInBytes) {
    if (this->wasDestroyed()) {
        return;
    }
    VALIDATE();
    SkASSERT(!this->isMapped());
    if (fIsDynamic) {
        // NOTE(review): this replaces fMtlBuffer each map — presumably the
        // provider's dynamic buffer is ref'd/recycled elsewhere; verify in
        // GrMtlResourceProvider.
        fMtlBuffer = this->mtlGpu()->resourceProvider().getDynamicBuffer(sizeInBytes, &fOffset);
        fMappedBuffer = fMtlBuffer;
        // CPU-visible contents pointer, adjusted to this buffer's sub-allocation.
        fMapPtr = static_cast<char*>(fMtlBuffer.contents) + fOffset;
    } else {
        SkASSERT(fMtlBuffer);
        SkASSERT(fMappedBuffer == nil);
        // Shared storage so the CPU can write directly into the staging buffer.
        fMappedBuffer =
                [this->mtlGpu()->device() newBufferWithLength: sizeInBytes
                                                      options: MTLResourceStorageModeShared];
        fMapPtr = fMappedBuffer.contents;
    }
    VALIDATE();
}
126
// Completes a map started by internalMap(), publishing the first sizeInBytes
// bytes written through fMapPtr and clearing the mapped state.
// Dynamic path: on macOS, notifies Metal of the modified range of the managed
// buffer. Static path: records a blit from the shared staging buffer into the
// private-storage fMtlBuffer on the current command buffer.
//
// Fix: the original began with SkASSERT(fMtlBuffer), which contradicted the
// explicitly handled fMtlBuffer == nil branch below and ran *before* the
// wasDestroyed() early-out — so a debug build would trap if unmap arrived
// after onRelease()/onAbandon() nil'd fMtlBuffer. The assert is removed; the
// nil case is already handled gracefully.
void GrMtlBuffer::internalUnmap(size_t sizeInBytes) {
    if (this->wasDestroyed()) {
        return;
    }
    VALIDATE();
    SkASSERT(this->isMapped());
    if (fMtlBuffer == nil) {
        // Backing store vanished (e.g. released while mapped); just drop the map.
        fMappedBuffer = nil;
        fMapPtr = nullptr;
        return;
    }
    if (fIsDynamic) {
#ifdef SK_BUILD_FOR_MAC
        // TODO: need to make sure offset and size have valid alignments.
        [fMtlBuffer didModifyRange: NSMakeRange(fOffset, sizeInBytes)];
#endif
    } else {
        // Copy staging (shared) -> destination (private) on the GPU timeline.
        GrMtlCommandBuffer* cmdBuffer = this->mtlGpu()->commandBuffer();
        id<MTLBlitCommandEncoder> blitCmdEncoder = cmdBuffer->getBlitCommandEncoder();
        [blitCmdEncoder copyFromBuffer: fMappedBuffer
                          sourceOffset: 0
                              toBuffer: fMtlBuffer
                     destinationOffset: 0
                                  size: sizeInBytes];
    }
    fMappedBuffer = nil;
    fMapPtr = nullptr;
}
156
// GrGpuBuffer hook: map the entire buffer for CPU access.
void GrMtlBuffer::onMap() {
    this->internalMap(this->size());
}
160
// GrGpuBuffer hook: unmap, publishing the full buffer's contents.
void GrMtlBuffer::onUnmap() {
    this->internalUnmap(this->size());
}
164
#ifdef SK_DEBUG
// Debug-only invariant checks: an allocated buffer must have a supported
// intended type, and a mapped (staging) buffer never exceeds the backing
// buffer's length. Messaging nil is a harmless no-op, so nil states pass.
void GrMtlBuffer::validate() const {
    if (fMtlBuffer != nil) {
        const GrGpuBufferType type = this->intendedType();
        SkASSERT(GrGpuBufferType::kVertex == type ||
                 GrGpuBufferType::kIndex == type ||
                 GrGpuBufferType::kXferCpuToGpu == type ||
                 GrGpuBufferType::kXferGpuToCpu == type);
        if (fMappedBuffer != nil) {
            SkASSERT(fMappedBuffer.length <= fMtlBuffer.length);
        }
    }
}
#endif
176