// Copyright 2017 The Dawn Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "dawn_native/null/DeviceNull.h"

#include "dawn_native/BackendConnection.h"
#include "dawn_native/Commands.h"
#include "dawn_native/DynamicUploader.h"

#include <spirv-cross/spirv_cross.hpp>

namespace dawn_native { namespace null {

    // Implementation of pre-Device objects: the null adapter, null backend connection and Connect()

    class Adapter : public AdapterBase {
      public:
        Adapter(InstanceBase* instance) : AdapterBase(instance, BackendType::Null) {
            mPCIInfo.name = "Null backend";
            mDeviceType = DeviceType::CPU;
        }
        virtual ~Adapter() = default;

      private:
        ResultOrError<DeviceBase*> CreateDeviceImpl(const DeviceDescriptor* descriptor) override {
            return {new Device(this, descriptor)};
        }
    };

    class Backend : public BackendConnection {
      public:
        Backend(InstanceBase* instance) : BackendConnection(instance, BackendType::Null) {
        }

        std::vector<std::unique_ptr<AdapterBase>> DiscoverDefaultAdapters() override {
            // There is always a single Null adapter because it is purely CPU based and doesn't
            // depend on the system.
            std::vector<std::unique_ptr<AdapterBase>> adapters;
            adapters.push_back(std::make_unique<Adapter>(GetInstance()));
            return adapters;
        }
    };

    BackendConnection* Connect(InstanceBase* instance) {
        return new Backend(instance);
    }

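    // A minimal sketch (illustrative only) of how the objects above are reached:
    //
    //     BackendConnection* backend = Connect(instance);
    //     std::vector<std::unique_ptr<AdapterBase>> adapters = backend->DiscoverDefaultAdapters();
    //
    // Device creation then goes through AdapterBase, which ends up in Adapter::CreateDeviceImpl()
    // above.

    // Pending operations stand in for the asynchronous GPU work a real backend would submit to a
    // queue: the device records them with AddPendingOperation() and runs them all in
    // SubmitPendingOperations(), which is reached from TickImpl() and Queue::SubmitImpl().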
    struct CopyFromStagingToBufferOperation : PendingOperation {
        virtual void Execute() {
            destination->CopyFromStaging(staging, sourceOffset, destinationOffset, size);
        }

        StagingBufferBase* staging;
        Ref<Buffer> destination;
        uint64_t sourceOffset;
        uint64_t destinationOffset;
        uint64_t size;
    };

    // Device

    Device::Device(Adapter* adapter, const DeviceDescriptor* descriptor)
        : DeviceBase(adapter, descriptor) {
        // Apply toggle overrides if necessary for testing.
        if (descriptor != nullptr) {
            ApplyToggleOverrides(descriptor);
        }
    }

    Device::~Device() {
        mDynamicUploader = nullptr;

        mPendingOperations.clear();
        ASSERT(mMemoryUsage == 0);
    }

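    // The Create*Impl factories below simply construct the corresponding backend objects;
    // everything lives in host memory. CreateBufferImpl additionally charges the requested size
    // against the device's memory cap.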
    ResultOrError<BindGroupBase*> Device::CreateBindGroupImpl(
        const BindGroupDescriptor* descriptor) {
        return new BindGroup(this, descriptor);
    }
    ResultOrError<BindGroupLayoutBase*> Device::CreateBindGroupLayoutImpl(
        const BindGroupLayoutDescriptor* descriptor) {
        return new BindGroupLayout(this, descriptor);
    }
    ResultOrError<BufferBase*> Device::CreateBufferImpl(const BufferDescriptor* descriptor) {
        DAWN_TRY(IncrementMemoryUsage(descriptor->size));
        return new Buffer(this, descriptor);
    }
    CommandBufferBase* Device::CreateCommandBuffer(CommandEncoderBase* encoder,
                                                   const CommandBufferDescriptor* descriptor) {
        return new CommandBuffer(encoder, descriptor);
    }
    ResultOrError<ComputePipelineBase*> Device::CreateComputePipelineImpl(
        const ComputePipelineDescriptor* descriptor) {
        return new ComputePipeline(this, descriptor);
    }
    ResultOrError<PipelineLayoutBase*> Device::CreatePipelineLayoutImpl(
        const PipelineLayoutDescriptor* descriptor) {
        return new PipelineLayout(this, descriptor);
    }
    ResultOrError<QueueBase*> Device::CreateQueueImpl() {
        return new Queue(this);
    }
    ResultOrError<RenderPipelineBase*> Device::CreateRenderPipelineImpl(
        const RenderPipelineDescriptor* descriptor) {
        return new RenderPipeline(this, descriptor);
    }
    ResultOrError<SamplerBase*> Device::CreateSamplerImpl(const SamplerDescriptor* descriptor) {
        return new Sampler(this, descriptor);
    }
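    // Shader modules still run SPIRV-Cross, but only to extract the reflection data the frontend
    // needs; nothing is compiled for an actual GPU.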
    ResultOrError<ShaderModuleBase*> Device::CreateShaderModuleImpl(
        const ShaderModuleDescriptor* descriptor) {
        auto module = new ShaderModule(this, descriptor);

        spirv_cross::Compiler compiler(descriptor->code, descriptor->codeSize);
        module->ExtractSpirvInfo(compiler);

        return module;
    }
    ResultOrError<SwapChainBase*> Device::CreateSwapChainImpl(
        const SwapChainDescriptor* descriptor) {
        return new SwapChain(this, descriptor);
    }
    ResultOrError<TextureBase*> Device::CreateTextureImpl(const TextureDescriptor* descriptor) {
        return new Texture(this, descriptor, TextureBase::TextureState::OwnedInternal);
    }
    ResultOrError<TextureViewBase*> Device::CreateTextureViewImpl(
        TextureBase* texture,
        const TextureViewDescriptor* descriptor) {
        return new TextureView(texture, descriptor);
    }

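    // Staging buffers are plain CPU allocations (see StagingBuffer::Initialize below); their
    // contents reach a Buffer's backing storage only when the deferred copy operation runs.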
    ResultOrError<std::unique_ptr<StagingBufferBase>> Device::CreateStagingBuffer(size_t size) {
        std::unique_ptr<StagingBufferBase> stagingBuffer =
            std::make_unique<StagingBuffer>(size, this);
        return std::move(stagingBuffer);
    }

    MaybeError Device::CopyFromStagingToBuffer(StagingBufferBase* source,
                                               uint64_t sourceOffset,
                                               BufferBase* destination,
                                               uint64_t destinationOffset,
                                               uint64_t size) {
        auto operation = std::make_unique<CopyFromStagingToBufferOperation>();
        operation->staging = source;
        operation->destination = ToBackend(destination);
        operation->sourceOffset = sourceOffset;
        operation->destinationOffset = destinationOffset;
        operation->size = size;

        AddPendingOperation(std::move(operation));

        return {};
    }

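    // The bytes argument is checked against kMaxMemoryUsage before the addition below, and the
    // static_assert keeps kMaxMemoryUsage at or below half of size_t's range, so
    // mMemoryUsage + bytes cannot overflow.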
    MaybeError Device::IncrementMemoryUsage(size_t bytes) {
        static_assert(kMaxMemoryUsage <= std::numeric_limits<size_t>::max() / 2, "");
        if (bytes > kMaxMemoryUsage || mMemoryUsage + bytes > kMaxMemoryUsage) {
            return DAWN_CONTEXT_LOST_ERROR("Out of memory.");
        }
        mMemoryUsage += bytes;
        return {};
    }

    void Device::DecrementMemoryUsage(size_t bytes) {
        ASSERT(mMemoryUsage >= bytes);
        mMemoryUsage -= bytes;
    }

    Serial Device::GetCompletedCommandSerial() const {
        return mCompletedSerial;
    }

    Serial Device::GetLastSubmittedCommandSerial() const {
        return mLastSubmittedSerial;
    }

    Serial Device::GetPendingCommandSerial() const {
        return mLastSubmittedSerial + 1;
    }

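    // Everything executes synchronously: ticking the device (or submitting on the queue) runs all
    // pending operations via SubmitPendingOperations(), which marks the work submitted so far as
    // completed and then advances the last submitted serial.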
    void Device::TickImpl() {
        SubmitPendingOperations();
    }

    void Device::AddPendingOperation(std::unique_ptr<PendingOperation> operation) {
        mPendingOperations.emplace_back(std::move(operation));
    }
    void Device::SubmitPendingOperations() {
        for (auto& operation : mPendingOperations) {
            operation->Execute();
        }
        mPendingOperations.clear();

        mCompletedSerial = mLastSubmittedSerial;
        mLastSubmittedSerial++;
    }

    // Buffer

    struct BufferMapOperation : PendingOperation {
        virtual void Execute() {
            buffer->MapOperationCompleted(serial, ptr, isWrite);
        }

        Ref<Buffer> buffer;
        void* ptr;
        uint32_t serial;
        bool isWrite;
    };

    Buffer::Buffer(Device* device, const BufferDescriptor* descriptor)
        : BufferBase(device, descriptor) {
        mBackingData = std::unique_ptr<uint8_t[]>(new uint8_t[GetSize()]);
    }

    Buffer::~Buffer() {
        DestroyInternal();
        ToBackend(GetDevice())->DecrementMemoryUsage(GetSize());
    }

    bool Buffer::IsMapWritable() const {
        // Only return true for mappable buffers so we can test cases that need / don't need a
        // staging buffer.
        return (GetUsage() & (dawn::BufferUsageBit::MapRead | dawn::BufferUsageBit::MapWrite)) != 0;
    }

    MaybeError Buffer::MapAtCreationImpl(uint8_t** mappedPointer) {
        *mappedPointer = mBackingData.get();
        return {};
    }

    void Buffer::MapOperationCompleted(uint32_t serial, void* ptr, bool isWrite) {
        if (isWrite) {
            CallMapWriteCallback(serial, DAWN_BUFFER_MAP_ASYNC_STATUS_SUCCESS, ptr, GetSize());
        } else {
            CallMapReadCallback(serial, DAWN_BUFFER_MAP_ASYNC_STATUS_SUCCESS, ptr, GetSize());
        }
    }

    void Buffer::CopyFromStaging(StagingBufferBase* staging,
                                 uint64_t sourceOffset,
                                 uint64_t destinationOffset,
                                 uint64_t size) {
        uint8_t* ptr = reinterpret_cast<uint8_t*>(staging->GetMappedPointer());
        memcpy(mBackingData.get() + destinationOffset, ptr + sourceOffset, size);
    }

    MaybeError Buffer::SetSubDataImpl(uint32_t start, uint32_t count, const void* data) {
        ASSERT(start + count <= GetSize());
        ASSERT(mBackingData);
        memcpy(mBackingData.get() + start, data, count);
        return {};
    }

    void Buffer::MapReadAsyncImpl(uint32_t serial) {
        MapAsyncImplCommon(serial, false);
    }

    void Buffer::MapWriteAsyncImpl(uint32_t serial) {
        MapAsyncImplCommon(serial, true);
    }

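    // Mapping is asynchronous in the API, so even here the request is queued as a
    // BufferMapOperation; the map callback fires from MapOperationCompleted() when the device
    // flushes its pending operations.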
    void Buffer::MapAsyncImplCommon(uint32_t serial, bool isWrite) {
        ASSERT(mBackingData);

        auto operation = std::make_unique<BufferMapOperation>();
        operation->buffer = this;
        operation->ptr = mBackingData.get();
        operation->serial = serial;
        operation->isWrite = isWrite;

        ToBackend(GetDevice())->AddPendingOperation(std::move(operation));
    }

    void Buffer::UnmapImpl() {
    }

    void Buffer::DestroyImpl() {
    }

    // CommandBuffer

    CommandBuffer::CommandBuffer(CommandEncoderBase* encoder,
                                 const CommandBufferDescriptor* descriptor)
        : CommandBufferBase(encoder, descriptor), mCommands(encoder->AcquireCommands()) {
    }

    CommandBuffer::~CommandBuffer() {
        FreeCommands(&mCommands);
    }

    // Queue

    Queue::Queue(Device* device) : QueueBase(device) {
    }

    Queue::~Queue() {
    }

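    // Submitted command buffers carry no backend work to replay; a submit simply flushes the
    // device's pending operations.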
    void Queue::SubmitImpl(uint32_t, CommandBufferBase* const*) {
        ToBackend(GetDevice())->SubmitPendingOperations();
    }

    // SwapChain

    SwapChain::SwapChain(Device* device, const SwapChainDescriptor* descriptor)
        : SwapChainBase(device, descriptor) {
        const auto& im = GetImplementation();
        im.Init(im.userData, nullptr);
    }

    SwapChain::~SwapChain() {
    }

    TextureBase* SwapChain::GetNextTextureImpl(const TextureDescriptor* descriptor) {
        return GetDevice()->CreateTexture(descriptor);
    }

    void SwapChain::OnBeforePresent(TextureBase*) {
    }

    // NativeSwapChainImpl

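    // The native swap chain implementation is a stub: configuration, texture acquisition and
    // presentation all report success without doing any work.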
    void NativeSwapChainImpl::Init(WSIContext* context) {
    }

    DawnSwapChainError NativeSwapChainImpl::Configure(DawnTextureFormat format,
                                                      DawnTextureUsageBit,
                                                      uint32_t width,
                                                      uint32_t height) {
        return DAWN_SWAP_CHAIN_NO_ERROR;
    }

    DawnSwapChainError NativeSwapChainImpl::GetNextTexture(DawnSwapChainNextTexture* nextTexture) {
        return DAWN_SWAP_CHAIN_NO_ERROR;
    }

    DawnSwapChainError NativeSwapChainImpl::Present() {
        return DAWN_SWAP_CHAIN_NO_ERROR;
    }

    dawn::TextureFormat NativeSwapChainImpl::GetPreferredFormat() const {
        return dawn::TextureFormat::RGBA8Unorm;
    }

    // StagingBuffer

    StagingBuffer::StagingBuffer(size_t size, Device* device)
        : StagingBufferBase(size), mDevice(device) {
    }

    StagingBuffer::~StagingBuffer() {
        if (mBuffer) {
            mDevice->DecrementMemoryUsage(GetSize());
        }
    }

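    // Initialize() charges the staging allocation against the device's memory cap; the destructor
    // above only refunds it when the allocation actually happened (mBuffer is non-null).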
    MaybeError StagingBuffer::Initialize() {
        DAWN_TRY(mDevice->IncrementMemoryUsage(GetSize()));
        mBuffer = std::make_unique<uint8_t[]>(GetSize());
        mMappedPointer = mBuffer.get();
        return {};
    }

}}  // namespace dawn_native::null