// Copyright 2017 The Dawn Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "dawn_native/Device.h"

#include "dawn_native/Adapter.h"
#include "dawn_native/BindGroup.h"
#include "dawn_native/BindGroupLayout.h"
#include "dawn_native/Buffer.h"
#include "dawn_native/CommandBuffer.h"
#include "dawn_native/CommandEncoder.h"
#include "dawn_native/ComputePipeline.h"
#include "dawn_native/DynamicUploader.h"
#include "dawn_native/ErrorData.h"
#include "dawn_native/Fence.h"
#include "dawn_native/FenceSignalTracker.h"
#include "dawn_native/Instance.h"
#include "dawn_native/PipelineLayout.h"
#include "dawn_native/Queue.h"
#include "dawn_native/RenderPipeline.h"
#include "dawn_native/Sampler.h"
#include "dawn_native/ShaderModule.h"
#include "dawn_native/SwapChain.h"
#include "dawn_native/Texture.h"

#include <cstring>  // for memset in CreateBufferMapped
#include <unordered_set>

namespace dawn_native {

    // DeviceBase::Caches

    // The caches are unordered_sets of pointers with special hash and compare functions
    // to compare the value of the objects, instead of the pointers.
    template <typename Object>
    using ContentLessObjectCache =
        std::unordered_set<Object*, typename Object::HashFunc, typename Object::EqualityFunc>;
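
    // A hedged sketch of the nested functors this alias assumes each cached Object provides
    // (declarations only; the real definitions live with each object type): HashFunc hashes the
    // descriptor-derived state and EqualityFunc compares that state, so two objects built from
    // equivalent descriptors land in the same bucket and compare equal even though their
    // pointers differ.
    //
    //     class SamplerBase : public ObjectBase {
    //       public:
    //         struct HashFunc {
    //             size_t operator()(const SamplerBase* sampler) const;
    //         };
    //         struct EqualityFunc {
    //             bool operator()(const SamplerBase* a, const SamplerBase* b) const;
    //         };
    //         // ...
    //     };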

    struct DeviceBase::Caches {
        ContentLessObjectCache<BindGroupLayoutBase> bindGroupLayouts;
        ContentLessObjectCache<ComputePipelineBase> computePipelines;
        ContentLessObjectCache<PipelineLayoutBase> pipelineLayouts;
        ContentLessObjectCache<RenderPipelineBase> renderPipelines;
        ContentLessObjectCache<SamplerBase> samplers;
        ContentLessObjectCache<ShaderModuleBase> shaderModules;
    };

    // DeviceBase

    DeviceBase::DeviceBase(AdapterBase* adapter, const DeviceDescriptor* descriptor)
        : mAdapter(adapter) {
        mCaches = std::make_unique<DeviceBase::Caches>();
        mFenceSignalTracker = std::make_unique<FenceSignalTracker>(this);
        mDynamicUploader = std::make_unique<DynamicUploader>(this);
        SetDefaultToggles();

        mFormatTable = BuildFormatTable(this);
    }

    DeviceBase::~DeviceBase() {
        // Devices must explicitly free the uploader
        ASSERT(mDynamicUploader == nullptr);
        ASSERT(mDeferredCreateBufferMappedAsyncResults.empty());
    }

    void DeviceBase::HandleError(const char* message) {
        if (mErrorCallback) {
            mErrorCallback(message, mErrorUserdata);
        }
    }

    void DeviceBase::SetErrorCallback(dawn::DeviceErrorCallback callback, void* userdata) {
        mErrorCallback = callback;
        mErrorUserdata = userdata;
    }

    MaybeError DeviceBase::ValidateObject(const ObjectBase* object) const {
        if (DAWN_UNLIKELY(object->GetDevice() != this)) {
            return DAWN_VALIDATION_ERROR("Object from a different device.");
        }
        if (DAWN_UNLIKELY(object->IsError())) {
            return DAWN_VALIDATION_ERROR("Object is an error.");
        }
        return {};
    }

    AdapterBase* DeviceBase::GetAdapter() const {
        return mAdapter;
    }

    FenceSignalTracker* DeviceBase::GetFenceSignalTracker() const {
        return mFenceSignalTracker.get();
    }

    ResultOrError<const Format*> DeviceBase::GetInternalFormat(dawn::TextureFormat format) const {
        size_t index = ComputeFormatIndex(format);
        if (index >= mFormatTable.size()) {
            return DAWN_VALIDATION_ERROR("Unknown texture format");
        }

        const Format* internalFormat = &mFormatTable[index];
        if (!internalFormat->isSupported) {
            return DAWN_VALIDATION_ERROR("Unsupported texture format");
        }

        return internalFormat;
    }

    const Format& DeviceBase::GetValidInternalFormat(dawn::TextureFormat format) const {
        size_t index = ComputeFormatIndex(format);
        ASSERT(index < mFormatTable.size());
        ASSERT(mFormatTable[index].isSupported);
        return mFormatTable[index];
    }

    ResultOrError<BindGroupLayoutBase*> DeviceBase::GetOrCreateBindGroupLayout(
        const BindGroupLayoutDescriptor* descriptor) {
        BindGroupLayoutBase blueprint(this, descriptor, true);

        auto iter = mCaches->bindGroupLayouts.find(&blueprint);
        if (iter != mCaches->bindGroupLayouts.end()) {
            (*iter)->Reference();
            return *iter;
        }

        BindGroupLayoutBase* backendObj;
        DAWN_TRY_ASSIGN(backendObj, CreateBindGroupLayoutImpl(descriptor));
        mCaches->bindGroupLayouts.insert(backendObj);
        return backendObj;
    }

    void DeviceBase::UncacheBindGroupLayout(BindGroupLayoutBase* obj) {
        size_t removedCount = mCaches->bindGroupLayouts.erase(obj);
        ASSERT(removedCount == 1);
    }
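
    // The same find-or-create / uncache pairing repeats for each cached type below. A hedged
    // sketch of the intended lifetime contract (the Uncache* call sites live in the object
    // implementations, not in this file): cache hits above add a reference before returning,
    // and a cached object is expected to remove itself from the device cache when destroyed.
    //
    //     BindGroupLayoutBase::~BindGroupLayoutBase() {
    //         if (mIsCached) {  // hypothetical flag distinguishing cached objects from blueprints
    //             GetDevice()->UncacheBindGroupLayout(this);
    //         }
    //     }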

    ResultOrError<ComputePipelineBase*> DeviceBase::GetOrCreateComputePipeline(
        const ComputePipelineDescriptor* descriptor) {
        ComputePipelineBase blueprint(this, descriptor, true);

        auto iter = mCaches->computePipelines.find(&blueprint);
        if (iter != mCaches->computePipelines.end()) {
            (*iter)->Reference();
            return *iter;
        }

        ComputePipelineBase* backendObj;
        DAWN_TRY_ASSIGN(backendObj, CreateComputePipelineImpl(descriptor));
        mCaches->computePipelines.insert(backendObj);
        return backendObj;
    }

    void DeviceBase::UncacheComputePipeline(ComputePipelineBase* obj) {
        size_t removedCount = mCaches->computePipelines.erase(obj);
        ASSERT(removedCount == 1);
    }

    ResultOrError<PipelineLayoutBase*> DeviceBase::GetOrCreatePipelineLayout(
        const PipelineLayoutDescriptor* descriptor) {
        PipelineLayoutBase blueprint(this, descriptor, true);

        auto iter = mCaches->pipelineLayouts.find(&blueprint);
        if (iter != mCaches->pipelineLayouts.end()) {
            (*iter)->Reference();
            return *iter;
        }

        PipelineLayoutBase* backendObj;
        DAWN_TRY_ASSIGN(backendObj, CreatePipelineLayoutImpl(descriptor));
        mCaches->pipelineLayouts.insert(backendObj);
        return backendObj;
    }

    void DeviceBase::UncachePipelineLayout(PipelineLayoutBase* obj) {
        size_t removedCount = mCaches->pipelineLayouts.erase(obj);
        ASSERT(removedCount == 1);
    }

    ResultOrError<RenderPipelineBase*> DeviceBase::GetOrCreateRenderPipeline(
        const RenderPipelineDescriptor* descriptor) {
        RenderPipelineBase blueprint(this, descriptor, true);

        auto iter = mCaches->renderPipelines.find(&blueprint);
        if (iter != mCaches->renderPipelines.end()) {
            (*iter)->Reference();
            return *iter;
        }

        RenderPipelineBase* backendObj;
        DAWN_TRY_ASSIGN(backendObj, CreateRenderPipelineImpl(descriptor));
        mCaches->renderPipelines.insert(backendObj);
        return backendObj;
    }

    void DeviceBase::UncacheRenderPipeline(RenderPipelineBase* obj) {
        size_t removedCount = mCaches->renderPipelines.erase(obj);
        ASSERT(removedCount == 1);
    }

    ResultOrError<SamplerBase*> DeviceBase::GetOrCreateSampler(
        const SamplerDescriptor* descriptor) {
        SamplerBase blueprint(this, descriptor, true);

        auto iter = mCaches->samplers.find(&blueprint);
        if (iter != mCaches->samplers.end()) {
            (*iter)->Reference();
            return *iter;
        }

        SamplerBase* backendObj;
        DAWN_TRY_ASSIGN(backendObj, CreateSamplerImpl(descriptor));
        mCaches->samplers.insert(backendObj);
        return backendObj;
    }

    void DeviceBase::UncacheSampler(SamplerBase* obj) {
        size_t removedCount = mCaches->samplers.erase(obj);
        ASSERT(removedCount == 1);
    }

    ResultOrError<ShaderModuleBase*> DeviceBase::GetOrCreateShaderModule(
        const ShaderModuleDescriptor* descriptor) {
        ShaderModuleBase blueprint(this, descriptor, true);

        auto iter = mCaches->shaderModules.find(&blueprint);
        if (iter != mCaches->shaderModules.end()) {
            (*iter)->Reference();
            return *iter;
        }

        ShaderModuleBase* backendObj;
        DAWN_TRY_ASSIGN(backendObj, CreateShaderModuleImpl(descriptor));
        mCaches->shaderModules.insert(backendObj);
        return backendObj;
    }

    void DeviceBase::UncacheShaderModule(ShaderModuleBase* obj) {
        size_t removedCount = mCaches->shaderModules.erase(obj);
        ASSERT(removedCount == 1);
    }

    // Object creation API methods

    BindGroupBase* DeviceBase::CreateBindGroup(const BindGroupDescriptor* descriptor) {
        BindGroupBase* result = nullptr;

        if (ConsumedError(CreateBindGroupInternal(&result, descriptor))) {
            return BindGroupBase::MakeError(this);
        }

        return result;
    }
    BindGroupLayoutBase* DeviceBase::CreateBindGroupLayout(
        const BindGroupLayoutDescriptor* descriptor) {
        BindGroupLayoutBase* result = nullptr;

        if (ConsumedError(CreateBindGroupLayoutInternal(&result, descriptor))) {
            return BindGroupLayoutBase::MakeError(this);
        }

        return result;
    }
    BufferBase* DeviceBase::CreateBuffer(const BufferDescriptor* descriptor) {
        BufferBase* result = nullptr;

        if (ConsumedError(CreateBufferInternal(&result, descriptor))) {
            return BufferBase::MakeError(this);
        }

        return result;
    }
    DawnCreateBufferMappedResult DeviceBase::CreateBufferMapped(
        const BufferDescriptor* descriptor) {
        BufferBase* buffer = nullptr;
        uint8_t* data = nullptr;

        uint64_t size = descriptor->size;
        if (ConsumedError(CreateBufferInternal(&buffer, descriptor)) ||
            ConsumedError(buffer->MapAtCreation(&data))) {
            // Map failed. Replace the buffer with an error buffer.
            if (buffer != nullptr) {
                delete buffer;
            }
            buffer = BufferBase::MakeErrorMapped(this, size, &data);
        }

        ASSERT(buffer != nullptr);
        if (data == nullptr) {
            // |data| may be nullptr if there was an OOM in MakeErrorMapped.
            // Non-zero dataLength and nullptr data is used to indicate there should be
            // mapped data but the allocation failed.
            ASSERT(buffer->IsError());
        } else {
            memset(data, 0, size);
        }

        DawnCreateBufferMappedResult result = {};
        result.buffer = reinterpret_cast<DawnBuffer>(buffer);
        result.data = data;
        result.dataLength = size;

        return result;
    }
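
    // Hedged caller-side sketch (not part of this file): the mapped memory returned above is
    // zero-filled and described by the DawnCreateBufferMappedResult fields used in this
    // function; the descriptor values and the unmap step below are illustrative.
    //
    //     BufferDescriptor desc = {};
    //     desc.size = dataSize;
    //     DawnCreateBufferMappedResult mapped = device->CreateBufferMapped(&desc);
    //     if (mapped.data != nullptr) {
    //         memcpy(mapped.data, initialContents, mapped.dataLength);
    //     }
    //     // The caller then unmaps the buffer before first GPU use.
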
    void DeviceBase::CreateBufferMappedAsync(const BufferDescriptor* descriptor,
                                             dawn::BufferCreateMappedCallback callback,
                                             void* userdata) {
        DawnCreateBufferMappedResult result = CreateBufferMapped(descriptor);

        DawnBufferMapAsyncStatus status = DAWN_BUFFER_MAP_ASYNC_STATUS_SUCCESS;
        if (result.data == nullptr || result.dataLength != descriptor->size) {
            status = DAWN_BUFFER_MAP_ASYNC_STATUS_ERROR;
        }

        DeferredCreateBufferMappedAsync deferred_info;
        deferred_info.callback = callback;
        deferred_info.status = status;
        deferred_info.result = result;
        deferred_info.userdata = userdata;

        // The callback is deferred so it matches the async behavior of WebGPU.
        mDeferredCreateBufferMappedAsyncResults.push_back(deferred_info);
    }
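
    // Hedged sketch of the deferral above: the callback is never invoked re-entrantly from
    // CreateBufferMappedAsync; it is queued and fired from the next DeviceBase::Tick(), so a
    // caller pumps Tick() to observe completion (callback and userdata names are illustrative).
    //
    //     device->CreateBufferMappedAsync(&desc, OnBufferMapped, myUserdata);  // queues result
    //     device->Tick();  // OnBufferMapped(status, result, myUserdata) runs here
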
    CommandEncoderBase* DeviceBase::CreateCommandEncoder(
        const CommandEncoderDescriptor* descriptor) {
        return new CommandEncoderBase(this, descriptor);
    }
    ComputePipelineBase* DeviceBase::CreateComputePipeline(
        const ComputePipelineDescriptor* descriptor) {
        ComputePipelineBase* result = nullptr;

        if (ConsumedError(CreateComputePipelineInternal(&result, descriptor))) {
            return ComputePipelineBase::MakeError(this);
        }

        return result;
    }
    PipelineLayoutBase* DeviceBase::CreatePipelineLayout(
        const PipelineLayoutDescriptor* descriptor) {
        PipelineLayoutBase* result = nullptr;

        if (ConsumedError(CreatePipelineLayoutInternal(&result, descriptor))) {
            return PipelineLayoutBase::MakeError(this);
        }

        return result;
    }
    QueueBase* DeviceBase::CreateQueue() {
        QueueBase* result = nullptr;

        if (ConsumedError(CreateQueueInternal(&result))) {
            // If queue creation failure ever becomes possible, we should implement MakeError and
            // friends for them.
            UNREACHABLE();
            return nullptr;
        }

        return result;
    }
    SamplerBase* DeviceBase::CreateSampler(const SamplerDescriptor* descriptor) {
        SamplerBase* result = nullptr;

        if (ConsumedError(CreateSamplerInternal(&result, descriptor))) {
            return SamplerBase::MakeError(this);
        }

        return result;
    }
    RenderPipelineBase* DeviceBase::CreateRenderPipeline(
        const RenderPipelineDescriptor* descriptor) {
        RenderPipelineBase* result = nullptr;

        if (ConsumedError(CreateRenderPipelineInternal(&result, descriptor))) {
            return RenderPipelineBase::MakeError(this);
        }

        return result;
    }
    ShaderModuleBase* DeviceBase::CreateShaderModule(const ShaderModuleDescriptor* descriptor) {
        ShaderModuleBase* result = nullptr;

        if (ConsumedError(CreateShaderModuleInternal(&result, descriptor))) {
            return ShaderModuleBase::MakeError(this);
        }

        return result;
    }
    SwapChainBase* DeviceBase::CreateSwapChain(const SwapChainDescriptor* descriptor) {
        SwapChainBase* result = nullptr;

        if (ConsumedError(CreateSwapChainInternal(&result, descriptor))) {
            return SwapChainBase::MakeError(this);
        }

        return result;
    }
    TextureBase* DeviceBase::CreateTexture(const TextureDescriptor* descriptor) {
        TextureBase* result = nullptr;

        if (ConsumedError(CreateTextureInternal(&result, descriptor))) {
            return TextureBase::MakeError(this);
        }

        return result;
    }
    TextureViewBase* DeviceBase::CreateTextureView(TextureBase* texture,
                                                   const TextureViewDescriptor* descriptor) {
        TextureViewBase* result = nullptr;

        if (ConsumedError(CreateTextureViewInternal(&result, texture, descriptor))) {
            return TextureViewBase::MakeError(this);
        }

        return result;
    }

    // Other Device API methods

    void DeviceBase::Tick() {
        TickImpl();
        {
            auto deferredResults = std::move(mDeferredCreateBufferMappedAsyncResults);
            for (const auto& deferred : deferredResults) {
                deferred.callback(deferred.status, deferred.result, deferred.userdata);
            }
        }
        mFenceSignalTracker->Tick(GetCompletedCommandSerial());
    }

    void DeviceBase::Reference() {
        ASSERT(mRefCount != 0);
        mRefCount++;
    }

    void DeviceBase::Release() {
        ASSERT(mRefCount != 0);
        mRefCount--;
        if (mRefCount == 0) {
            delete this;
        }
    }

    void DeviceBase::ApplyToggleOverrides(const DeviceDescriptor* deviceDescriptor) {
        ASSERT(deviceDescriptor);

        for (const char* toggleName : deviceDescriptor->forceEnabledToggles) {
            Toggle toggle = GetAdapter()->GetInstance()->ToggleNameToEnum(toggleName);
            if (toggle != Toggle::InvalidEnum) {
                mTogglesSet.SetToggle(toggle, true);
            }
        }
        for (const char* toggleName : deviceDescriptor->forceDisabledToggles) {
            Toggle toggle = GetAdapter()->GetInstance()->ToggleNameToEnum(toggleName);
            if (toggle != Toggle::InvalidEnum) {
                mTogglesSet.SetToggle(toggle, false);
            }
        }
    }
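
    // Hedged caller-side sketch (not part of this file): toggles are requested by name in the
    // DeviceDescriptor, and unknown names are silently ignored via the Toggle::InvalidEnum
    // check above. The container type and the toggle name strings below are assumptions.
    //
    //     DeviceDescriptor desc;
    //     desc.forceEnabledToggles.push_back("lazy_clear_resource_on_first_use");
    //     desc.forceDisabledToggles.push_back("some_backend_workaround");  // hypothetical name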

    std::vector<const char*> DeviceBase::GetTogglesUsed() const {
        std::vector<const char*> togglesNameInUse(mTogglesSet.toggleBitset.count());

        uint32_t index = 0;
        for (uint32_t i : IterateBitSet(mTogglesSet.toggleBitset)) {
            const char* toggleName =
                GetAdapter()->GetInstance()->ToggleEnumToName(static_cast<Toggle>(i));
            togglesNameInUse[index] = toggleName;
            ++index;
        }

        return togglesNameInUse;
    }

    bool DeviceBase::IsToggleEnabled(Toggle toggle) const {
        return mTogglesSet.IsEnabled(toggle);
    }

    void DeviceBase::SetDefaultToggles() {
        // Sets the default-enabled toggles
        mTogglesSet.SetToggle(Toggle::LazyClearResourceOnFirstUse, true);
    }

    // Implementation details of object creation

    MaybeError DeviceBase::CreateBindGroupInternal(BindGroupBase** result,
                                                   const BindGroupDescriptor* descriptor) {
        DAWN_TRY(ValidateBindGroupDescriptor(this, descriptor));
        DAWN_TRY_ASSIGN(*result, CreateBindGroupImpl(descriptor));
        return {};
    }

    MaybeError DeviceBase::CreateBindGroupLayoutInternal(
        BindGroupLayoutBase** result,
        const BindGroupLayoutDescriptor* descriptor) {
        DAWN_TRY(ValidateBindGroupLayoutDescriptor(this, descriptor));
        DAWN_TRY_ASSIGN(*result, GetOrCreateBindGroupLayout(descriptor));
        return {};
    }

    MaybeError DeviceBase::CreateBufferInternal(BufferBase** result,
                                                const BufferDescriptor* descriptor) {
        DAWN_TRY(ValidateBufferDescriptor(this, descriptor));
        DAWN_TRY_ASSIGN(*result, CreateBufferImpl(descriptor));
        return {};
    }

    MaybeError DeviceBase::CreateComputePipelineInternal(
        ComputePipelineBase** result,
        const ComputePipelineDescriptor* descriptor) {
        DAWN_TRY(ValidateComputePipelineDescriptor(this, descriptor));
        DAWN_TRY_ASSIGN(*result, GetOrCreateComputePipeline(descriptor));
        return {};
    }

    MaybeError DeviceBase::CreatePipelineLayoutInternal(
        PipelineLayoutBase** result,
        const PipelineLayoutDescriptor* descriptor) {
        DAWN_TRY(ValidatePipelineLayoutDescriptor(this, descriptor));
        DAWN_TRY_ASSIGN(*result, GetOrCreatePipelineLayout(descriptor));
        return {};
    }

    MaybeError DeviceBase::CreateQueueInternal(QueueBase** result) {
        DAWN_TRY_ASSIGN(*result, CreateQueueImpl());
        return {};
    }

    MaybeError DeviceBase::CreateRenderPipelineInternal(
        RenderPipelineBase** result,
        const RenderPipelineDescriptor* descriptor) {
        DAWN_TRY(ValidateRenderPipelineDescriptor(this, descriptor));
        DAWN_TRY_ASSIGN(*result, GetOrCreateRenderPipeline(descriptor));
        return {};
    }

    MaybeError DeviceBase::CreateSamplerInternal(SamplerBase** result,
                                                 const SamplerDescriptor* descriptor) {
        DAWN_TRY(ValidateSamplerDescriptor(this, descriptor));
        DAWN_TRY_ASSIGN(*result, GetOrCreateSampler(descriptor));
        return {};
    }

    MaybeError DeviceBase::CreateShaderModuleInternal(ShaderModuleBase** result,
                                                      const ShaderModuleDescriptor* descriptor) {
        DAWN_TRY(ValidateShaderModuleDescriptor(this, descriptor));
        DAWN_TRY_ASSIGN(*result, GetOrCreateShaderModule(descriptor));
        return {};
    }

    MaybeError DeviceBase::CreateSwapChainInternal(SwapChainBase** result,
                                                   const SwapChainDescriptor* descriptor) {
        DAWN_TRY(ValidateSwapChainDescriptor(this, descriptor));
        DAWN_TRY_ASSIGN(*result, CreateSwapChainImpl(descriptor));
        return {};
    }

    MaybeError DeviceBase::CreateTextureInternal(TextureBase** result,
                                                 const TextureDescriptor* descriptor) {
        DAWN_TRY(ValidateTextureDescriptor(this, descriptor));
        DAWN_TRY_ASSIGN(*result, CreateTextureImpl(descriptor));
        return {};
    }

    MaybeError DeviceBase::CreateTextureViewInternal(TextureViewBase** result,
                                                     TextureBase* texture,
                                                     const TextureViewDescriptor* descriptor) {
        DAWN_TRY(ValidateTextureViewDescriptor(this, texture, descriptor));
        DAWN_TRY_ASSIGN(*result, CreateTextureViewImpl(texture, descriptor));
        return {};
    }

    // Other implementation details

    void DeviceBase::ConsumeError(ErrorData* error) {
        ASSERT(error != nullptr);
        HandleError(error->GetMessage().c_str());
        delete error;
    }
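
    // Hedged sketch of ConsumedError, which is used throughout this file but declared in
    // Device.h rather than defined here; the exact implementation there may differ:
    //
    //     bool DeviceBase::ConsumedError(MaybeError maybeError) {
    //         if (maybeError.IsError()) {
    //             ConsumeError(maybeError.AcquireError());  // reports via HandleError, then frees
    //             return true;
    //         }
    //         return false;
    //     }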

    ResultOrError<DynamicUploader*> DeviceBase::GetDynamicUploader() const {
        if (mDynamicUploader->IsEmpty()) {
            DAWN_TRY(mDynamicUploader->CreateAndAppendBuffer());
        }
        return mDynamicUploader.get();
    }

    void DeviceBase::SetToggle(Toggle toggle, bool isEnabled) {
        mTogglesSet.SetToggle(toggle, isEnabled);
    }

}  // namespace dawn_native