//
// Copyright 2024 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// RefCountedEvent:
//    Manages reference count of VkEvent and its associated functions.
//

#ifndef LIBANGLE_RENDERER_VULKAN_REFCOUNTED_EVENT_H_
#define LIBANGLE_RENDERER_VULKAN_REFCOUNTED_EVENT_H_

#include <atomic>
#include <limits>
#include <queue>

#include "common/PackedEnums.h"
#include "common/SimpleMutex.h"
#include "common/debug.h"
#include "libANGLE/renderer/serial_utils.h"
#include "libANGLE/renderer/vulkan/vk_resource.h"
#include "libANGLE/renderer/vulkan/vk_utils.h"
#include "libANGLE/renderer/vulkan/vk_wrapper.h"

namespace rx
{
namespace vk
{
class Context;
enum class ImageLayout;
// There are two ways to implement a barrier: using VkCmdPipelineBarrier or VkCmdWaitEvents. The
// BarrierType enum will be passed around to indicate which barrier the caller wants to use.
enum class BarrierType
{
    Pipeline,
    Event,
};

constexpr VkPipelineStageFlags kPreFragmentStageFlags =
    VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT |
    VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT | VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT;

constexpr VkPipelineStageFlags kAllShadersPipelineStageFlags =
    kPreFragmentStageFlags | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT |
    VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;

constexpr VkPipelineStageFlags kAllDepthStencilPipelineStageFlags =
    VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;

constexpr VkPipelineStageFlags kFragmentAndAttachmentPipelineStageFlags =
    VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
    VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;

// We group VK_PIPELINE_STAGE_*_BITs into different groups. The expectation is that execution
// within Fragment/PreFragment/Compute will not overlap. This information is used to optimize the
// usage of VkEvent: we avoid using it when we know that it will not provide benefits over
// pipelineBarriers.
enum class PipelineStageGroup : uint8_t
{
    Other,
    PreFragmentOnly,
    FragmentOnly,
    ComputeOnly,

    InvalidEnum,
    EnumCount = InvalidEnum,
};

class PipelineStageAccessHeuristic final
{
  public:
    constexpr PipelineStageAccessHeuristic() = default;
    constexpr PipelineStageAccessHeuristic(PipelineStageGroup pipelineStageGroup)
    {
        for (size_t i = 0; i < kHeuristicWindowSize; i++)
        {
            mHeuristicBits <<= kPipelineStageGroupBitShift;
            mHeuristicBits |= ToUnderlying(pipelineStageGroup);
        }
    }
    void onAccess(PipelineStageGroup pipelineStageGroup)
    {
        mHeuristicBits <<= kPipelineStageGroupBitShift;
        mHeuristicBits |= ToUnderlying(pipelineStageGroup);
    }
    constexpr bool operator==(const PipelineStageAccessHeuristic &other) const
    {
        return mHeuristicBits == other.mHeuristicBits;
    }

  private:
    static constexpr size_t kPipelineStageGroupBitShift = 2;
    static_assert(ToUnderlying(PipelineStageGroup::EnumCount) <=
                  (1 << kPipelineStageGroupBitShift));
    static constexpr size_t kHeuristicWindowSize = 8;
    angle::BitSet16<kHeuristicWindowSize * kPipelineStageGroupBitShift> mHeuristicBits;
};
static constexpr PipelineStageAccessHeuristic kPipelineStageAccessFragmentOnly =
    PipelineStageAccessHeuristic(PipelineStageGroup::FragmentOnly);
static constexpr PipelineStageAccessHeuristic kPipelineStageAccessComputeOnly =
    PipelineStageAccessHeuristic(PipelineStageGroup::ComputeOnly);
static constexpr PipelineStageAccessHeuristic kPipelineStageAccessPreFragmentOnly =
    PipelineStageAccessHeuristic(PipelineStageGroup::PreFragmentOnly);
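
// Usage sketch of the heuristic above (illustrative only; the call sites are assumptions, not
// actual ANGLE code):
//
//     PipelineStageAccessHeuristic heuristic(PipelineStageGroup::FragmentOnly);
//     heuristic.onAccess(PipelineStageGroup::ComputeOnly);
//     // A single non-fragment access in the sliding window makes the heuristic differ from
//     // kPipelineStageAccessFragmentOnly, which callers can consult when choosing between
//     // VkEvent and pipelineBarrier.
//     const bool fragmentOnly = (heuristic == kPipelineStageAccessFragmentOnly);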

// Enum for the predefined VkPipelineStageFlags sets that VkEvent will be used with. Because
// VkEvent has strict rules that waitEvent and setEvent must have matching VkPipelineStageFlags, it
// is desirable to keep one VkEvent per VkPipelineStageFlags combination. This enum table
// enumerates all possible pipeline stage combinations that VkEvent is used with. The enum maps to
// VkPipelineStageFlags via the Renderer::getPipelineStageMask call.
enum class EventStage : uint32_t
{
    Transfer                               = 0,
    VertexShader                           = 1,
    FragmentShader                         = 2,
    ComputeShader                          = 3,
    AllShaders                             = 4,
    PreFragmentShaders                     = 5,
    FragmentShadingRate                    = 6,
    Attachment                             = 7,
    AttachmentAndFragmentShader            = 8,
    AttachmentAndFragmentShaderAndTransfer = 9,
    AttachmentAndAllShaders                = 10,
    TransferAndComputeShader               = 11,
    // For buffers only
    VertexInput            = 12,
    TransformFeedbackWrite = 13,
    InvalidEnum            = 14,
    EnumCount              = InvalidEnum,
};
using EventStageBitMask = typename angle::PackedEnumBitSet<EventStage, uint64_t>;

using EventStageToVkPipelineStageFlagsMap = angle::PackedEnumMap<EventStage, VkPipelineStageFlags>;
// Initialize EventStage to VkPipelineStageFlags mapping table.
void InitializeEventStageToVkPipelineStageFlagsMap(
    EventStageToVkPipelineStageFlagsMap *map,
    VkPipelineStageFlags supportedVulkanPipelineStageMask);
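
// Illustrative lookup using the mapping table above (the supported-stage mask here is an
// assumption and would normally come from the device's enabled capabilities):
//
//     EventStageToVkPipelineStageFlagsMap map;
//     InitializeEventStageToVkPipelineStageFlagsMap(&map, supportedVulkanPipelineStageMask);
//     VkPipelineStageFlags stageMask = map[EventStage::FragmentShader];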

// VkCmdWaitEvents requires that srcStageMask must be the bitwise OR of the stageMask parameter
// used in previous calls to vkCmdSetEvent (see VUID-vkCmdWaitEvents-srcStageMask-01158). This
// means we must keep a record of what stageMask each event has been used with in the
// VkCmdSetEvent call so that we can retrieve that information when we need to wait for the event.
// Instead of keeping just the stageMask here, we keep the ImageLayout for now, which gives us more
// information for debugging.
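//
// The set/wait pairing constraint in raw Vulkan terms (illustrative only):
//
//     vkCmdSetEvent(commandBuffer, event, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT);
//     ...
//     // srcStageMask must cover every stage the event was set with:
//     vkCmdWaitEvents(commandBuffer, 1, &event, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
//                     VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, nullptr, 0, nullptr, 0, nullptr);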
struct EventAndStage
{
    bool valid() const { return event.valid(); }
    Event event;
    EventStage eventStage;
};

// The VkCmdSetEvent is called after VkCmdEndRenderPass, and all images that are used at the given
// pipeline stage (i.e., they have the same stageMask) will be tracked by the same event. This
// means there will be multiple objects pointing to the same event. Events are thus reference
// counted so that we do not destroy an event while other objects are still referencing it.
class RefCountedEvent final
{
  public:
    RefCountedEvent() { mHandle = nullptr; }
    ~RefCountedEvent() { ASSERT(mHandle == nullptr); }

    // Move constructor moves the reference of the underlying object from other to this.
    RefCountedEvent(RefCountedEvent &&other)
    {
        mHandle       = other.mHandle;
        other.mHandle = nullptr;
    }

    // Copy constructor adds a reference to the underlying object.
    RefCountedEvent(const RefCountedEvent &other)
    {
        ASSERT(other.valid());
        mHandle = other.mHandle;
        mHandle->addRef();
    }

    // Move assignment moves the reference of the underlying object from other to this.
    RefCountedEvent &operator=(RefCountedEvent &&other)
    {
        ASSERT(!valid());
        ASSERT(other.valid());
        std::swap(mHandle, other.mHandle);
        return *this;
    }

    // Copy assignment adds a reference to the underlying object.
    RefCountedEvent &operator=(const RefCountedEvent &other)
    {
        ASSERT(!valid());
        ASSERT(other.valid());
        mHandle = other.mHandle;
        mHandle->addRef();
        return *this;
    }

    // Returns true if both point to the same underlying object.
    bool operator==(const RefCountedEvent &other) const { return mHandle == other.mHandle; }

    // Create the VkEvent and associate it with the given event stage. Returns true on success and
    // false on failure.
    bool init(Context *context, EventStage eventStage);

    // Release one reference count to the underlying Event object and destroy or recycle the handle
    // to the renderer's recycler if this is the very last reference.
    void release(Renderer *renderer);

    // Release one reference count to the underlying Event object and destroy or recycle the handle
    // to the context share group's recycler if this is the very last reference.
    void release(Context *context);

    // Destroy the event and mHandle. The caller must ensure there is no outstanding reference to
    // mHandle.
    void destroy(VkDevice device);

    bool valid() const { return mHandle != nullptr; }

    // Only intended for assertion in the recycler.
    bool validAndNoReference() const { return mHandle != nullptr && !mHandle->isReferenced(); }

    // Returns the underlying Event object.
    const Event &getEvent() const
    {
        ASSERT(valid());
        return mHandle->get().event;
    }

    EventStage getEventStage() const
    {
        ASSERT(mHandle != nullptr);
        return mHandle->get().eventStage;
    }

    VkPipelineStageFlags getPipelineStageMask(Renderer *renderer) const;

  private:
    // Release one reference count to the underlying Event object and destroy or recycle the handle
    // to the provided recycler if this is the very last reference.
    friend class RefCountedEventsGarbage;
    template <typename RecyclerT>
    void releaseImpl(Renderer *renderer, RecyclerT *recycler);

    RefCounted<EventAndStage> *mHandle;
};
using RefCountedEventCollector = std::deque<RefCountedEvent>;
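
// Usage sketch of the reference-counting contract above (illustrative only; the Context pointer
// and the chosen EventStage are assumptions):
//
//     RefCountedEvent event;
//     if (event.init(context, EventStage::FragmentShader))
//     {
//         RefCountedEvent copy = event;  // adds a reference to the same VkEvent
//         copy.release(context);         // drops that reference
//         event.release(context);        // last reference: destroy or hand back to the recycler
//     }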

// Tracks a list of RefCountedEvents per EventStage.
class RefCountedEventArray : angle::NonCopyable
{
  public:
    RefCountedEventArray &operator=(RefCountedEventArray &&other)
    {
        for (EventStage stage : other.mBitMask)
        {
            mEvents[stage] = std::move(other.mEvents[stage]);
        }
        mBitMask = std::move(other.mBitMask);
        other.mBitMask.reset();
        return *this;
    }

    void release(Renderer *renderer);
    void release(Context *context);

    void releaseToEventCollector(RefCountedEventCollector *eventCollector);

    const RefCountedEvent &getEvent(EventStage eventStage) const { return mEvents[eventStage]; }

    bool initEventAtStage(Context *context, EventStage eventStage);

    bool empty() const { return mBitMask.none(); }
    const EventStageBitMask getBitMask() const { return mBitMask; }

    template <typename CommandBufferT>
    void flushSetEvents(Renderer *renderer, CommandBufferT *commandBuffer) const;

  protected:
    angle::PackedEnumMap<EventStage, RefCountedEvent> mEvents;
    // The mask is used to accelerate iteration over the map.
    EventStageBitMask mBitMask;
};

class RefCountedEventArrayWithAccessFlags final : public RefCountedEventArray
{
  public:
    RefCountedEventArrayWithAccessFlags() { mAccessFlags.fill(0); }
    void replaceEventAtStage(Context *context,
                             EventStage eventStage,
                             const RefCountedEvent &event,
                             VkAccessFlags accessFlags)
    {
        if (mBitMask[eventStage])
        {
            mEvents[eventStage].release(context);
        }
        mEvents[eventStage] = event;
        mAccessFlags[eventStage] |= accessFlags;
        mBitMask.set(eventStage);
    }
    VkAccessFlags getAccessFlags(EventStage eventStage) const
    {
        ASSERT(mBitMask[eventStage]);
        return mAccessFlags[eventStage];
    }
    void releaseToEventCollector(RefCountedEventCollector *eventCollector)
    {
        for (EventStage eventStage : mBitMask)
        {
            eventCollector->emplace_back(std::move(mEvents[eventStage]));
            mAccessFlags[eventStage] = 0;
        }
        mBitMask.reset();
    }
    bool hasEventAndAccess(EventStage eventStage, VkAccessFlags accessType) const
    {
        return mBitMask.test(eventStage) && (mAccessFlags[eventStage] & accessType) == accessType;
    }

  private:
    angle::PackedEnumMap<EventStage, VkAccessFlags> mAccessFlags;
};

class RefCountedEventWithAccessFlags final
{
  public:
    RefCountedEventWithAccessFlags() : mAccessFlags(0) {}

    void release(Renderer *renderer) { mEvent.release(renderer); }
    void release(Context *context) { mEvent.release(context); }
    void releaseToEventCollector(RefCountedEventCollector *eventCollector)
    {
        eventCollector->emplace_back(std::move(mEvent));
        mAccessFlags = 0;
    }
    RefCountedEventWithAccessFlags &operator=(RefCountedEventWithAccessFlags &&other)
    {
        mEvent             = std::move(other.mEvent);
        mAccessFlags       = other.mAccessFlags;
        other.mAccessFlags = 0;
        return *this;
    }

    void setEventAndAccessFlags(const RefCountedEvent &event, VkAccessFlags accessFlags)
    {
        mEvent       = event;
        mAccessFlags = accessFlags;
    }

    const RefCountedEvent &getEvent() const { return mEvent; }
    VkAccessFlags getAccessFlags() const
    {
        ASSERT(mEvent.valid());
        return mAccessFlags;
    }

    bool valid() const { return mEvent.valid(); }

    EventStage getEventStage() const { return mEvent.getEventStage(); }

  private:
    RefCountedEvent mEvent;
    VkAccessFlags mAccessFlags;
};

// Only used by RenderPassCommandBufferHelper.
class EventArray final : angle::NonCopyable
{
  public:
    void init(Renderer *renderer, const RefCountedEventArray &refCountedEventArray);

    bool empty() const { return mBitMask.none(); }
    void flushSetEvents(PrimaryCommandBuffer *primary);

  private:
    // The mask is used to accelerate iteration over the map.
    EventStageBitMask mBitMask;
    angle::PackedEnumMap<EventStage, VkEvent> mEvents;
    angle::PackedEnumMap<EventStage, VkPipelineStageFlags> mPipelineStageFlags;
};

// This class tracks a vector of RefCountedEvent garbage. For performance reasons, instead of
// individually tracking each VkEvent garbage, we collect all events that are accessed in the
// CommandBufferHelper into this class. After we submit the command buffer, we treat this vector of
// events as one garbage object and add it to the renderer's garbage list. The garbage cleanup
// decrements the refCount and destroys the event only when the last refCount goes away. Basically,
// all GPU usage shares one refCount, and that refCount ensures we never destroy the event until
// the GPU is finished.
class RefCountedEventsGarbage final
{
  public:
    RefCountedEventsGarbage() = default;
    ~RefCountedEventsGarbage() { ASSERT(mRefCountedEvents.empty()); }

    RefCountedEventsGarbage(const QueueSerial &queueSerial,
                            RefCountedEventCollector &&refCountedEvents)
        : mQueueSerial(queueSerial), mRefCountedEvents(std::move(refCountedEvents))
    {
        ASSERT(!mRefCountedEvents.empty());
    }

    void destroy(Renderer *renderer);

    // Check the queue serial and release the events to the recycler if the GPU has finished.
    bool releaseIfComplete(Renderer *renderer, RefCountedEventsGarbageRecycler *recycler);

    // Check the queue serial and move all events to releasedBucket if the GPU has finished. This
    // is only used by RefCountedEventRecycler.
    bool moveIfComplete(Renderer *renderer, std::deque<RefCountedEventCollector> *releasedBucket);

    bool empty() const { return mRefCountedEvents.empty(); }

    size_t size() const { return mRefCountedEvents.size(); }

  private:
    QueueSerial mQueueSerial;
    RefCountedEventCollector mRefCountedEvents;
};
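
// Typical collection flow (illustrative only; the event array, queue serial, and recycler are
// assumptions standing in for the surrounding submission code):
//
//     RefCountedEventCollector collector;
//     eventArray.releaseToEventCollector(&collector);
//     if (!collector.empty())
//     {
//         garbageRecycler->collectGarbage(submitQueueSerial, std::move(collector));
//     }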

// Two-level RefCountedEvent recycling system: for performance reasons, we have two levels of
// event recyclers. The first level is per ShareGroupVk, which owns a
// RefCountedEventsGarbageRecycler. RefCountedEvent garbage is added to it without any lock. Once
// the GPU completes, the refCount is decremented, and when the last refCount goes away the event
// goes into mEventsToReset. Note that since ShareGroupVk access is already protected by the
// context share lock at the API level, no lock is taken and reference counting is not atomic. At
// RefCountedEventsGarbageRecycler::cleanup time, the entire mEventsToReset is added to the
// renderer's list. The renderer owns the RefCountedEventRecycler, and all access to it is
// protected with a simple mutex lock. When any context calls
// OutsideRenderPassCommandBufferHelper::flushToPrimary, mEventsToReset is retrieved from the
// renderer and the reset commands are added to the command buffer. The events are then moved to
// the renderer's garbage list. They are checked along with the renderer's garbage cleanup, and if
// completed, they are moved to the renderer's mEventsToReuse list. When a RefCountedEvent is
// needed, we always dip into ShareGroupVk's mEventsToReuse list first. If it is empty, we then dip
// into the renderer's mEventsToReuse, grab a collector of events, and try to reuse them. That way,
// traffic into the renderer is minimized, as most calls will be contained in ShareGroupVk.
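//
// A rough sketch of that flow (illustrative pseudo-code, not actual ANGLE call sites; the owning
// objects and exact call order are assumptions):
//
//     // ShareGroupVk level, no lock (RefCountedEventsGarbageRecycler below):
//     shareGroupRecycler.collectGarbage(queueSerial, std::move(events));
//     shareGroupRecycler.cleanup(renderer);  // completed garbage feeds mEventsToReset
//
//     // Renderer level, protected by RefCountedEventRecycler's own mutex:
//     rendererRecycler.resetEvents(context, queueSerial, &primaryCommandBuffer);
//     rendererRecycler.cleanupResettingEvents(renderer);  // completed resets -> mEventsToReuse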

// Thread safe event recycler, protected by its own lock.
class RefCountedEventRecycler final
{
  public:
    RefCountedEventRecycler() {}
    ~RefCountedEventRecycler()
    {
        ASSERT(mEventsToReset.empty());
        ASSERT(mResettingQueue.empty());
        ASSERT(mEventsToReuse.empty());
    }

    void destroy(VkDevice device);

    // Add a single event to the toReset list.
    void recycle(RefCountedEvent &&garbageObject)
    {
        ASSERT(garbageObject.validAndNoReference());
        std::lock_guard<angle::SimpleMutex> lock(mMutex);
        if (mEventsToReset.empty())
        {
            mEventsToReset.emplace_back();
        }
        mEventsToReset.back().emplace_back(std::move(garbageObject));
    }

    // Add a list of events to the toReset list.
    void recycle(RefCountedEventCollector &&garbageObjects)
    {
        ASSERT(!garbageObjects.empty());
        for (const RefCountedEvent &event : garbageObjects)
        {
            ASSERT(event.validAndNoReference());
        }
        std::lock_guard<angle::SimpleMutex> lock(mMutex);
        mEventsToReset.emplace_back(std::move(garbageObjects));
    }

    // Reset all events in the toReset list and move them to the toReuse list.
    void resetEvents(ErrorContext *context,
                     const QueueSerial queueSerial,
                     PrimaryCommandBuffer *commandbuffer);

    // Clean up the resetting event list and move completed events to the toReuse list. The number
    // of events released is returned.
    size_t cleanupResettingEvents(Renderer *renderer);

    // Fetch a list of events that are ready to be reused. Returns true if a list is returned in
    // eventsToReuseOut.
    bool fetchEventsToReuse(RefCountedEventCollector *eventsToReuseOut);

  private:
    angle::SimpleMutex mMutex;
    // RefCountedEvent list that has been released and needs to be reset.
    std::deque<RefCountedEventCollector> mEventsToReset;
    // RefCountedEvent list that is currently resetting.
    std::queue<RefCountedEventsGarbage> mResettingQueue;
    // RefCountedEvent list that has already been reset and is ready to be reused.
    std::deque<RefCountedEventCollector> mEventsToReuse;
};

// Event garbage collection and recycler that is not thread safe; the caller must ensure thread
// safety. It is intended to be used by ShareGroupVk, where all access is already protected by the
// share context lock.
class RefCountedEventsGarbageRecycler final
{
  public:
    RefCountedEventsGarbageRecycler() : mGarbageCount(0) {}
    ~RefCountedEventsGarbageRecycler();

    // Release all garbage and free events.
    void destroy(Renderer *renderer);

    // Walk the garbage list and move completed garbage to the free list.
    void cleanup(Renderer *renderer);

    void collectGarbage(const QueueSerial &queueSerial, RefCountedEventCollector &&refCountedEvents)
    {
        mGarbageCount += refCountedEvents.size();
        mGarbageQueue.emplace(queueSerial, std::move(refCountedEvents));
    }

    void recycle(RefCountedEvent &&garbageObject)
    {
        ASSERT(garbageObject.validAndNoReference());
        mEventsToReset.emplace_back(std::move(garbageObject));
    }

    bool fetch(Renderer *renderer, RefCountedEvent *outObject);

    size_t getGarbageCount() const { return mGarbageCount; }

  private:
    RefCountedEventCollector mEventsToReset;
    std::queue<RefCountedEventsGarbage> mGarbageQueue;
    Recycler<RefCountedEvent> mEventsToReuse;
    size_t mGarbageCount;
};

// This wraps the data and API for the vkCmdWaitEvents call.
class EventBarrier : angle::NonCopyable
{
  public:
    EventBarrier()
        : mSrcStageMask(0),
          mDstStageMask(0),
          mMemoryBarrierSrcAccess(0),
          mMemoryBarrierDstAccess(0),
          mImageMemoryBarrierCount(0),
          mEvent(VK_NULL_HANDLE)
    {}

    EventBarrier(VkPipelineStageFlags srcStageMask,
                 VkPipelineStageFlags dstStageMask,
                 VkAccessFlags srcAccess,
                 VkAccessFlags dstAccess,
                 const VkEvent &event)
        : mSrcStageMask(srcStageMask),
          mDstStageMask(dstStageMask),
          mMemoryBarrierSrcAccess(srcAccess),
          mMemoryBarrierDstAccess(dstAccess),
          mImageMemoryBarrierCount(0),
          mEvent(event)
    {
        ASSERT(mEvent != VK_NULL_HANDLE);
    }

    EventBarrier(VkPipelineStageFlags srcStageMask,
                 VkPipelineStageFlags dstStageMask,
                 const VkEvent &event,
                 const VkImageMemoryBarrier &imageMemoryBarrier)
        : mSrcStageMask(srcStageMask),
          mDstStageMask(dstStageMask),
          mMemoryBarrierSrcAccess(0),
          mMemoryBarrierDstAccess(0),
          mImageMemoryBarrierCount(1),
          mEvent(event),
          mImageMemoryBarrier(imageMemoryBarrier)
    {
        ASSERT(mEvent != VK_NULL_HANDLE);
        ASSERT(mImageMemoryBarrier.image != VK_NULL_HANDLE);
        ASSERT(mImageMemoryBarrier.pNext == nullptr);
    }

    EventBarrier(EventBarrier &&other)
    {
        mSrcStageMask            = other.mSrcStageMask;
        mDstStageMask            = other.mDstStageMask;
        mMemoryBarrierSrcAccess  = other.mMemoryBarrierSrcAccess;
        mMemoryBarrierDstAccess  = other.mMemoryBarrierDstAccess;
        mImageMemoryBarrierCount = other.mImageMemoryBarrierCount;
        std::swap(mEvent, other.mEvent);
        std::swap(mImageMemoryBarrier, other.mImageMemoryBarrier);
        other.mSrcStageMask            = 0;
        other.mDstStageMask            = 0;
        other.mMemoryBarrierSrcAccess  = 0;
        other.mMemoryBarrierDstAccess  = 0;
        other.mImageMemoryBarrierCount = 0;
    }

    ~EventBarrier() {}

    bool isEmpty() const { return mEvent == VK_NULL_HANDLE; }

    bool hasEvent(const VkEvent &event) const { return mEvent == event; }

    void addAdditionalStageAccess(VkPipelineStageFlags dstStageMask, VkAccessFlags dstAccess)
    {
        mDstStageMask |= dstStageMask;
        mMemoryBarrierDstAccess |= dstAccess;
    }

    void execute(PrimaryCommandBuffer *primary);

    void addDiagnosticsString(std::ostringstream &out) const;

  private:
    friend class EventBarrierArray;
    VkPipelineStageFlags mSrcStageMask;
    VkPipelineStageFlags mDstStageMask;
    VkAccessFlags mMemoryBarrierSrcAccess;
    VkAccessFlags mMemoryBarrierDstAccess;
    uint32_t mImageMemoryBarrierCount;
    VkEvent mEvent;
    VkImageMemoryBarrier mImageMemoryBarrier;
};
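
// Conceptually, execute() records a single vkCmdWaitEvents call built from the stored state.
// Raw-Vulkan sketch only, under the assumption that the memory barrier and the optional image
// barrier are forwarded as-is; the actual implementation lives in the .cpp:
//
//     VkMemoryBarrier memoryBarrier = {VK_STRUCTURE_TYPE_MEMORY_BARRIER, nullptr,
//                                      mMemoryBarrierSrcAccess, mMemoryBarrierDstAccess};
//     vkCmdWaitEvents(commandBuffer, 1, &mEvent, mSrcStageMask, mDstStageMask, 1, &memoryBarrier,
//                     0, nullptr, mImageMemoryBarrierCount,
//                     mImageMemoryBarrierCount > 0 ? &mImageMemoryBarrier : nullptr);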

class EventBarrierArray final
{
  public:
    bool isEmpty() const { return mBarriers.empty(); }

    void execute(Renderer *renderer, PrimaryCommandBuffer *primary);

    // Add the additional stageMask to the existing waitEvent.
    void addAdditionalStageAccess(const RefCountedEvent &waitEvent,
                                  VkPipelineStageFlags dstStageMask,
                                  VkAccessFlags dstAccess);

    void addEventMemoryBarrier(Renderer *renderer,
                               const RefCountedEvent &waitEvent,
                               VkAccessFlags srcAccess,
                               VkPipelineStageFlags dstStageMask,
                               VkAccessFlags dstAccess);

    void addEventImageBarrier(Renderer *renderer,
                              const RefCountedEvent &waitEvent,
                              VkPipelineStageFlags dstStageMask,
                              const VkImageMemoryBarrier &imageMemoryBarrier);

    void reset() { ASSERT(mBarriers.empty()); }

    void addDiagnosticsString(std::ostringstream &out) const;

  private:
    std::deque<EventBarrier> mBarriers;
};
}  // namespace vk
}  // namespace rx
#endif  // LIBANGLE_RENDERER_VULKAN_REFCOUNTED_EVENT_H_