author	Charlie Lao <cclao@google.com>	2024-05-06 12:36:20 -0700
committer	Angle LUCI CQ <angle-scoped@luci-project-accounts.iam.gserviceaccount.com>	2024-05-15 20:32:23 +0000
commit	0636b509e55da46fd117e8ef455ad0d54a6a9fe5 (patch)
tree	a46dbb7ab9fd60a0e17b604bc92abd28ac92c296
parent	2e0aefe9205057109dd24ef76da14d3f39912dab (diff)
download	angle-0636b509e55da46fd117e8ef455ad0d54a6a9fe5.tar.gz
Vulkan: Move RefCountedEvent GC and recycler to ShareGroupVk (2/3)
One of the problems we had with RefCountedEvents is the CPU overhead that comes with them, part of which is due to atomic reference counting. RefCountedEvents are only used by ImageHelper, and ImageHelpers are per share group, so they are already protected by the front-end context share lock. The only reason we need atomics is garbage cleanup, which runs in a separate thread and decrements the refCount. The idea is to move that garbage list from RendererVk to ShareGroupVk so that all accesses to RefCountedEvents are already protected, which lets us remove the use of atomics. The downside of this approach is that a share group holds onto its event garbage, making it unavailable for other contexts to reuse. But VkEvents are expected to be very lightweight objects, so that should be acceptable.

This is the second CL in the series. In this CL, we add a RefCountedEventsGarbageRecycler to the ShareGroupVk, which is responsible for garbage collecting and recycling RefCountedEvents. Since most ImageHelper code only has access to a Context argument, for convenience we also store the RefCountedEventsGarbageRecycler pointer in the vk::Context for easy access. The vk::Context argument is also passed to the RefCountedEvent::init and release functions so that they have access to the recycler. Garbage collection happens when a RefCountedEvent is needed. The per-renderer recycler is still kept to hold the RefCountedEvents that get released from ShareGroupVk, or that are released without access to context information.

Bug: b/336844257
Change-Id: I36fe5d1c8dacdbe35bb2d380f94a32b9b72bbaa5
Reviewed-on: https://chromium-review.googlesource.com/c/angle/angle/+/5529951
Commit-Queue: Charlie Lao <cclao@google.com>
Reviewed-by: Shahbaz Youssefi <syoussefi@chromium.org>
Reviewed-by: Amirali Abdolrashidi <abdolrashidi@google.com>
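For orientation, the lifecycle this CL sets up can be sketched as follows. This is a simplified illustration based on the classes in the diff below, not verbatim ANGLE code; contextVk, shareGroupVk, renderer, and layout are assumed locals, and error handling is omitted.

    // Everything here runs under the front-end share group lock, which is why
    // the event's reference count no longer needs to be atomic.
    vk::RefCountedEvent event;
    event.init(contextVk, layout);   // fetch from the share group's
                                     // RefCountedEventsGarbageRecycler first,
                                     // then from the thread-safe per-renderer
                                     // recycler; create a new VkEvent only if
                                     // both miss
    // ... vkCmdSetEvent / vkCmdWaitEvents work is recorded against the event ...
    event.release(contextVk);        // on the last reference, the handle is
                                     // recycled into the ShareGroupVk-owned
                                     // free stack instead of being destroyed
    // At a frame boundary (glFlush / glFinish / eglSwapBuffers), completed
    // garbage is moved to the free list and the excess free list is trimmed:
    shareGroupVk->cleanupRefCountedEventGarbage(renderer);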
-rw-r--r--src/libANGLE/renderer/vulkan/ContextVk.cpp28
-rw-r--r--src/libANGLE/renderer/vulkan/ShareGroupVk.cpp2
-rw-r--r--src/libANGLE/renderer/vulkan/ShareGroupVk.h21
-rw-r--r--src/libANGLE/renderer/vulkan/vk_helpers.cpp29
-rw-r--r--src/libANGLE/renderer/vulkan/vk_helpers.h4
-rw-r--r--src/libANGLE/renderer/vulkan/vk_ref_counted_event.cpp94
-rw-r--r--src/libANGLE/renderer/vulkan/vk_ref_counted_event.h62
-rw-r--r--src/libANGLE/renderer/vulkan/vk_renderer.cpp6
-rw-r--r--src/libANGLE/renderer/vulkan/vk_renderer.h10
-rw-r--r--src/libANGLE/renderer/vulkan/vk_utils.cpp4
-rw-r--r--src/libANGLE/renderer/vulkan/vk_utils.h7
11 files changed, 216 insertions, 51 deletions
diff --git a/src/libANGLE/renderer/vulkan/ContextVk.cpp b/src/libANGLE/renderer/vulkan/ContextVk.cpp
index fa23a72e91..8e67da4fe6 100644
--- a/src/libANGLE/renderer/vulkan/ContextVk.cpp
+++ b/src/libANGLE/renderer/vulkan/ContextVk.cpp
@@ -1209,6 +1209,10 @@ ContextVk::ContextVk(const gl::State &state, gl::ErrorSet *errorSet, vk::Rendere
mPipelineDirtyBitsMask.reset(gl::state::DIRTY_BIT_VERTEX_ARRAY_BINDING);
}
+ // Stash the share group's RefCountedEventsGarbageRecycler in vk::Context for ImageHelper to conveniently access
+ mShareGroupRefCountedEventsGarbageRecycler =
+ mShareGroupVk->getRefCountedEventsGarbageRecycler();
+
angle::PerfMonitorCounterGroup vulkanGroup;
vulkanGroup.name = "vulkan";
@@ -3649,6 +3653,9 @@ angle::Result ContextVk::submitCommands(const vk::Semaphore *signalSemaphore,
// Now that we have submitted commands, some of the pending garbage may no longer be pending
// and should be moved to the garbage list.
mRenderer->cleanupPendingSubmissionGarbage();
+ // In case a large amount of rendering/submission within one frame accumulates an
+ // excessive amount of garbage, also trigger the cleanup here.
+ mShareGroupVk->cleanupExcessiveRefCountedEventGarbage(mRenderer);
mComputeDirtyBits |= mNewComputeCommandBufferDirtyBits;
@@ -7688,12 +7695,17 @@ angle::Result ContextVk::flushImpl(const vk::Semaphore *signalSemaphore,
// Try to detect the frame boundary for both onscreen and offscreen usage by detecting
// flush/finish/swap.
- if ((renderPassClosureReason == RenderPassClosureReason::GLFlush ||
- renderPassClosureReason == RenderPassClosureReason::GLFinish ||
- renderPassClosureReason == RenderPassClosureReason::EGLSwapBuffers) &&
- mShareGroupVk->isDueForBufferPoolPrune(mRenderer))
+ bool frameBoundary = renderPassClosureReason == RenderPassClosureReason::GLFlush ||
+ renderPassClosureReason == RenderPassClosureReason::GLFinish ||
+ renderPassClosureReason == RenderPassClosureReason::EGLSwapBuffers;
+ if (frameBoundary)
{
- mShareGroupVk->pruneDefaultBufferPools(mRenderer);
+ if (mShareGroupVk->isDueForBufferPoolPrune(mRenderer))
+ {
+ mShareGroupVk->pruneDefaultBufferPools(mRenderer);
+ }
+ // Always clean up garbage and destroy the excess free list at the frame boundary.
+ mShareGroupVk->cleanupRefCountedEventGarbage(mRenderer);
mRenderer->getRefCountedEventRecycler()->destroy(getDevice());
}
@@ -7991,7 +8003,8 @@ angle::Result ContextVk::flushCommandsAndEndRenderPassWithoutSubmit(RenderPassCl
flushDescriptorSetUpdates();
// Collect RefCountedEvent garbage before submitting to renderer
- mRenderPassCommands->collectRefCountedEventsGarbage(mRenderer);
+ mRenderPassCommands->collectRefCountedEventsGarbage(
+ mShareGroupVk->getRefCountedEventsGarbageRecycler());
// Save the queueSerial before calling flushRenderPassCommands, which may return a new
// mRenderPassCommands
@@ -8275,7 +8288,8 @@ angle::Result ContextVk::flushOutsideRenderPassCommands()
// Track completion of this command buffer.
mOutsideRenderPassCommands->flushSetEvents(this);
- mOutsideRenderPassCommands->collectRefCountedEventsGarbage(mRenderer);
+ mOutsideRenderPassCommands->collectRefCountedEventsGarbage(
+ mShareGroupVk->getRefCountedEventsGarbageRecycler());
// Save the queueSerial before calling flushOutsideRPCommands, which may return a new
// mOutsideRenderPassCommands
diff --git a/src/libANGLE/renderer/vulkan/ShareGroupVk.cpp b/src/libANGLE/renderer/vulkan/ShareGroupVk.cpp
index 26a3c1c1c5..c054a8c010 100644
--- a/src/libANGLE/renderer/vulkan/ShareGroupVk.cpp
+++ b/src/libANGLE/renderer/vulkan/ShareGroupVk.cpp
@@ -160,6 +160,8 @@ void ShareGroupVk::onDestroy(const egl::Display *display)
DisplayVk *displayVk = vk::GetImpl(display);
vk::Renderer *renderer = displayVk->getRenderer();
+ mRefCountedEventsGarbageRecycler.destroy(renderer);
+
for (std::unique_ptr<vk::BufferPool> &pool : mDefaultBufferPools)
{
if (pool)
diff --git a/src/libANGLE/renderer/vulkan/ShareGroupVk.h b/src/libANGLE/renderer/vulkan/ShareGroupVk.h
index 43bfb42250..7768785883 100644
--- a/src/libANGLE/renderer/vulkan/ShareGroupVk.h
+++ b/src/libANGLE/renderer/vulkan/ShareGroupVk.h
@@ -96,6 +96,24 @@ class ShareGroupVk : public ShareGroupImpl
vk::WaitableMonolithicPipelineCreationTask *taskOut);
void waitForCurrentMonolithicPipelineCreationTask();
+ vk::RefCountedEventsGarbageRecycler *getRefCountedEventsGarbageRecycler()
+ {
+ return &mRefCountedEventsGarbageRecycler;
+ }
+ void cleanupRefCountedEventGarbage(vk::Renderer *renderer)
+ {
+ mRefCountedEventsGarbageRecycler.cleanup(renderer);
+ }
+ void cleanupExcessiveRefCountedEventGarbage(vk::Renderer *renderer)
+ {
+ // TODO: b/336844257: the threshold needs tuning.
+ constexpr size_t kExcessiveGarbageCountThreshold = 256;
+ if (mRefCountedEventsGarbageRecycler.getGarbageCount() > kExcessiveGarbageCountThreshold)
+ {
+ mRefCountedEventsGarbageRecycler.cleanup(renderer);
+ }
+ }
+
private:
angle::Result updateContextsPriority(ContextVk *contextVk, egl::ContextPriority newPriority);
@@ -141,6 +159,9 @@ class ShareGroupVk : public ShareGroupImpl
// Texture update manager used to flush uploaded mutable textures.
TextureUpload mTextureUpload;
+
+ // Garbage collects released RefCountedEvents and holds those that are free and ready to reuse
+ vk::RefCountedEventsGarbageRecycler mRefCountedEventsGarbageRecycler;
};
} // namespace rx
diff --git a/src/libANGLE/renderer/vulkan/vk_helpers.cpp b/src/libANGLE/renderer/vulkan/vk_helpers.cpp
index 44cff48dc4..1bbc65b6b6 100644
--- a/src/libANGLE/renderer/vulkan/vk_helpers.cpp
+++ b/src/libANGLE/renderer/vulkan/vk_helpers.cpp
@@ -1881,12 +1881,13 @@ void OutsideRenderPassCommandBufferHelper::trackImagesWithEvent(Context *context
flushSetEventsImpl(context, &mCommandBuffer);
}
-void OutsideRenderPassCommandBufferHelper::collectRefCountedEventsGarbage(Renderer *renderer)
+void OutsideRenderPassCommandBufferHelper::collectRefCountedEventsGarbage(
+ RefCountedEventsGarbageRecycler *garbageRecycler)
{
+ ASSERT(garbageRecycler != nullptr);
if (!mRefCountedEventCollector.empty())
{
- renderer->collectRefCountedEventsGarbage(mQueueSerial,
- std::move(mRefCountedEventCollector));
+ garbageRecycler->collectGarbage(mQueueSerial, std::move(mRefCountedEventCollector));
}
}
@@ -2713,12 +2714,13 @@ void RenderPassCommandBufferHelper::executeSetEvents(Context *context,
GetImageLayoutDstStageMask(context, layoutData));
// Note that these events are already added to the garbage collector before the command
// buffer leaves ContextVk, so we just need to release the event after use.
- refCountedEvent.release(context->getRenderer());
+ refCountedEvent.release(context);
}
mRefCountedEvents.mask.reset();
}
-void RenderPassCommandBufferHelper::collectRefCountedEventsGarbage(Renderer *renderer)
+void RenderPassCommandBufferHelper::collectRefCountedEventsGarbage(
+ RefCountedEventsGarbageRecycler *garbageRecycler)
{
// For render passes, vkCmdSetEvent works differently from OutsideRenderPassCommands.
// vkCmdEndRenderPass is called in the primary command buffer, and vkCmdSetEvent has to be
@@ -2734,8 +2736,7 @@ void RenderPassCommandBufferHelper::collectRefCountedEventsGarbage(Renderer *ren
if (!mRefCountedEventCollector.empty())
{
- renderer->collectRefCountedEventsGarbage(mQueueSerial,
- std::move(mRefCountedEventCollector));
+ garbageRecycler->collectGarbage(mQueueSerial, std::move(mRefCountedEventCollector));
}
}
@@ -7165,7 +7166,7 @@ void ImageHelper::barrierImpl(Context *context,
{
// For now we always use pipelineBarrier for single buffer mode. We could use an event here
// in the future.
- mCurrentEvent.release(context->getRenderer());
+ mCurrentEvent.release(context);
const ImageMemoryBarrierData &transition = kImageMemoryBarrierData[mCurrentLayout];
VkMemoryBarrier memoryBarrier = {};
@@ -7222,7 +7223,7 @@ void ImageHelper::barrierImpl(Context *context,
}
commandBuffer->imageBarrier(srcStageMask, dstStageMask, imageMemoryBarrier);
// We use pipelineBarrier here; no need to wait for events anymore.
- mCurrentEvent.release(context->getRenderer());
+ mCurrentEvent.release(context);
}
mCurrentLayout = newLayout;
@@ -7416,7 +7417,7 @@ void ImageHelper::updateLayoutAndBarrier(Context *context,
// Release it. No need to garbage collect since we did not use the event here. All
// previous uses of the event should already be tracked as garbage.
- mCurrentEvent.release(context->getRenderer());
+ mCurrentEvent.release(context);
}
mBarrierQueueSerial = queueSerial;
}
@@ -7531,7 +7532,7 @@ void ImageHelper::updateLayoutAndBarrier(Context *context,
mLastNonShaderReadOnlyLayout = ImageLayout::Undefined;
if (mLastNonShaderReadOnlyEvent.valid())
{
- mLastNonShaderReadOnlyEvent.release(context->getRenderer());
+ mLastNonShaderReadOnlyEvent.release(context);
}
}
@@ -7540,7 +7541,7 @@ void ImageHelper::updateLayoutAndBarrier(Context *context,
const bool isShaderReadOnly = IsShaderReadOnlyLayout(transitionTo);
if (isShaderReadOnly)
{
- mLastNonShaderReadOnlyEvent.release(context->getRenderer());
+ mLastNonShaderReadOnlyEvent.release(context);
mLastNonShaderReadOnlyLayout = mCurrentLayout;
mCurrentShaderReadStageMask = dstStageMask;
}
@@ -7559,7 +7560,7 @@ void ImageHelper::updateLayoutAndBarrier(Context *context,
{
pipelineBarriers->mergeImageBarrier(transitionTo.barrierIndex, srcStageMask,
dstStageMask, imageMemoryBarrier);
- mCurrentEvent.release(context->getRenderer());
+ mCurrentEvent.release(context);
}
mBarrierQueueSerial = queueSerial;
@@ -7580,7 +7581,7 @@ void ImageHelper::setCurrentRefCountedEvent(Context *context, ImageLayoutEventMa
ASSERT(context->getRenderer()->getFeatures().useVkEventForImageBarrier.enabled);
// If there is already an event, release it first.
- mCurrentEvent.release(context->getRenderer());
+ mCurrentEvent.release(context);
// Create the event if we have not done so yet. Otherwise just use the already created event.
// This means all images used in the same render pass that have the same layout will be tracked by the
diff --git a/src/libANGLE/renderer/vulkan/vk_helpers.h b/src/libANGLE/renderer/vulkan/vk_helpers.h
index 7a27c1fe24..77e20fe711 100644
--- a/src/libANGLE/renderer/vulkan/vk_helpers.h
+++ b/src/libANGLE/renderer/vulkan/vk_helpers.h
@@ -1527,7 +1527,7 @@ class OutsideRenderPassCommandBufferHelper final : public CommandBufferHelperCom
void flushSetEvents(Context *context) { flushSetEventsImpl(context, &mCommandBuffer); }
// Clean up event garbage. Note that an ImageHelper object may still hold a reference to the
// event, so the event itself will not get destroyed until the last refCount goes away.
- void collectRefCountedEventsGarbage(Renderer *renderer);
+ void collectRefCountedEventsGarbage(RefCountedEventsGarbageRecycler *garbageRecycler);
angle::Result flushToPrimary(Context *context, CommandsState *commandsState);
@@ -1877,7 +1877,7 @@ class RenderPassCommandBufferHelper final : public CommandBufferHelperCommon
void updateDepthStencilReadOnlyMode(RenderPassUsageFlags dsUsageFlags,
VkImageAspectFlags dsAspectFlags);
- void collectRefCountedEventsGarbage(Renderer *renderer);
+ void collectRefCountedEventsGarbage(RefCountedEventsGarbageRecycler *garbageRecycler);
private:
uint32_t getSubpassCommandBufferCount() const { return mCurrentSubpassCommandBufferIndex + 1; }
diff --git a/src/libANGLE/renderer/vulkan/vk_ref_counted_event.cpp b/src/libANGLE/renderer/vulkan/vk_ref_counted_event.cpp
index 6f93aea8bf..336f3af705 100644
--- a/src/libANGLE/renderer/vulkan/vk_ref_counted_event.cpp
+++ b/src/libANGLE/renderer/vulkan/vk_ref_counted_event.cpp
@@ -21,7 +21,8 @@ bool RefCountedEvent::init(Context *context, ImageLayout layout)
ASSERT(layout != ImageLayout::Undefined);
// First try the recyclers. We must issue vkCmdResetEvent before vkCmdSetEvent
- if (context->getRenderer()->getRefCountedEventRecycler()->fetch(this))
+ if (context->getRefCountedEventsGarbageRecycler()->fetch(this) ||
+ context->getRenderer()->getRefCountedEventRecycler()->fetch(this))
{
mHandle->get().needsReset = true;
}
@@ -40,7 +41,7 @@ bool RefCountedEvent::init(Context *context, ImageLayout layout)
{
WARN() << "event.init failed. Clean up garbage and retry again";
// Proactively clean up garbage and retry
- context->getRenderer()->cleanupGarbage();
+ context->getRefCountedEventsGarbageRecycler()->cleanup(context->getRenderer());
result = mHandle->get().event.init(context->getDevice(), createInfo);
if (result != VK_SUCCESS)
{
@@ -62,6 +63,14 @@ bool RefCountedEvent::init(Context *context, ImageLayout layout)
return true;
}
+void RefCountedEvent::release(Context *context)
+{
+ if (mHandle != nullptr)
+ {
+ releaseImpl(context->getRenderer(), context->getRefCountedEventsGarbageRecycler());
+ }
+}
+
void RefCountedEvent::release(Renderer *renderer)
{
if (mHandle != nullptr)
@@ -77,6 +86,11 @@ void RefCountedEvent::releaseImpl(Renderer *renderer, RecyclerT *recycler)
const bool isLastReference = mHandle->getAndReleaseRef() == 1;
if (isLastReference)
{
+ // When async submission is enabled, the recycler will be null when the release call comes
+ // from the CommandProcessor. But in that case it will not be the last reference, since the
+ // garbage collector holds one reference count and will never release it until the GPU has
+ // finished.
+ ASSERT(recycler != nullptr);
recycler->recycle(std::move(*this));
ASSERT(mHandle == nullptr);
}
@@ -95,22 +109,86 @@ void RefCountedEvent::destroy(VkDevice device)
}
// RefCountedEventsGarbage implementation.
-bool RefCountedEventsGarbage::destroyIfComplete(Renderer *renderer)
+bool RefCountedEventsGarbage::releaseIfComplete(Renderer *renderer,
+ RefCountedEventsGarbageRecycler *recycler)
{
if (!renderer->hasResourceUseFinished(mLifetime))
{
return false;
}
- RefCountedEventRecycler *recycler = renderer->getRefCountedEventRecycler();
- recycler->releaseOrRecycle(renderer, std::move(mRefCountedEvents));
-
+ for (RefCountedEvent &event : mRefCountedEvents)
+ {
+ ASSERT(event.valid());
+ event.releaseImpl(renderer, recycler);
+ ASSERT(!event.valid());
+ }
+ mRefCountedEvents.clear();
return true;
}
-bool RefCountedEventsGarbage::hasResourceUseSubmitted(Renderer *renderer) const
+void RefCountedEventsGarbage::destroy(Renderer *renderer)
+{
+ ASSERT(renderer->hasResourceUseFinished(mLifetime));
+ for (RefCountedEvent &event : mRefCountedEvents)
+ {
+ ASSERT(event.valid());
+ event.release(renderer);
+ }
+ mRefCountedEvents.clear();
+}
+
+// RefCountedEventsGarbageRecycler implementation.
+RefCountedEventsGarbageRecycler::~RefCountedEventsGarbageRecycler()
+{
+ ASSERT(mFreeStack.empty());
+ ASSERT(mGarbageQueue.empty());
+}
+
+void RefCountedEventsGarbageRecycler::destroy(Renderer *renderer)
+{
+ while (!mGarbageQueue.empty())
+ {
+ mGarbageQueue.front().destroy(renderer);
+ mGarbageQueue.pop();
+ }
+
+ mFreeStack.destroy(renderer->getDevice());
+}
+
+void RefCountedEventsGarbageRecycler::cleanup(Renderer *renderer)
+{
+ // Destroy the free stack first. The garbage cleanup process will add more events to the
+ // free stack. If everything is stable from frame to frame, the garbage should release
+ // enough events to the recycler for the next frame's needs.
+ mFreeStack.destroy(renderer->getDevice());
+
+ while (!mGarbageQueue.empty())
+ {
+ size_t count = mGarbageQueue.front().size();
+ bool released = mGarbageQueue.front().releaseIfComplete(renderer, this);
+ if (released)
+ {
+ mGarbageCount -= count;
+ mGarbageQueue.pop();
+ }
+ else
+ {
+ break;
+ }
+ }
+}
+
+bool RefCountedEventsGarbageRecycler::fetch(RefCountedEvent *outObject)
{
- return renderer->hasResourceUseSubmitted(mLifetime);
+ if (!mFreeStack.empty())
+ {
+ mFreeStack.fetch(outObject);
+ ASSERT(outObject->valid());
+ ASSERT(!outObject->mHandle->isReferenced());
+ return true;
+ }
+ return false;
}
// EventBarrier implementation.
diff --git a/src/libANGLE/renderer/vulkan/vk_ref_counted_event.h b/src/libANGLE/renderer/vulkan/vk_ref_counted_event.h
index e657eef0dd..794c46e65a 100644
--- a/src/libANGLE/renderer/vulkan/vk_ref_counted_event.h
+++ b/src/libANGLE/renderer/vulkan/vk_ref_counted_event.h
@@ -104,6 +104,10 @@ class RefCountedEvent final
// to renderer's recycler if this is the very last reference.
void release(Renderer *renderer);
+ // Release one reference count to the underlying Event object, and destroy or recycle the
+ // handle into the context share group's recycler if this is the very last reference.
+ void release(Context *context);
+
// Destroy the event and mHandle. Caller must ensure there is no outstanding reference to the
// mHandle.
void destroy(VkDevice device);
@@ -134,6 +138,8 @@ class RefCountedEvent final
// Release one reference count to the underlying Event object, and destroy or recycle the
// handle into the provided recycler if this is the very last reference.
friend class RefCountedEventRecycler;
+ friend class RefCountedEventsGarbage;
+ friend class RefCountedEventsGarbageRecycler;
template <typename RecyclerT>
void releaseImpl(Renderer *renderer, RecyclerT *recycler);
@@ -174,9 +180,12 @@ class RefCountedEventsGarbage final
return *this;
}
- bool destroyIfComplete(Renderer *renderer);
- bool hasResourceUseSubmitted(Renderer *renderer) const;
- VkDeviceSize getSize() const { return mRefCountedEvents.size(); }
+ void destroy(Renderer *renderer);
+
+ // Check the queue serial, and release the events to the context if the GPU has finished.
+ // Note that releasing to the context may end up recycling the object instead of destroying
+ // it. Returns true if the GPU has finished.
+ bool releaseIfComplete(Renderer *renderer, RefCountedEventsGarbageRecycler *recycler);
// Move the event to the garbage list
void add(RefCountedEvent &&event) { mRefCountedEvents.emplace_back(std::move(event)); }
@@ -204,17 +213,22 @@ class RefCountedEventsGarbage final
bool empty() const { return mRefCountedEvents.empty(); }
+ size_t size() const { return mRefCountedEvents.size(); }
+
private:
+ friend class RefCountedEventsGarbageRecycler;
ResourceUse mLifetime;
RefCountedEventCollector mRefCountedEvents;
};
-// Thread safe event recycler
+// Thread-safe event recycler, protected by its own lock.
class RefCountedEventRecycler final
{
public:
void recycle(RefCountedEvent &&garbageObject)
{
+ ASSERT(garbageObject.valid());
+ ASSERT(!garbageObject.mHandle->isReferenced());
std::lock_guard<angle::SimpleMutex> lock(mMutex);
mFreeStack.recycle(std::move(garbageObject));
}
@@ -239,6 +253,8 @@ class RefCountedEventRecycler final
return false;
}
mFreeStack.fetch(outObject);
+ ASSERT(outObject->valid());
+ ASSERT(!outObject->mHandle->isReferenced());
return true;
}
@@ -253,6 +269,44 @@ class RefCountedEventRecycler final
Recycler<RefCountedEvent> mFreeStack;
};
+// Non-thread-safe event garbage collection and recycler. The caller must ensure thread
+// safety. It is intended for use by ShareGroupVk, where all access is already protected by
+// the share context lock.
+class RefCountedEventsGarbageRecycler final
+{
+ public:
+ RefCountedEventsGarbageRecycler() : mGarbageCount(0) {}
+ ~RefCountedEventsGarbageRecycler();
+
+ // Release all garbage and free events.
+ void destroy(Renderer *renderer);
+
+ // Walk the garbage list and move completed garbage to the free list
+ void cleanup(Renderer *renderer);
+
+ void collectGarbage(const QueueSerial &queueSerial, RefCountedEventCollector &&refCountedEvents)
+ {
+ mGarbageCount += refCountedEvents.size();
+ mGarbageQueue.emplace(queueSerial, std::move(refCountedEvents));
+ }
+
+ void recycle(RefCountedEvent &&garbageObject)
+ {
+ ASSERT(garbageObject.valid());
+ ASSERT(!garbageObject.mHandle->isReferenced());
+ mFreeStack.recycle(std::move(garbageObject));
+ }
+
+ bool fetch(RefCountedEvent *outObject);
+
+ size_t getGarbageCount() const { return mGarbageCount; }
+
+ private:
+ Recycler<RefCountedEvent> mFreeStack;
+ std::queue<RefCountedEventsGarbage> mGarbageQueue;
+ size_t mGarbageCount;
+};
+
// This wraps data and API for vkCmdWaitEvent call
class EventBarrier : angle::NonCopyable
{
diff --git a/src/libANGLE/renderer/vulkan/vk_renderer.cpp b/src/libANGLE/renderer/vulkan/vk_renderer.cpp
index f6486e2fee..6c12e4ee06 100644
--- a/src/libANGLE/renderer/vulkan/vk_renderer.cpp
+++ b/src/libANGLE/renderer/vulkan/vk_renderer.cpp
@@ -1494,8 +1494,7 @@ Renderer::~Renderer() {}
bool Renderer::hasSharedGarbage()
{
- return !mSharedGarbageList.empty() || !mSuballocationGarbageList.empty() ||
- !mRefCountedEventGarbageList.empty();
+ return !mSharedGarbageList.empty() || !mSuballocationGarbageList.empty();
}
void Renderer::onDestroy(vk::Context *context)
@@ -5508,8 +5507,6 @@ void Renderer::cleanupGarbage()
// Note: do this after cleaning up mSuballocationGarbageList so that we will have more
// chances to find orphaned blocks that are empty.
mOrphanedBufferBlockList.pruneEmptyBufferBlocks(this);
- // Clean up event garbages
- mRefCountedEventGarbageList.cleanupSubmittedGarbage(this);
}
void Renderer::cleanupPendingSubmissionGarbage()
@@ -5517,7 +5514,6 @@ void Renderer::cleanupPendingSubmissionGarbage()
// Check if pending garbage is still pending. If not, move them to the garbage list.
mSharedGarbageList.cleanupUnsubmittedGarbage(this);
mSuballocationGarbageList.cleanupUnsubmittedGarbage(this);
- mRefCountedEventGarbageList.cleanupUnsubmittedGarbage(this);
}
void Renderer::onNewValidationMessage(const std::string &message)
diff --git a/src/libANGLE/renderer/vulkan/vk_renderer.h b/src/libANGLE/renderer/vulkan/vk_renderer.h
index bc168e1748..00c52ebf31 100644
--- a/src/libANGLE/renderer/vulkan/vk_renderer.h
+++ b/src/libANGLE/renderer/vulkan/vk_renderer.h
@@ -339,14 +339,6 @@ class Renderer : angle::NonCopyable
mSuballocationGarbageList.add(this, std::move(garbage));
}
- void collectRefCountedEventsGarbage(const QueueSerial &queueSerial,
- vk::RefCountedEventCollector &&refCountedEvents)
- {
- ASSERT(!refCountedEvents.empty());
- vk::RefCountedEventsGarbage garbage(queueSerial, std::move(refCountedEvents));
- mRefCountedEventGarbageList.add(this, std::move(garbage));
- }
-
angle::Result getPipelineCache(vk::Context *context, vk::PipelineCacheAccess *pipelineCacheOut);
angle::Result mergeIntoPipelineCache(vk::Context *context,
const vk::PipelineCache &pipelineCache);
@@ -959,8 +951,6 @@ class Renderer : angle::NonCopyable
vk::SharedGarbageList<vk::BufferSuballocationGarbage> mSuballocationGarbageList;
// Holds orphaned BufferBlocks when ShareGroup gets destroyed
vk::BufferBlockGarbageList mOrphanedBufferBlockList;
- // Holds RefCountedEvent garbage
- vk::SharedGarbageList<vk::RefCountedEventsGarbage> mRefCountedEventGarbageList;
// Holds RefCountedEvents that are free and ready to reuse
vk::RefCountedEventRecycler mRefCountedEventRecycler;
diff --git a/src/libANGLE/renderer/vulkan/vk_utils.cpp b/src/libANGLE/renderer/vulkan/vk_utils.cpp
index 3c4390f7a1..0699e95e83 100644
--- a/src/libANGLE/renderer/vulkan/vk_utils.cpp
+++ b/src/libANGLE/renderer/vulkan/vk_utils.cpp
@@ -370,7 +370,9 @@ VkImageAspectFlags GetFormatAspectFlags(const angle::Format &format)
}
// Context implementation.
-Context::Context(Renderer *renderer) : mRenderer(renderer), mPerfCounters{} {}
+Context::Context(Renderer *renderer)
+ : mRenderer(renderer), mShareGroupRefCountedEventsGarbageRecycler(nullptr), mPerfCounters{}
+{}
Context::~Context() {}
diff --git a/src/libANGLE/renderer/vulkan/vk_utils.h b/src/libANGLE/renderer/vulkan/vk_utils.h
index dc4ebdf9d6..3e4d4e4e2e 100644
--- a/src/libANGLE/renderer/vulkan/vk_utils.h
+++ b/src/libANGLE/renderer/vulkan/vk_utils.h
@@ -278,6 +278,7 @@ class [[nodiscard]] ScopedQueueSerialIndex final : angle::NonCopyable
QueueSerialIndexAllocator *mIndexAllocator;
};
+class RefCountedEventsGarbageRecycler;
// Abstracts error handling. Implemented by ContextVk for GL, DisplayVk for EGL, worker threads,
// CLContextVk etc.
class Context : angle::NonCopyable
@@ -296,9 +297,15 @@ class Context : angle::NonCopyable
const angle::VulkanPerfCounters &getPerfCounters() const { return mPerfCounters; }
angle::VulkanPerfCounters &getPerfCounters() { return mPerfCounters; }
+ RefCountedEventsGarbageRecycler *getRefCountedEventsGarbageRecycler()
+ {
+ return mShareGroupRefCountedEventsGarbageRecycler;
+ }
protected:
Renderer *const mRenderer;
+ // Stash the ShareGroupVk's RefCountedEventsGarbageRecycler here for ImageHelper to conveniently access
+ RefCountedEventsGarbageRecycler *mShareGroupRefCountedEventsGarbageRecycler;
angle::VulkanPerfCounters mPerfCounters;
};