diff --git a/src/video_core/renderer_vulkan/vk_scheduler.cpp b/src/video_core/renderer_vulkan/vk_scheduler.cpp
index 39ffb59f..903efc40 100644
--- a/src/video_core/renderer_vulkan/vk_scheduler.cpp
+++ b/src/video_core/renderer_vulkan/vk_scheduler.cpp
@@ -100,6 +100,12 @@ void Scheduler::SubmitExecution(vk::Semaphore signal_semaphore, vk::Semaphore wa
     master_semaphore.SubmitWork(current_cmdbuf, wait_semaphore, signal_semaphore, signal_value);
     master_semaphore.Refresh();
     AllocateWorkerCommandBuffers();
+
+    // Apply pending operations whose tick the GPU has already completed.
+    while (!pending_ops.empty() && IsFree(pending_ops.front().gpu_tick)) {
+        pending_ops.front().callback();
+        pending_ops.pop();
+    }
 }
 
 } // namespace Vulkan
diff --git a/src/video_core/renderer_vulkan/vk_scheduler.h b/src/video_core/renderer_vulkan/vk_scheduler.h
index 7ee35361..b4504274 100644
--- a/src/video_core/renderer_vulkan/vk_scheduler.h
+++ b/src/video_core/renderer_vulkan/vk_scheduler.h
@@ -71,6 +71,11 @@ public:
         return &master_semaphore;
     }
 
+    /// Defers an operation until the GPU has reached the current CPU tick.
+    void DeferOperation(auto&& func) {
+        pending_ops.emplace(func, CurrentTick());
+    }
+
     std::mutex submit_mutex;
 
 private:
@@ -84,6 +89,11 @@ private:
     CommandPool command_pool;
     vk::CommandBuffer current_cmdbuf;
     std::condition_variable_any event_cv;
+    struct PendingOp {
+        std::function<void()> callback;
+        u64 gpu_tick;
+    };
+    std::queue<PendingOp> pending_ops;
     RenderState render_state;
     bool is_rendering = false;
     tracy::VkCtxScope* profiler_scope{};
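
As a hypothetical illustration (not part of this diff), a caller could use DeferOperation to postpone destroying a Vulkan resource until the GPU has passed the tick recorded at the time of the call. The function, buffer, and device names below are assumptions made for the sketch, not code from the repository:

```cpp
// Sketch only: defer destruction of a buffer until the GPU has completed the
// tick recorded by DeferOperation(). The scheduler drains the pending queue in
// SubmitExecution() once IsFree(gpu_tick) reports the tick as finished.
// `device` and `buffer` are illustrative names, not taken from this diff.
void DestroyWhenUnused(Vulkan::Scheduler& scheduler, vk::Device device, vk::Buffer buffer) {
    scheduler.DeferOperation([device, buffer] {
        // Runs only after all work submitted up to the recorded tick has
        // finished executing on the GPU, so the buffer is no longer in use.
        device.destroyBuffer(buffer);
    });
}
```

Note that the pending queue is drained only inside SubmitExecution, so a deferred callback runs on the first submission after its tick completes rather than at the exact moment the GPU finishes.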