// SPDX-FileCopyrightText: Copyright 2020 yuzu Emulator Project
// SPDX-License-Identifier: GPL-2.0-or-later
#pragma once
#include <algorithm>
#include <condition_variable>
#include <cstring>
#include <deque>
#include <functional>
#include <memory>
#include <mutex>
#include <thread>
#include <queue>
#include "common/common_types.h"
#include "common/microprofile.h"
#include "common/scope_exit.h"
#include "common/settings.h"
#include "common/thread.h"
#include "video_core/delayed_destruction_ring.h"
#include "video_core/gpu.h"
#include "video_core/host1x/host1x.h"
#include "video_core/host1x/syncpoint_manager.h"
#include "video_core/rasterizer_interface.h"
namespace VideoCommon {
/// Common base for backend fence objects. A "stubbed" fence has no backing
/// host-GPU fence object and is treated as immediately signaled.
class FenceBase {
public:
    /// @param is_stubbed_ True when no backend fence backs this instance.
    explicit FenceBase(bool is_stubbed_) : is_stubbed{is_stubbed_} {}

    /// Returns true when this fence has no backing host fence.
    bool IsStubbed() const {
        return is_stubbed;
    }

protected:
    bool is_stubbed; // True if no backend object was created for this fence
};
template <typename Traits>
2020-02-18 00:19:26 +00:00
class FenceManager {
using TFence = typename Traits::FenceType;
using TTextureCache = typename Traits::TextureCacheType;
using TBufferCache = typename Traits::BufferCacheType;
using TQueryCache = typename Traits::QueryCacheType;
static constexpr bool can_async_check = Traits::HAS_ASYNC_CHECK;
2020-02-18 00:19:26 +00:00
public:
/// Notify the fence manager about a new frame
void TickFrame() {
std::unique_lock lock(ring_guard);
delayed_destruction_ring.Tick();
}
2021-07-07 14:42:26 +00:00
// Unlike other fences, this one doesn't
void SignalOrdering() {
2023-04-30 15:14:06 +00:00
std::function<void()> do_nothing([]{});
SignalFence(std::move(do_nothing));
2021-07-07 14:42:26 +00:00
}
2022-02-06 00:16:11 +00:00
void SyncOperation(std::function<void()>&& func) {
uncommitted_operations.emplace_back(std::move(func));
}
void SignalFence(std::function<void()>&& func) {
2023-04-28 21:53:46 +00:00
rasterizer.InvalidateGPUCache();
bool delay_fence = Settings::IsGPULevelHigh();
if constexpr (!can_async_check) {
TryReleasePendingFences<false>();
}
2020-04-22 15:14:40 +00:00
const bool should_flush = ShouldFlush();
2020-04-16 16:29:53 +00:00
CommitAsyncFlushes();
2022-02-06 00:16:11 +00:00
TFence new_fence = CreateFence(!should_flush);
if constexpr (can_async_check) {
guard.lock();
}
if (delay_fence) {
uncommitted_operations.emplace_back(std::move(func));
}
pending_operations.emplace_back(std::move(uncommitted_operations));
2020-02-19 14:49:07 +00:00
QueueFence(new_fence);
if (!delay_fence) {
func();
}
fences.push(std::move(new_fence));
if (should_flush) {
rasterizer.FlushCommands();
}
if constexpr (can_async_check) {
guard.unlock();
cv.notify_all();
}
}
void SignalSyncPoint(u32 value) {
2022-01-30 09:31:13 +00:00
syncpoint_manager.IncrementGuest(value);
2022-02-06 00:16:11 +00:00
std::function<void()> func([this, value] { syncpoint_manager.IncrementHost(value); });
SignalFence(std::move(func));
2020-02-18 00:19:26 +00:00
}
void WaitPendingFences() {
if constexpr (!can_async_check) {
TryReleasePendingFences<true>();
2020-02-18 00:19:26 +00:00
}
}
protected:
explicit FenceManager(VideoCore::RasterizerInterface& rasterizer_, Tegra::GPU& gpu_,
TTextureCache& texture_cache_, TBufferCache& buffer_cache_,
TQueryCache& query_cache_)
2022-01-30 09:31:13 +00:00
: rasterizer{rasterizer_}, gpu{gpu_}, syncpoint_manager{gpu.Host1x().GetSyncpointManager()},
texture_cache{texture_cache_}, buffer_cache{buffer_cache_}, query_cache{query_cache_} {
if constexpr (can_async_check) {
fence_thread =
std::jthread([this](std::stop_token token) { ReleaseThreadFunc(token); });
}
}
2020-02-18 00:19:26 +00:00
virtual ~FenceManager() {
if constexpr (can_async_check) {
fence_thread.request_stop();
cv.notify_all();
fence_thread.join();
}
}
2020-04-16 16:29:53 +00:00
2022-02-06 00:16:11 +00:00
/// Creates a Fence Interface, does not create a backend fence if 'is_stubbed' is
2020-04-16 16:29:53 +00:00
/// true
2022-02-06 00:16:11 +00:00
virtual TFence CreateFence(bool is_stubbed) = 0;
2020-04-16 16:29:53 +00:00
/// Queues a fence into the backend if the fence isn't stubbed.
2020-02-18 00:19:26 +00:00
virtual void QueueFence(TFence& fence) = 0;
2020-04-16 16:29:53 +00:00
/// Notifies that the backend fence has been signaled/reached in host GPU.
virtual bool IsFenceSignaled(TFence& fence) const = 0;
/// Waits until a fence has been signalled by the host GPU.
2020-02-18 00:19:26 +00:00
virtual void WaitFence(TFence& fence) = 0;
VideoCore::RasterizerInterface& rasterizer;
Tegra::GPU& gpu;
2022-01-30 09:31:13 +00:00
Tegra::Host1x::SyncpointManager& syncpoint_manager;
2020-02-18 00:19:26 +00:00
TTextureCache& texture_cache;
TBufferCache& buffer_cache;
2020-04-15 20:36:14 +00:00
TQueryCache& query_cache;
2020-02-18 00:19:26 +00:00
private:
template <bool force_wait>
2020-02-18 00:19:26 +00:00
void TryReleasePendingFences() {
while (!fences.empty()) {
TFence& current_fence = fences.front();
2020-04-16 16:29:53 +00:00
if (ShouldWait() && !IsFenceSignaled(current_fence)) {
if constexpr (force_wait) {
WaitFence(current_fence);
} else {
return;
}
2020-02-18 00:19:26 +00:00
}
2020-04-16 16:29:53 +00:00
PopAsyncFlushes();
2022-02-06 00:16:11 +00:00
auto operations = std::move(pending_operations.front());
pending_operations.pop_front();
for (auto& operation : operations) {
operation();
}
{
std::unique_lock lock(ring_guard);
delayed_destruction_ring.Push(std::move(current_fence));
}
fences.pop();
}
}
void ReleaseThreadFunc(std::stop_token stop_token) {
std::string name = "GPUFencingThread";
MicroProfileOnThreadCreate(name.c_str());
// Cleanup
SCOPE_EXIT({ MicroProfileOnThreadExit(); });
Common::SetCurrentThreadName(name.c_str());
Common::SetCurrentThreadPriority(Common::ThreadPriority::High);
TFence current_fence;
std::deque<std::function<void()>> current_operations;
while (!stop_token.stop_requested()) {
{
std::unique_lock lock(guard);
cv.wait(lock, [&] { return stop_token.stop_requested() || !fences.empty(); });
if (stop_token.stop_requested()) [[unlikely]] {
return;
}
current_fence = std::move(fences.front());
current_operations = std::move(pending_operations.front());
fences.pop();
pending_operations.pop_front();
}
if (!current_fence->IsStubbed()) {
WaitFence(current_fence);
}
PopAsyncFlushes();
for (auto& operation : current_operations) {
operation();
}
{
std::unique_lock lock(ring_guard);
delayed_destruction_ring.Push(std::move(current_fence));
}
2020-02-18 00:19:26 +00:00
}
}
2020-04-16 16:29:53 +00:00
bool ShouldWait() const {
std::scoped_lock lock{buffer_cache.mutex, texture_cache.mutex};
2020-04-16 16:29:53 +00:00
return texture_cache.ShouldWaitAsyncFlushes() || buffer_cache.ShouldWaitAsyncFlushes() ||
query_cache.ShouldWaitAsyncFlushes();
}
bool ShouldFlush() const {
std::scoped_lock lock{buffer_cache.mutex, texture_cache.mutex};
2020-04-16 16:29:53 +00:00
return texture_cache.HasUncommittedFlushes() || buffer_cache.HasUncommittedFlushes() ||
query_cache.HasUncommittedFlushes();
}
void PopAsyncFlushes() {
2022-02-06 00:16:11 +00:00
{
std::scoped_lock lock{buffer_cache.mutex, texture_cache.mutex};
texture_cache.PopAsyncFlushes();
buffer_cache.PopAsyncFlushes();
}
2020-04-16 16:29:53 +00:00
query_cache.PopAsyncFlushes();
}
void CommitAsyncFlushes() {
2022-02-06 00:16:11 +00:00
{
std::scoped_lock lock{buffer_cache.mutex, texture_cache.mutex};
texture_cache.CommitAsyncFlushes();
buffer_cache.CommitAsyncFlushes();
}
2020-04-16 16:29:53 +00:00
query_cache.CommitAsyncFlushes();
}
2020-02-18 00:19:26 +00:00
std::queue<TFence> fences;
2022-02-06 00:16:11 +00:00
std::deque<std::function<void()>> uncommitted_operations;
std::deque<std::deque<std::function<void()>>> pending_operations;
std::mutex guard;
std::mutex ring_guard;
std::condition_variable cv;
std::jthread fence_thread;
DelayedDestructionRing<TFence, 6> delayed_destruction_ring;
2020-02-18 00:19:26 +00:00
};
} // namespace VideoCommon