yuzu/src/video_core/fence_manager.h

// Copyright 2020 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#pragma once

#include <algorithm>
#include <queue>

#include "common/common_types.h"
#include "core/core.h"
#include "video_core/delayed_destruction_ring.h"
#include "video_core/gpu.h"
#include "video_core/memory_manager.h"
#include "video_core/rasterizer_interface.h"
namespace VideoCommon {
class FenceBase {
public:
explicit FenceBase(u32 payload_, bool is_stubbed_)
: address{}, payload{payload_}, is_semaphore{false}, is_stubbed{is_stubbed_} {}
explicit FenceBase(GPUVAddr address_, u32 payload_, bool is_stubbed_)
: address{address_}, payload{payload_}, is_semaphore{true}, is_stubbed{is_stubbed_} {}

    GPUVAddr GetAddress() const {
        return address;
    }

    u32 GetPayload() const {
        return payload;
    }

    bool IsSemaphore() const {
        return is_semaphore;
    }

private:
    GPUVAddr address;
    u32 payload;
    bool is_semaphore;

protected:
    bool is_stubbed;
};

template <typename TFence, typename TTextureCache, typename TTBufferCache, typename TQueryCache>
class FenceManager {
public:
    /// Notify the fence manager about a new frame
    void TickFrame() {
        delayed_destruction_ring.Tick();
    }
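
    /// Registers a semaphore fence; when it is released, 'value' is written to guest GPU
    /// address 'addr'.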
    void SignalSemaphore(GPUVAddr addr, u32 value) {
        TryReleasePendingFences();
        const bool should_flush = ShouldFlush();
        CommitAsyncFlushes();
        TFence new_fence = CreateFence(addr, value, !should_flush);
        fences.push(new_fence);
        QueueFence(new_fence);
        if (should_flush) {
            rasterizer.FlushCommands();
        }
        rasterizer.SyncGuestHost();
    }
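
    /// Registers a sync point fence; when it is released, sync point 'value' is incremented.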
    void SignalSyncPoint(u32 value) {
        TryReleasePendingFences();
        const bool should_flush = ShouldFlush();
        CommitAsyncFlushes();
        TFence new_fence = CreateFence(value, !should_flush);
        fences.push(new_fence);
        QueueFence(new_fence);
        if (should_flush) {
            rasterizer.FlushCommands();
        }
        rasterizer.SyncGuestHost();
    }
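
    /// Releases every pending fence, waiting on the host GPU where required, and performs the
    /// corresponding semaphore writes and sync point increments.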
    void WaitPendingFences() {
        while (!fences.empty()) {
            TFence& current_fence = fences.front();
            if (ShouldWait()) {
                WaitFence(current_fence);
            }
            PopAsyncFlushes();
            if (current_fence->IsSemaphore()) {
                gpu_memory.template Write<u32>(current_fence->GetAddress(),
                                               current_fence->GetPayload());
            } else {
                gpu.IncrementSyncPoint(current_fence->GetPayload());
            }
            PopFence();
        }
    }

protected:
    explicit FenceManager(VideoCore::RasterizerInterface& rasterizer_, Tegra::GPU& gpu_,
                          TTextureCache& texture_cache_, TTBufferCache& buffer_cache_,
                          TQueryCache& query_cache_)
        : rasterizer{rasterizer_}, gpu{gpu_}, gpu_memory{gpu.MemoryManager()},
          texture_cache{texture_cache_}, buffer_cache{buffer_cache_}, query_cache{query_cache_} {}

    virtual ~FenceManager() = default;

    /// Creates a Sync Point Fence Interface; does not create a backend fence if 'is_stubbed' is
    /// true.
    virtual TFence CreateFence(u32 value, bool is_stubbed) = 0;

    /// Creates a Semaphore Fence Interface; does not create a backend fence if 'is_stubbed' is
    /// true.
    virtual TFence CreateFence(GPUVAddr addr, u32 value, bool is_stubbed) = 0;

    /// Queues a fence into the backend if the fence isn't stubbed.
    virtual void QueueFence(TFence& fence) = 0;

    /// Returns true when the backend fence has been signaled/reached by the host GPU.
    virtual bool IsFenceSignaled(TFence& fence) const = 0;

    /// Waits until a fence has been signaled by the host GPU.
    virtual void WaitFence(TFence& fence) = 0;

    VideoCore::RasterizerInterface& rasterizer;
    Tegra::GPU& gpu;
    Tegra::MemoryManager& gpu_memory;
    TTextureCache& texture_cache;
    TTBufferCache& buffer_cache;
    TQueryCache& query_cache;

private:
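    /// Releases fences from the front of the queue until one is found that still has to be
    /// waited on by the host GPU.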
    void TryReleasePendingFences() {
        while (!fences.empty()) {
            TFence& current_fence = fences.front();
            if (ShouldWait() && !IsFenceSignaled(current_fence)) {
                return;
            }
            PopAsyncFlushes();
            if (current_fence->IsSemaphore()) {
                gpu_memory.template Write<u32>(current_fence->GetAddress(),
                                               current_fence->GetPayload());
            } else {
                gpu.IncrementSyncPoint(current_fence->GetPayload());
            }
            PopFence();
        }
    }
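
    /// Returns true when any cache requires waiting for its pending async flushes to complete.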
    bool ShouldWait() const {
        return texture_cache.ShouldWaitAsyncFlushes() || buffer_cache.ShouldWaitAsyncFlushes() ||
               query_cache.ShouldWaitAsyncFlushes();
    }
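
    /// Returns true when any cache has async flushes that have not been committed yet.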
    bool ShouldFlush() const {
        return texture_cache.HasUncommittedFlushes() || buffer_cache.HasUncommittedFlushes() ||
               query_cache.HasUncommittedFlushes();
    }
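
    /// Forwards PopAsyncFlushes to the texture, buffer and query caches.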
    void PopAsyncFlushes() {
        texture_cache.PopAsyncFlushes();
        buffer_cache.PopAsyncFlushes();
        query_cache.PopAsyncFlushes();
    }
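
    /// Forwards CommitAsyncFlushes to the texture, buffer and query caches.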
    void CommitAsyncFlushes() {
        texture_cache.CommitAsyncFlushes();
        buffer_cache.CommitAsyncFlushes();
        query_cache.CommitAsyncFlushes();
    }
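
    /// Moves the front fence into the delayed destruction ring and removes it from the queue.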
    void PopFence() {
        delayed_destruction_ring.Push(std::move(fences.front()));
        fences.pop();
    }

    std::queue<TFence> fences;
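
    /// Fences popped from the queue are kept here and destroyed a few frames later via TickFrame().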
    DelayedDestructionRing<TFence, 6> delayed_destruction_ring;
};

} // namespace VideoCommon