1
0
Mirror of https://github.com/RPCS3/rpcs3.git, synced 2025-02-01 04:51:49 +01:00

vk: Inject memory barrier upon conclusion of a framebuffer feedback loop

- Do not write to the texture until previous draw call is completed using it.
- This is usually not much of a problem until blending operations come into play.
This commit is contained in:
kd-11 — authored 2020-07-07 22:39:22 +03:00; committed by kd-11
parent 5fae1b3637
commit 987ede2e6c
2 changed files with 65 additions and 2 deletions

View File

@ -15,6 +15,9 @@ namespace vk
class render_target : public viewable_image, public rsx::ref_counted, public rsx::render_target_descriptor<vk::viewable_image*>
{
u64 cyclic_reference_sync_tag = 0;
u64 write_barrier_sync_tag = 0;
// Get the linear resolve target bound to this surface. Initialize if none exists
vk::viewable_image* get_resolve_target_safe(vk::command_buffer& cmd)
{
@ -357,6 +360,23 @@ namespace vk
return (rsx::apply_resolution_scale(_width, true) == width()) && (rsx::apply_resolution_scale(_height, true) == height());
}
// Insert a texture barrier for a framebuffer feedback loop (surface sampled while
// bound as an attachment). Also updates the write-barrier sync tags used later to
// detect when two GENERAL-layout draws run without an intervening barrier.
void texture_barrier(vk::command_buffer& cmd)
{
	// Only single-sampled surfaces participate in write-barrier tracking
	if (samples() == 1)
	{
		if (write_barrier_sync_tag == 0)
		{
			// First barrier on this surface - switch tracking on
			write_barrier_sync_tag = 1;
		}

		// Mark this cyclic reference as synchronized with the current barrier generation
		cyclic_reference_sync_tag = write_barrier_sync_tag;
	}

	vk::insert_texture_barrier(cmd, this, VK_IMAGE_LAYOUT_GENERAL);
}
// Reset per-surface bookkeeping when the surface is (re)bound as an attachment.
// Clears the barrier-tracking tag and the frame age tag; the two resets are independent.
void reset_surface_counters()
{
	write_barrier_sync_tag = 0; // Disable write-barrier tracking until next feedback loop
	frame_tag = 0;              // Surface is live again
}
image_view* get_view(u32 remap_encoding, const std::pair<std::array<u8, 4>, std::array<u8, 4>>& remap,
VkImageAspectFlags mask = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT) override
{
@ -394,6 +414,49 @@ namespace vk
}
}
// NOTE(review): fragment of a larger memory/access-resolution routine; the enclosing
// function is not visible in this hunk. Per the commit message, this injects a memory
// barrier upon conclusion of a framebuffer feedback loop so a new write does not begin
// until the previous draw that read this surface has completed.
if (!read_access && write_barrier_sync_tag != 0)
{
// GENERAL layout here indicates the surface was (or still is) part of a cyclic
// reference — bound for both sampling and attachment output.
if (current_layout == VK_IMAGE_LAYOUT_GENERAL)
{
if (write_barrier_sync_tag != cyclic_reference_sync_tag)
{
// This barrier catches a very specific case where 2 draw calls are executed with general layout (cyclic ref) but no texture barrier in between.
// This happens when a cyclic ref is broken. In this case previous draw must finish drawing before the new one renders to avoid current draw breaking previous one.
VkPipelineStageFlags src_stage, dst_stage;
VkAccessFlags src_access, dst_access;
if (!is_depth_surface()) [[likely]]
{
// Color target: previous shader reads / attachment writes must complete
// before this draw's color attachment output stage may proceed.
src_stage = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
dst_stage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
src_access = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
dst_access = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
}
else
{
// Depth/stencil target: same idea, but against the fragment-test stages.
src_stage = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
dst_stage = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
src_access = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
dst_access = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
}
// Execution + memory dependency only; layout stays GENERAL (no transition).
vk::insert_image_memory_barrier(cmd, value, VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
src_stage, dst_stage, src_access, dst_access, { aspect(), 0, 1, 0, 1 });
write_barrier_sync_tag = 0; // Disable for next draw
}
else
{
// Synced externally for this draw
write_barrier_sync_tag++;
}
}
else
{
// Not in GENERAL layout: a layout transition elsewhere already provides
// the needed synchronization, so tracking can be switched off.
write_barrier_sync_tag = 0; // Disable
}
}
if (old_contents.empty()) [[likely]]
{
if (state_flags & rsx::surface_state_flags::erase_bkgnd)
@ -771,7 +834,7 @@ namespace rsx
surface->change_layout(cmd, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
}
surface->frame_tag = 0;
surface->reset_surface_counters();
surface->memory_usage_flags |= rsx::surface_usage_flags::attachment;
}

View File

@ -1313,7 +1313,7 @@ namespace vk
// Framebuffer feedback-loop barrier entry point (overrides the backend interface).
// Defect fixed: the scraped diff retained both the pre-change raw
// vk::insert_texture_barrier(...) call AND its replacement, which would insert the
// barrier twice and bypass the render target's sync-tag bookkeeping. Per the commit,
// only the routed call remains: render_target::texture_barrier() both updates the
// cyclic-reference/write-barrier tags and performs the GENERAL-layout barrier.
void insert_texture_barrier(vk::command_buffer& cmd, vk::image* tex) override
{
	vk::as_rtt(tex)->texture_barrier(cmd);
}
bool render_target_format_is_compatible(vk::image* tex, u32 gcm_format) override