Merge pull request #7399 from ameerj/art-refactor

video_core: Refactoring post A.R.T. merge
Fernando S authored on 2021-12-18 07:09:58 +01:00, committed via GitHub
commit 04b4f3b051
8 changed files with 147 additions and 152 deletions


@@ -92,7 +92,7 @@ public:
void ReinterpretImage(Image& dst, Image& src, std::span<const VideoCommon::ImageCopy> copies);
void ConvertImage(Framebuffer* dst, ImageView& dst_view, ImageView& src_view, bool rescaled) {
void ConvertImage(Framebuffer* dst, ImageView& dst_view, ImageView& src_view) {
UNIMPLEMENTED();
}


@@ -437,39 +437,29 @@ void RendererOpenGL::DrawScreen(const Layout::FramebufferLayout& layout) {
glBindTextureUnit(0, fxaa_texture.handle);
}
// Set projection matrix
const std::array ortho_matrix =
MakeOrthographicMatrix(static_cast<float>(layout.width), static_cast<float>(layout.height));
GLuint fragment_handle;
const auto filter = Settings::values.scaling_filter.GetValue();
switch (filter) {
case Settings::ScalingFilter::NearestNeighbor:
fragment_handle = present_bilinear_fragment.handle;
break;
case Settings::ScalingFilter::Bilinear:
fragment_handle = present_bilinear_fragment.handle;
break;
case Settings::ScalingFilter::Bicubic:
fragment_handle = present_bicubic_fragment.handle;
break;
case Settings::ScalingFilter::Gaussian:
fragment_handle = present_gaussian_fragment.handle;
break;
case Settings::ScalingFilter::ScaleForce:
fragment_handle = present_scaleforce_fragment.handle;
break;
case Settings::ScalingFilter::Fsr:
LOG_WARNING(
Render_OpenGL,
"FidelityFX FSR Super Sampling is not supported in OpenGL, changing to ScaleForce");
fragment_handle = present_scaleforce_fragment.handle;
break;
default:
fragment_handle = present_bilinear_fragment.handle;
break;
}
const auto fragment_handle = [this]() {
switch (Settings::values.scaling_filter.GetValue()) {
case Settings::ScalingFilter::NearestNeighbor:
case Settings::ScalingFilter::Bilinear:
return present_bilinear_fragment.handle;
case Settings::ScalingFilter::Bicubic:
return present_bicubic_fragment.handle;
case Settings::ScalingFilter::Gaussian:
return present_gaussian_fragment.handle;
case Settings::ScalingFilter::ScaleForce:
return present_scaleforce_fragment.handle;
case Settings::ScalingFilter::Fsr:
LOG_WARNING(
Render_OpenGL,
"FidelityFX Super Resolution is not supported in OpenGL, changing to ScaleForce");
return present_scaleforce_fragment.handle;
default:
return present_bilinear_fragment.handle;
}
}();
program_manager.BindPresentPrograms(present_vertex.handle, fragment_handle);
glProgramUniformMatrix3x2fv(present_vertex.handle, ModelViewMatrixLocation, 1, GL_FALSE,
ortho_matrix.data());
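
Note on the change above: the old switch assigned fragment_handle through a mutable variable and repeated the bilinear assignment for NearestNeighbor and Bilinear; the new code wraps the switch in an immediately invoked lambda so the two cases can fall through to a single return and the handle is bound once, as a const value. A minimal self-contained sketch of the idiom, illustrative only and not code from this commit (Filter and the numeric handles below are placeholders):

    #include <cstdint>

    enum class Filter { NearestNeighbor, Bilinear, Bicubic };

    std::uint32_t SelectFragmentHandle(Filter filter) {
        // Immediately invoked lambda: each case returns directly, so
        // NearestNeighbor and Bilinear share one case and the result is
        // initialized exactly once.
        const std::uint32_t handle = [filter] {
            switch (filter) {
            case Filter::NearestNeighbor:
            case Filter::Bilinear:
                return 1u; // placeholder for the bilinear program handle
            case Filter::Bicubic:
                return 2u; // placeholder for the bicubic program handle
            default:
                return 1u;
            }
        }();
        return handle;
    }

The Vulkan blit-screen hunk further down applies the same idiom when picking the presentation pipeline.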


@@ -4,6 +4,7 @@
#include <algorithm>
#include "common/settings.h"
#include "video_core/host_shaders/convert_abgr8_to_d24s8_frag_spv.h"
#include "video_core/host_shaders/convert_d24s8_to_abgr8_frag_spv.h"
#include "video_core/host_shaders/convert_depth_to_float_frag_spv.h"
@@ -335,6 +336,17 @@ void BindBlitState(vk::CommandBuffer cmdbuf, VkPipelineLayout layout, const Regi
cmdbuf.SetScissor(0, scissor);
cmdbuf.PushConstants(layout, VK_SHADER_STAGE_VERTEX_BIT, push_constants);
}
VkExtent2D GetConversionExtent(const ImageView& src_image_view) {
const auto& resolution = Settings::values.resolution_info;
const bool is_rescaled = src_image_view.IsRescaled();
u32 width = src_image_view.size.width;
u32 height = src_image_view.size.height;
return VkExtent2D{
.width = is_rescaled ? resolution.ScaleUp(width) : width,
.height = is_rescaled ? resolution.ScaleUp(height) : height,
};
}
} // Anonymous namespace
BlitImageHelper::BlitImageHelper(const Device& device_, VKScheduler& scheduler_,
@@ -425,61 +437,52 @@ void BlitImageHelper::BlitDepthStencil(const Framebuffer* dst_framebuffer,
}
void BlitImageHelper::ConvertD32ToR32(const Framebuffer* dst_framebuffer,
const ImageView& src_image_view, u32 up_scale,
u32 down_shift) {
const ImageView& src_image_view) {
ConvertDepthToColorPipeline(convert_d32_to_r32_pipeline, dst_framebuffer->RenderPass());
Convert(*convert_d32_to_r32_pipeline, dst_framebuffer, src_image_view, up_scale, down_shift);
Convert(*convert_d32_to_r32_pipeline, dst_framebuffer, src_image_view);
}
void BlitImageHelper::ConvertR32ToD32(const Framebuffer* dst_framebuffer,
const ImageView& src_image_view, u32 up_scale,
u32 down_shift) {
const ImageView& src_image_view) {
ConvertColorToDepthPipeline(convert_r32_to_d32_pipeline, dst_framebuffer->RenderPass());
Convert(*convert_r32_to_d32_pipeline, dst_framebuffer, src_image_view, up_scale, down_shift);
Convert(*convert_r32_to_d32_pipeline, dst_framebuffer, src_image_view);
}
void BlitImageHelper::ConvertD16ToR16(const Framebuffer* dst_framebuffer,
const ImageView& src_image_view, u32 up_scale,
u32 down_shift) {
const ImageView& src_image_view) {
ConvertDepthToColorPipeline(convert_d16_to_r16_pipeline, dst_framebuffer->RenderPass());
Convert(*convert_d16_to_r16_pipeline, dst_framebuffer, src_image_view, up_scale, down_shift);
Convert(*convert_d16_to_r16_pipeline, dst_framebuffer, src_image_view);
}
void BlitImageHelper::ConvertR16ToD16(const Framebuffer* dst_framebuffer,
const ImageView& src_image_view, u32 up_scale,
u32 down_shift) {
const ImageView& src_image_view) {
ConvertColorToDepthPipeline(convert_r16_to_d16_pipeline, dst_framebuffer->RenderPass());
Convert(*convert_r16_to_d16_pipeline, dst_framebuffer, src_image_view, up_scale, down_shift);
Convert(*convert_r16_to_d16_pipeline, dst_framebuffer, src_image_view);
}
void BlitImageHelper::ConvertABGR8ToD24S8(const Framebuffer* dst_framebuffer,
ImageView& src_image_view, u32 up_scale, u32 down_shift) {
const ImageView& src_image_view) {
ConvertPipelineDepthTargetEx(convert_abgr8_to_d24s8_pipeline, dst_framebuffer->RenderPass(),
convert_abgr8_to_d24s8_frag, true);
ConvertColor(*convert_abgr8_to_d24s8_pipeline, dst_framebuffer, src_image_view, up_scale,
down_shift);
convert_abgr8_to_d24s8_frag);
Convert(*convert_abgr8_to_d24s8_pipeline, dst_framebuffer, src_image_view);
}
void BlitImageHelper::ConvertD24S8ToABGR8(const Framebuffer* dst_framebuffer,
ImageView& src_image_view, u32 up_scale, u32 down_shift) {
ImageView& src_image_view) {
ConvertPipelineColorTargetEx(convert_d24s8_to_abgr8_pipeline, dst_framebuffer->RenderPass(),
convert_d24s8_to_abgr8_frag, false);
ConvertDepthStencil(*convert_d24s8_to_abgr8_pipeline, dst_framebuffer, src_image_view, up_scale,
down_shift);
convert_d24s8_to_abgr8_frag);
ConvertDepthStencil(*convert_d24s8_to_abgr8_pipeline, dst_framebuffer, src_image_view);
}
void BlitImageHelper::Convert(VkPipeline pipeline, const Framebuffer* dst_framebuffer,
const ImageView& src_image_view, u32 up_scale, u32 down_shift) {
const ImageView& src_image_view) {
const VkPipelineLayout layout = *one_texture_pipeline_layout;
const VkImageView src_view = src_image_view.Handle(Shader::TextureType::Color2D);
const VkSampler sampler = *nearest_sampler;
const VkExtent2D extent{
.width = std::max((src_image_view.size.width * up_scale) >> down_shift, 1U),
.height = std::max((src_image_view.size.height * up_scale) >> down_shift, 1U),
};
const VkExtent2D extent = GetConversionExtent(src_image_view);
scheduler.RequestRenderpass(dst_framebuffer);
scheduler.Record([pipeline, layout, sampler, src_view, extent, up_scale, down_shift,
this](vk::CommandBuffer cmdbuf) {
scheduler.Record([pipeline, layout, sampler, src_view, extent, this](vk::CommandBuffer cmdbuf) {
const VkOffset2D offset{
.x = 0,
.y = 0,
@@ -563,18 +566,16 @@ void BlitImageHelper::ConvertColor(VkPipeline pipeline, const Framebuffer* dst_f
}
void BlitImageHelper::ConvertDepthStencil(VkPipeline pipeline, const Framebuffer* dst_framebuffer,
ImageView& src_image_view, u32 up_scale, u32 down_shift) {
ImageView& src_image_view) {
const VkPipelineLayout layout = *two_textures_pipeline_layout;
const VkImageView src_depth_view = src_image_view.DepthView();
const VkImageView src_stencil_view = src_image_view.StencilView();
const VkSampler sampler = *nearest_sampler;
const VkExtent2D extent{
.width = std::max((src_image_view.size.width * up_scale) >> down_shift, 1U),
.height = std::max((src_image_view.size.height * up_scale) >> down_shift, 1U),
};
const VkExtent2D extent = GetConversionExtent(src_image_view);
scheduler.RequestRenderpass(dst_framebuffer);
scheduler.Record([pipeline, layout, sampler, src_depth_view, src_stencil_view, extent, up_scale,
down_shift, this](vk::CommandBuffer cmdbuf) {
scheduler.Record([pipeline, layout, sampler, src_depth_view, src_stencil_view, extent,
this](vk::CommandBuffer cmdbuf) {
const VkOffset2D offset{
.x = 0,
.y = 0,
@@ -695,11 +696,14 @@ VkPipeline BlitImageHelper::FindOrEmplaceDepthStencilPipeline(const BlitImagePip
return *blit_depth_stencil_pipelines.back();
}
void BlitImageHelper::ConvertDepthToColorPipeline(vk::Pipeline& pipeline, VkRenderPass renderpass) {
void BlitImageHelper::ConvertPipeline(vk::Pipeline& pipeline, VkRenderPass renderpass,
bool is_target_depth) {
if (pipeline) {
return;
}
const std::array stages = MakeStages(*full_screen_vert, *convert_depth_to_float_frag);
VkShaderModule frag_shader =
is_target_depth ? *convert_float_to_depth_frag : *convert_depth_to_float_frag;
const std::array stages = MakeStages(*full_screen_vert, frag_shader);
pipeline = device.GetLogical().CreateGraphicsPipeline({
.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
.pNext = nullptr,
@@ -712,8 +716,9 @@ void BlitImageHelper::ConvertDepthToColorPipeline(vk::Pipeline& pipeline, VkRend
.pViewportState = &PIPELINE_VIEWPORT_STATE_CREATE_INFO,
.pRasterizationState = &PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
.pMultisampleState = &PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
.pDepthStencilState = nullptr,
.pColorBlendState = &PIPELINE_COLOR_BLEND_STATE_GENERIC_CREATE_INFO,
.pDepthStencilState = is_target_depth ? &PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO : nullptr,
.pColorBlendState = is_target_depth ? &PIPELINE_COLOR_BLEND_STATE_EMPTY_CREATE_INFO
: &PIPELINE_COLOR_BLEND_STATE_GENERIC_CREATE_INFO,
.pDynamicState = &PIPELINE_DYNAMIC_STATE_CREATE_INFO,
.layout = *one_texture_pipeline_layout,
.renderPass = renderpass,
@@ -723,37 +728,17 @@ void BlitImageHelper::ConvertDepthToColorPipeline(vk::Pipeline& pipeline, VkRend
});
}
void BlitImageHelper::ConvertDepthToColorPipeline(vk::Pipeline& pipeline, VkRenderPass renderpass) {
ConvertPipeline(pipeline, renderpass, false);
}
void BlitImageHelper::ConvertColorToDepthPipeline(vk::Pipeline& pipeline, VkRenderPass renderpass) {
if (pipeline) {
return;
}
const std::array stages = MakeStages(*full_screen_vert, *convert_float_to_depth_frag);
pipeline = device.GetLogical().CreateGraphicsPipeline({
.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
.pNext = nullptr,
.flags = 0,
.stageCount = static_cast<u32>(stages.size()),
.pStages = stages.data(),
.pVertexInputState = &PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
.pInputAssemblyState = &PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
.pTessellationState = nullptr,
.pViewportState = &PIPELINE_VIEWPORT_STATE_CREATE_INFO,
.pRasterizationState = &PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
.pMultisampleState = &PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
.pDepthStencilState = &PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
.pColorBlendState = &PIPELINE_COLOR_BLEND_STATE_EMPTY_CREATE_INFO,
.pDynamicState = &PIPELINE_DYNAMIC_STATE_CREATE_INFO,
.layout = *one_texture_pipeline_layout,
.renderPass = renderpass,
.subpass = 0,
.basePipelineHandle = VK_NULL_HANDLE,
.basePipelineIndex = 0,
});
ConvertPipeline(pipeline, renderpass, true);
}
void BlitImageHelper::ConvertPipelineEx(vk::Pipeline& pipeline, VkRenderPass renderpass,
vk::ShaderModule& module, bool is_target_depth,
bool single_texture) {
vk::ShaderModule& module, bool single_texture,
bool is_target_depth) {
if (pipeline) {
return;
}
@@ -782,13 +767,13 @@ void BlitImageHelper::ConvertPipelineEx(vk::Pipeline& pipeline, VkRenderPass ren
}
void BlitImageHelper::ConvertPipelineColorTargetEx(vk::Pipeline& pipeline, VkRenderPass renderpass,
vk::ShaderModule& module, bool single_texture) {
ConvertPipelineEx(pipeline, renderpass, module, false, single_texture);
vk::ShaderModule& module) {
ConvertPipelineEx(pipeline, renderpass, module, false, false);
}
void BlitImageHelper::ConvertPipelineDepthTargetEx(vk::Pipeline& pipeline, VkRenderPass renderpass,
vk::ShaderModule& module, bool single_texture) {
ConvertPipelineEx(pipeline, renderpass, module, true, single_texture);
vk::ShaderModule& module) {
ConvertPipelineEx(pipeline, renderpass, module, true, true);
}
} // namespace Vulkan
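
Note on GetConversionExtent above: instead of threading up_scale and down_shift through every Convert* entry point, the helper derives the render extent from the source view's own rescale state. A hedged sketch of the idea, assuming Settings::ResolutionScalingInfo::ScaleUp applies the same (value * up_scale) >> down_shift math, clamped to at least 1, that the removed inline code in Convert and ConvertDepthStencil used; ScalingInfo and ConversionExtent are stand-in names, not yuzu identifiers:

    #include <algorithm>
    #include <cstdint>

    // Stand-in for Settings::ResolutionScalingInfo (assumption, see above).
    struct ScalingInfo {
        std::uint32_t up_scale = 1;
        std::uint32_t down_shift = 0;

        std::uint32_t ScaleUp(std::uint32_t value) const {
            // Mirrors the inline math removed from BlitImageHelper::Convert.
            return std::max((value * up_scale) >> down_shift, 1u);
        }
    };

    struct Extent2D {
        std::uint32_t width;
        std::uint32_t height;
    };

    // Stand-in for GetConversionExtent: scale only when the source view was
    // created from a rescaled image; otherwise pass the size through untouched.
    Extent2D ConversionExtent(const ScalingInfo& res, bool is_rescaled,
                              std::uint32_t width, std::uint32_t height) {
        return Extent2D{
            .width = is_rescaled ? res.ScaleUp(width) : width,
            .height = is_rescaled ? res.ScaleUp(height) : height,
        };
    }

Callers such as ConvertD16ToR16 no longer need to know whether rescaling is active; that decision now lives next to the data that determines it.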


@@ -44,50 +44,46 @@ public:
const Region2D& src_region, Tegra::Engines::Fermi2D::Filter filter,
Tegra::Engines::Fermi2D::Operation operation);
void ConvertD32ToR32(const Framebuffer* dst_framebuffer, const ImageView& src_image_view,
u32 up_scale, u32 down_shift);
void ConvertD32ToR32(const Framebuffer* dst_framebuffer, const ImageView& src_image_view);
void ConvertR32ToD32(const Framebuffer* dst_framebuffer, const ImageView& src_image_view,
u32 up_scale, u32 down_shift);
void ConvertR32ToD32(const Framebuffer* dst_framebuffer, const ImageView& src_image_view);
void ConvertD16ToR16(const Framebuffer* dst_framebuffer, const ImageView& src_image_view,
u32 up_scale, u32 down_shift);
void ConvertD16ToR16(const Framebuffer* dst_framebuffer, const ImageView& src_image_view);
void ConvertR16ToD16(const Framebuffer* dst_framebuffer, const ImageView& src_image_view,
u32 up_scale, u32 down_shift);
void ConvertR16ToD16(const Framebuffer* dst_framebuffer, const ImageView& src_image_view);
void ConvertABGR8ToD24S8(const Framebuffer* dst_framebuffer, ImageView& src_image_view,
u32 up_scale, u32 down_shift);
void ConvertABGR8ToD24S8(const Framebuffer* dst_framebuffer, const ImageView& src_image_view);
void ConvertD24S8ToABGR8(const Framebuffer* dst_framebuffer, ImageView& src_image_view,
u32 up_scale, u32 down_shift);
void ConvertD24S8ToABGR8(const Framebuffer* dst_framebuffer, ImageView& src_image_view);
private:
void Convert(VkPipeline pipeline, const Framebuffer* dst_framebuffer,
const ImageView& src_image_view, u32 up_scale, u32 down_shift);
const ImageView& src_image_view);
void ConvertColor(VkPipeline pipeline, const Framebuffer* dst_framebuffer,
ImageView& src_image_view, u32 up_scale, u32 down_shift);
void ConvertDepthStencil(VkPipeline pipeline, const Framebuffer* dst_framebuffer,
ImageView& src_image_view, u32 up_scale, u32 down_shift);
ImageView& src_image_view);
[[nodiscard]] VkPipeline FindOrEmplaceColorPipeline(const BlitImagePipelineKey& key);
[[nodiscard]] VkPipeline FindOrEmplaceDepthStencilPipeline(const BlitImagePipelineKey& key);
void ConvertPipeline(vk::Pipeline& pipeline, VkRenderPass renderpass, bool is_target_depth);
void ConvertDepthToColorPipeline(vk::Pipeline& pipeline, VkRenderPass renderpass);
void ConvertColorToDepthPipeline(vk::Pipeline& pipeline, VkRenderPass renderpass);
void ConvertPipelineEx(vk::Pipeline& pipeline, VkRenderPass renderpass,
vk::ShaderModule& module, bool is_target_depth, bool single_texture);
vk::ShaderModule& module, bool single_texture, bool is_target_depth);
void ConvertPipelineColorTargetEx(vk::Pipeline& pipeline, VkRenderPass renderpass,
vk::ShaderModule& module, bool single_texture);
vk::ShaderModule& module);
void ConvertPipelineDepthTargetEx(vk::Pipeline& pipeline, VkRenderPass renderpass,
vk::ShaderModule& module, bool single_texture);
vk::ShaderModule& module);
const Device& device;
VKScheduler& scheduler;


@@ -391,28 +391,23 @@ VkSemaphore VKBlitScreen::Draw(const Tegra::FramebufferConfig& framebuffer,
.offset = {0, 0},
.extent = size,
};
const auto filter = Settings::values.scaling_filter.GetValue();
cmdbuf.BeginRenderPass(renderpass_bi, VK_SUBPASS_CONTENTS_INLINE);
switch (filter) {
case Settings::ScalingFilter::NearestNeighbor:
cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, *bilinear_pipeline);
break;
case Settings::ScalingFilter::Bilinear:
cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, *bilinear_pipeline);
break;
case Settings::ScalingFilter::Bicubic:
cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, *bicubic_pipeline);
break;
case Settings::ScalingFilter::Gaussian:
cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, *gaussian_pipeline);
break;
case Settings::ScalingFilter::ScaleForce:
cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, *scaleforce_pipeline);
break;
default:
cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, *bilinear_pipeline);
break;
}
auto graphics_pipeline = [this]() {
switch (Settings::values.scaling_filter.GetValue()) {
case Settings::ScalingFilter::NearestNeighbor:
case Settings::ScalingFilter::Bilinear:
return *bilinear_pipeline;
case Settings::ScalingFilter::Bicubic:
return *bicubic_pipeline;
case Settings::ScalingFilter::Gaussian:
return *gaussian_pipeline;
case Settings::ScalingFilter::ScaleForce:
return *scaleforce_pipeline;
default:
return *bilinear_pipeline;
}
}();
cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, graphics_pipeline);
cmdbuf.SetViewport(0, viewport);
cmdbuf.SetScissor(0, scissor);


@@ -1057,37 +1057,37 @@ void TextureCacheRuntime::BlitImage(Framebuffer* dst_framebuffer, ImageView& dst
});
}
void TextureCacheRuntime::ConvertImage(Framebuffer* dst, ImageView& dst_view, ImageView& src_view,
bool rescaled) {
const u32 up_scale = rescaled ? resolution.up_scale : 1;
const u32 down_shift = rescaled ? resolution.down_shift : 0;
void TextureCacheRuntime::ConvertImage(Framebuffer* dst, ImageView& dst_view, ImageView& src_view) {
switch (dst_view.format) {
case PixelFormat::R16_UNORM:
if (src_view.format == PixelFormat::D16_UNORM) {
return blit_image_helper.ConvertD16ToR16(dst, src_view, up_scale, down_shift);
return blit_image_helper.ConvertD16ToR16(dst, src_view);
}
break;
case PixelFormat::A8B8G8R8_UNORM:
if (src_view.format == PixelFormat::S8_UINT_D24_UNORM) {
return blit_image_helper.ConvertD24S8ToABGR8(dst, src_view, up_scale, down_shift);
return blit_image_helper.ConvertD24S8ToABGR8(dst, src_view);
}
break;
case PixelFormat::R32_FLOAT:
if (src_view.format == PixelFormat::D32_FLOAT) {
return blit_image_helper.ConvertD32ToR32(dst, src_view, up_scale, down_shift);
return blit_image_helper.ConvertD32ToR32(dst, src_view);
}
break;
case PixelFormat::D16_UNORM:
if (src_view.format == PixelFormat::R16_UNORM) {
return blit_image_helper.ConvertR16ToD16(dst, src_view, up_scale, down_shift);
return blit_image_helper.ConvertR16ToD16(dst, src_view);
}
break;
case PixelFormat::S8_UINT_D24_UNORM:
return blit_image_helper.ConvertABGR8ToD24S8(dst, src_view, up_scale, down_shift);
if (src_view.format == PixelFormat::A8B8G8R8_UNORM ||
src_view.format == PixelFormat::B8G8R8A8_UNORM) {
return blit_image_helper.ConvertABGR8ToD24S8(dst, src_view);
}
break;
case PixelFormat::D32_FLOAT:
if (src_view.format == PixelFormat::R32_FLOAT) {
return blit_image_helper.ConvertR32ToD32(dst, src_view, up_scale, down_shift);
return blit_image_helper.ConvertR32ToD32(dst, src_view);
}
break;
default:
@@ -1329,6 +1329,10 @@ void Image::DownloadMemory(const StagingBufferRef& map, std::span<const BufferIm
}
}
bool Image::IsRescaled() const noexcept {
return True(flags & ImageFlagBits::Rescaled);
}
bool Image::ScaleUp(bool ignore) {
if (True(flags & ImageFlagBits::Rescaled)) {
return false;
@@ -1469,7 +1473,8 @@ bool Image::BlitScaleHelper(bool scale_up) {
ImageView::ImageView(TextureCacheRuntime& runtime, const VideoCommon::ImageViewInfo& info,
ImageId image_id_, Image& image)
: VideoCommon::ImageViewBase{info, image.info, image_id_}, device{&runtime.device},
image_handle{image.Handle()}, samples{ConvertSampleCount(image.info.num_samples)} {
src_image{&image}, image_handle{image.Handle()},
samples(ConvertSampleCount(image.info.num_samples)) {
using Shader::TextureType;
const VkImageAspectFlags aspect_mask = ImageViewAspectMask(info);
@@ -1607,6 +1612,13 @@ VkImageView ImageView::StorageView(Shader::TextureType texture_type,
return *view;
}
bool ImageView::IsRescaled() const noexcept {
if (!src_image) {
return false;
}
return src_image->IsRescaled();
}
vk::ImageView ImageView::MakeView(VkFormat vk_format, VkImageAspectFlags aspect_mask) {
return device->GetLogical().CreateImageView({
.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,


@@ -65,7 +65,7 @@ public:
void ReinterpretImage(Image& dst, Image& src, std::span<const VideoCommon::ImageCopy> copies);
void ConvertImage(Framebuffer* dst, ImageView& dst_view, ImageView& src_view, bool rescaled);
void ConvertImage(Framebuffer* dst, ImageView& dst_view, ImageView& src_view);
bool CanAccelerateImageUpload(Image&) const noexcept {
return false;
@@ -139,6 +139,8 @@ public:
return std::exchange(initialized, true);
}
bool IsRescaled() const noexcept;
bool ScaleUp(bool ignore = false);
bool ScaleDown(bool ignore = false);
@@ -189,6 +191,8 @@ public:
[[nodiscard]] VkImageView StorageView(Shader::TextureType texture_type,
Shader::ImageFormat image_format);
[[nodiscard]] bool IsRescaled() const noexcept;
[[nodiscard]] VkImageView Handle(Shader::TextureType texture_type) const noexcept {
return *image_views[static_cast<size_t>(texture_type)];
}
@@ -222,6 +226,8 @@ private:
[[nodiscard]] vk::ImageView MakeView(VkFormat vk_format, VkImageAspectFlags aspect_mask);
const Device* device = nullptr;
const Image* src_image{};
std::array<vk::ImageView, Shader::NUM_TEXTURE_TYPES> image_views;
std::unique_ptr<StorageViews> storage_views;
vk::ImageView depth_view;


@@ -1855,9 +1855,20 @@ void TextureCache<P>::CopyImage(ImageId dst_id, ImageId src_id, std::vector<Imag
.height = std::min(dst_view.size.height, src_view.size.height),
.depth = std::min(dst_view.size.depth, src_view.size.depth),
};
UNIMPLEMENTED_IF(copy.extent != expected_size);
const Extent3D scaled_extent = [is_rescaled, expected_size]() {
if (!is_rescaled) {
return expected_size;
}
const auto& resolution = Settings::values.resolution_info;
return Extent3D{
.width = resolution.ScaleUp(expected_size.width),
.height = resolution.ScaleUp(expected_size.height),
.depth = expected_size.depth,
};
}();
UNIMPLEMENTED_IF(copy.extent != scaled_extent);
runtime.ConvertImage(dst_framebuffer, dst_view, src_view, is_rescaled);
runtime.ConvertImage(dst_framebuffer, dst_view, src_view);
}
}
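
Note on the scaled_extent guard above: when is_rescaled is true, copy.extent presumably carries scaled dimensions, so the UNIMPLEMENTED_IF check has to compare against the scaled-up expected size rather than the raw minimum of the two view sizes. As a hypothetical example, with a 2x scale (up_scale = 2, down_shift = 0) an expected_size of {1280, 720, 1} becomes a scaled_extent of {2560, 1440, 1}; comparing a rescaled copy against the unscaled extent would trip the assertion even though the conversion itself is handled.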