// SPDX-FileCopyrightText: Copyright 2021 yuzu Emulator Project
// SPDX-License-Identifier: GPL-2.0-or-later

#include <algorithm>
#include <bit>
#include <limits>
#include <optional>

#include <boost/container/small_vector.hpp>

#include "shader_recompiler/environment.h"
#include "shader_recompiler/frontend/ir/basic_block.h"
#include "shader_recompiler/frontend/ir/breadth_first_search.h"
#include "shader_recompiler/frontend/ir/ir_emitter.h"
#include "shader_recompiler/host_translate_info.h"
#include "shader_recompiler/ir_opt/passes.h"
#include "shader_recompiler/shader_info.h"

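// Texture lowering pass: replaces bound/bindless image opcodes with their indexed forms and
// collects the texture, texture buffer, image and image buffer descriptors they reference.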
namespace Shader::Optimization {
namespace {
struct ConstBufferAddr {
    u32 index;
    u32 offset;
    u32 shift_left;
    u32 secondary_index;
    u32 secondary_offset;
    u32 secondary_shift_left;
    IR::U32 dynamic_offset;
    u32 count;
    bool has_secondary;
};

struct TextureInst {
    ConstBufferAddr cbuf;
    IR::Inst* inst;
    IR::Block* block;
};

using TextureInstVector = boost::container::small_vector<TextureInst, 24>;

constexpr u32 DESCRIPTOR_SIZE = 8;
constexpr u32 DESCRIPTOR_SIZE_SHIFT = static_cast<u32>(std::countr_zero(DESCRIPTOR_SIZE));

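// Maps a bound or bindless image opcode to its indexed equivalent, or Void when the instruction
// is not a texture instruction.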
IR::Opcode IndexedInstruction(const IR::Inst& inst) {
    switch (inst.GetOpcode()) {
    case IR::Opcode::BindlessImageSampleImplicitLod:
    case IR::Opcode::BoundImageSampleImplicitLod:
        return IR::Opcode::ImageSampleImplicitLod;
    case IR::Opcode::BoundImageSampleExplicitLod:
    case IR::Opcode::BindlessImageSampleExplicitLod:
        return IR::Opcode::ImageSampleExplicitLod;
    case IR::Opcode::BoundImageSampleDrefImplicitLod:
    case IR::Opcode::BindlessImageSampleDrefImplicitLod:
        return IR::Opcode::ImageSampleDrefImplicitLod;
    case IR::Opcode::BoundImageSampleDrefExplicitLod:
    case IR::Opcode::BindlessImageSampleDrefExplicitLod:
        return IR::Opcode::ImageSampleDrefExplicitLod;
    case IR::Opcode::BindlessImageGather:
    case IR::Opcode::BoundImageGather:
        return IR::Opcode::ImageGather;
    case IR::Opcode::BindlessImageGatherDref:
    case IR::Opcode::BoundImageGatherDref:
        return IR::Opcode::ImageGatherDref;
    case IR::Opcode::BindlessImageFetch:
    case IR::Opcode::BoundImageFetch:
        return IR::Opcode::ImageFetch;
    case IR::Opcode::BoundImageQueryDimensions:
    case IR::Opcode::BindlessImageQueryDimensions:
        return IR::Opcode::ImageQueryDimensions;
    case IR::Opcode::BoundImageQueryLod:
    case IR::Opcode::BindlessImageQueryLod:
        return IR::Opcode::ImageQueryLod;
    case IR::Opcode::BoundImageGradient:
    case IR::Opcode::BindlessImageGradient:
        return IR::Opcode::ImageGradient;
    case IR::Opcode::BoundImageRead:
    case IR::Opcode::BindlessImageRead:
        return IR::Opcode::ImageRead;
    case IR::Opcode::BoundImageWrite:
    case IR::Opcode::BindlessImageWrite:
        return IR::Opcode::ImageWrite;
    case IR::Opcode::BoundImageAtomicIAdd32:
    case IR::Opcode::BindlessImageAtomicIAdd32:
        return IR::Opcode::ImageAtomicIAdd32;
    case IR::Opcode::BoundImageAtomicSMin32:
    case IR::Opcode::BindlessImageAtomicSMin32:
        return IR::Opcode::ImageAtomicSMin32;
    case IR::Opcode::BoundImageAtomicUMin32:
    case IR::Opcode::BindlessImageAtomicUMin32:
        return IR::Opcode::ImageAtomicUMin32;
    case IR::Opcode::BoundImageAtomicSMax32:
    case IR::Opcode::BindlessImageAtomicSMax32:
        return IR::Opcode::ImageAtomicSMax32;
    case IR::Opcode::BoundImageAtomicUMax32:
    case IR::Opcode::BindlessImageAtomicUMax32:
        return IR::Opcode::ImageAtomicUMax32;
    case IR::Opcode::BoundImageAtomicInc32:
    case IR::Opcode::BindlessImageAtomicInc32:
        return IR::Opcode::ImageAtomicInc32;
    case IR::Opcode::BoundImageAtomicDec32:
    case IR::Opcode::BindlessImageAtomicDec32:
        return IR::Opcode::ImageAtomicDec32;
    case IR::Opcode::BoundImageAtomicAnd32:
    case IR::Opcode::BindlessImageAtomicAnd32:
        return IR::Opcode::ImageAtomicAnd32;
    case IR::Opcode::BoundImageAtomicOr32:
    case IR::Opcode::BindlessImageAtomicOr32:
        return IR::Opcode::ImageAtomicOr32;
    case IR::Opcode::BoundImageAtomicXor32:
    case IR::Opcode::BindlessImageAtomicXor32:
        return IR::Opcode::ImageAtomicXor32;
    case IR::Opcode::BoundImageAtomicExchange32:
    case IR::Opcode::BindlessImageAtomicExchange32:
        return IR::Opcode::ImageAtomicExchange32;
    default:
        return IR::Opcode::Void;
    }
}

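// Returns true for bindless image opcodes and false for bound ones; throws InvalidArgument for
// any non-texture opcode.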
bool IsBindless(const IR::Inst& inst) {
    switch (inst.GetOpcode()) {
    case IR::Opcode::BindlessImageSampleImplicitLod:
    case IR::Opcode::BindlessImageSampleExplicitLod:
    case IR::Opcode::BindlessImageSampleDrefImplicitLod:
    case IR::Opcode::BindlessImageSampleDrefExplicitLod:
    case IR::Opcode::BindlessImageGather:
    case IR::Opcode::BindlessImageGatherDref:
    case IR::Opcode::BindlessImageFetch:
    case IR::Opcode::BindlessImageQueryDimensions:
    case IR::Opcode::BindlessImageQueryLod:
    case IR::Opcode::BindlessImageGradient:
    case IR::Opcode::BindlessImageRead:
    case IR::Opcode::BindlessImageWrite:
    case IR::Opcode::BindlessImageAtomicIAdd32:
    case IR::Opcode::BindlessImageAtomicSMin32:
    case IR::Opcode::BindlessImageAtomicUMin32:
    case IR::Opcode::BindlessImageAtomicSMax32:
    case IR::Opcode::BindlessImageAtomicUMax32:
    case IR::Opcode::BindlessImageAtomicInc32:
    case IR::Opcode::BindlessImageAtomicDec32:
    case IR::Opcode::BindlessImageAtomicAnd32:
    case IR::Opcode::BindlessImageAtomicOr32:
    case IR::Opcode::BindlessImageAtomicXor32:
    case IR::Opcode::BindlessImageAtomicExchange32:
        return true;
    case IR::Opcode::BoundImageSampleImplicitLod:
    case IR::Opcode::BoundImageSampleExplicitLod:
    case IR::Opcode::BoundImageSampleDrefImplicitLod:
    case IR::Opcode::BoundImageSampleDrefExplicitLod:
    case IR::Opcode::BoundImageGather:
    case IR::Opcode::BoundImageGatherDref:
    case IR::Opcode::BoundImageFetch:
    case IR::Opcode::BoundImageQueryDimensions:
    case IR::Opcode::BoundImageQueryLod:
    case IR::Opcode::BoundImageGradient:
    case IR::Opcode::BoundImageRead:
    case IR::Opcode::BoundImageWrite:
    case IR::Opcode::BoundImageAtomicIAdd32:
    case IR::Opcode::BoundImageAtomicSMin32:
    case IR::Opcode::BoundImageAtomicUMin32:
    case IR::Opcode::BoundImageAtomicSMax32:
    case IR::Opcode::BoundImageAtomicUMax32:
    case IR::Opcode::BoundImageAtomicInc32:
    case IR::Opcode::BoundImageAtomicDec32:
    case IR::Opcode::BoundImageAtomicAnd32:
    case IR::Opcode::BoundImageAtomicOr32:
    case IR::Opcode::BoundImageAtomicXor32:
    case IR::Opcode::BoundImageAtomicExchange32:
        return false;
    default:
        throw InvalidArgument("Invalid opcode {}", inst.GetOpcode());
    }
}

bool IsTextureInstruction(const IR::Inst& inst) {
    return IndexedInstruction(inst) != IR::Opcode::Void;
}

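// Searches backwards through the IR (breadth-first) for the constant buffer read(s) that produce
// a bindless handle value.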
std::optional<ConstBufferAddr> TryGetConstBuffer(const IR::Inst* inst, Environment& env);

std::optional<ConstBufferAddr> Track(const IR::Value& value, Environment& env) {
    return IR::BreadthFirstSearch(
        value, [&env](const IR::Inst* inst) { return TryGetConstBuffer(inst, env); });
}

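// Resolves a value to an immediate by reading it back through the environment when it is a
// GetCbufU32 from constant buffer 1 with immediate index and offset.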
std::optional<u32> TryGetConstant(IR::Value& value, Environment& env) {
    const IR::Inst* inst = value.InstRecursive();
    if (inst->GetOpcode() != IR::Opcode::GetCbufU32) {
        return std::nullopt;
    }
    const IR::Value index{inst->Arg(0)};
    const IR::Value offset{inst->Arg(1)};
    if (!index.IsImmediate()) {
        return std::nullopt;
    }
    if (!offset.IsImmediate()) {
        return std::nullopt;
    }
    const auto index_number = index.U32();
    if (index_number != 1) {
        return std::nullopt;
    }
    const auto offset_number = offset.U32();
    return env.ReadCbufValue(index_number, offset_number);
}

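// Visitor for Track: GetCbufU32/GetCbufU32x2 yield a constant buffer address directly, while
// BitwiseOr32 combines two tracked reads into a primary/secondary pair (separate sampler
// handles) and ShiftLeftLogical32/BitwiseAnd32 record the shift applied to a tracked handle.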
std::optional<ConstBufferAddr> TryGetConstBuffer(const IR::Inst* inst, Environment& env) {
    switch (inst->GetOpcode()) {
    default:
        return std::nullopt;
    case IR::Opcode::BitwiseOr32: {
        std::optional lhs{Track(inst->Arg(0), env)};
        std::optional rhs{Track(inst->Arg(1), env)};
        if (!lhs || !rhs) {
            return std::nullopt;
        }
        if (lhs->has_secondary || rhs->has_secondary) {
            return std::nullopt;
        }
        if (lhs->count > 1 || rhs->count > 1) {
            return std::nullopt;
        }
        if (lhs->shift_left > 0 || lhs->index > rhs->index || lhs->offset > rhs->offset) {
            std::swap(lhs, rhs);
        }
        return ConstBufferAddr{
            .index = lhs->index,
            .offset = lhs->offset,
            .shift_left = lhs->shift_left,
            .secondary_index = rhs->index,
            .secondary_offset = rhs->offset,
            .secondary_shift_left = rhs->shift_left,
            .dynamic_offset = {},
            .count = 1,
            .has_secondary = true,
        };
    }
    case IR::Opcode::ShiftLeftLogical32: {
        const IR::Value shift{inst->Arg(1)};
        if (!shift.IsImmediate()) {
            return std::nullopt;
        }
        std::optional lhs{Track(inst->Arg(0), env)};
        if (lhs) {
            lhs->shift_left = shift.U32();
        }
        return lhs;
    }
    case IR::Opcode::BitwiseAnd32: {
        IR::Value op1{inst->Arg(0)};
        IR::Value op2{inst->Arg(1)};
        if (op1.IsImmediate()) {
            std::swap(op1, op2);
        }
        if (!op2.IsImmediate() && !op1.IsImmediate()) {
            do {
                auto try_index = TryGetConstant(op1, env);
                if (try_index) {
                    op1 = op2;
                    op2 = IR::Value{*try_index};
                    break;
                }
                auto try_index_2 = TryGetConstant(op2, env);
                if (try_index_2) {
                    op2 = IR::Value{*try_index_2};
                    break;
                }
                return std::nullopt;
            } while (false);
        }
        std::optional lhs{Track(op1, env)};
        if (lhs) {
            lhs->shift_left = static_cast<u32>(std::countr_zero(op2.U32()));
        }
        return lhs;
    }
    case IR::Opcode::GetCbufU32x2:
    case IR::Opcode::GetCbufU32:
        break;
    }
    const IR::Value index{inst->Arg(0)};
    const IR::Value offset{inst->Arg(1)};
    if (!index.IsImmediate()) {
        // Reading a bindless texture from variable indices is valid
        // but not supported here at the moment
        return std::nullopt;
    }
    if (offset.IsImmediate()) {
        return ConstBufferAddr{
            .index = index.U32(),
            .offset = offset.U32(),
            .shift_left = 0,
            .secondary_index = 0,
            .secondary_offset = 0,
            .secondary_shift_left = 0,
            .dynamic_offset = {},
            .count = 1,
            .has_secondary = false,
        };
    }
    IR::Inst* const offset_inst{offset.InstRecursive()};
    if (offset_inst->GetOpcode() != IR::Opcode::IAdd32) {
        return std::nullopt;
    }
    u32 base_offset{};
    IR::U32 dynamic_offset;
    if (offset_inst->Arg(0).IsImmediate()) {
        base_offset = offset_inst->Arg(0).U32();
        dynamic_offset = IR::U32{offset_inst->Arg(1)};
    } else if (offset_inst->Arg(1).IsImmediate()) {
        base_offset = offset_inst->Arg(1).U32();
        dynamic_offset = IR::U32{offset_inst->Arg(0)};
    } else {
        return std::nullopt;
    }
    return ConstBufferAddr{
        .index = index.U32(),
        .offset = base_offset,
        .shift_left = 0,
        .secondary_index = 0,
        .secondary_offset = 0,
        .secondary_shift_left = 0,
        .dynamic_offset = dynamic_offset,
        .count = 8,
        .has_secondary = false,
    };
}

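// Builds the TextureInst record for one texture instruction: bindless handles are tracked back
// to their constant buffer, bound handles use the environment's texture bound buffer.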
TextureInst MakeInst(Environment& env, IR::Block* block, IR::Inst& inst) {
    ConstBufferAddr addr;
    if (IsBindless(inst)) {
        const std::optional<ConstBufferAddr> track_addr{Track(inst.Arg(0), env)};
        if (!track_addr) {
            throw NotImplementedException("Failed to track bindless texture constant buffer");
        }
        addr = *track_addr;
    } else {
        addr = ConstBufferAddr{
            .index = env.TextureBoundBuffer(),
            .offset = inst.Arg(0).U32(),
            .shift_left = 0,
            .secondary_index = 0,
            .secondary_offset = 0,
            .secondary_shift_left = 0,
            .dynamic_offset = {},
            .count = 1,
            .has_secondary = false,
        };
    }
    return TextureInst{
        .cbuf = addr,
        .inst = &inst,
        .block = block,
    };
}

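// Reads the raw texture handle for a tracked constant buffer address, combining the primary and
// secondary constant buffer words (shifted as recorded) when a separate sampler is present.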
u32 GetTextureHandle(Environment& env, const ConstBufferAddr& cbuf) {
    const u32 secondary_index{cbuf.has_secondary ? cbuf.secondary_index : cbuf.index};
    const u32 secondary_offset{cbuf.has_secondary ? cbuf.secondary_offset : cbuf.offset};
    const u32 lhs_raw{env.ReadCbufValue(cbuf.index, cbuf.offset) << cbuf.shift_left};
    const u32 rhs_raw{env.ReadCbufValue(secondary_index, secondary_offset)
                      << cbuf.secondary_shift_left};
    return lhs_raw | rhs_raw;
}

TextureType ReadTextureType(Environment& env, const ConstBufferAddr& cbuf) {
    return env.ReadTextureType(GetTextureHandle(env, cbuf));
}

TexturePixelFormat ReadTexturePixelFormat(Environment& env, const ConstBufferAddr& cbuf) {
    return env.ReadTexturePixelFormat(GetTextureHandle(env, cbuf));
}

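// Deduplicating collector for the shader's texture/image (buffer) descriptors; Add() returns the
// index of an equivalent existing descriptor or appends a new one, merging read/write flags.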
class Descriptors {
public:
    explicit Descriptors(TextureBufferDescriptors& texture_buffer_descriptors_,
                         ImageBufferDescriptors& image_buffer_descriptors_,
                         TextureDescriptors& texture_descriptors_,
                         ImageDescriptors& image_descriptors_)
        : texture_buffer_descriptors{texture_buffer_descriptors_},
          image_buffer_descriptors{image_buffer_descriptors_},
          texture_descriptors{texture_descriptors_}, image_descriptors{image_descriptors_} {}

    u32 Add(const TextureBufferDescriptor& desc) {
        return Add(texture_buffer_descriptors, desc, [&desc](const auto& existing) {
            return desc.cbuf_index == existing.cbuf_index &&
                   desc.cbuf_offset == existing.cbuf_offset &&
                   desc.shift_left == existing.shift_left &&
                   desc.secondary_cbuf_index == existing.secondary_cbuf_index &&
                   desc.secondary_cbuf_offset == existing.secondary_cbuf_offset &&
                   desc.secondary_shift_left == existing.secondary_shift_left &&
                   desc.count == existing.count && desc.size_shift == existing.size_shift &&
                   desc.has_secondary == existing.has_secondary;
        });
    }

    u32 Add(const ImageBufferDescriptor& desc) {
        const u32 index{Add(image_buffer_descriptors, desc, [&desc](const auto& existing) {
            return desc.format == existing.format && desc.cbuf_index == existing.cbuf_index &&
                   desc.cbuf_offset == existing.cbuf_offset && desc.count == existing.count &&
                   desc.size_shift == existing.size_shift;
        })};
        image_buffer_descriptors[index].is_written |= desc.is_written;
        image_buffer_descriptors[index].is_read |= desc.is_read;
        return index;
    }

    u32 Add(const TextureDescriptor& desc) {
        const u32 index{Add(texture_descriptors, desc, [&desc](const auto& existing) {
            return desc.type == existing.type && desc.is_depth == existing.is_depth &&
                   desc.has_secondary == existing.has_secondary &&
                   desc.cbuf_index == existing.cbuf_index &&
                   desc.cbuf_offset == existing.cbuf_offset &&
                   desc.shift_left == existing.shift_left &&
                   desc.secondary_cbuf_index == existing.secondary_cbuf_index &&
                   desc.secondary_cbuf_offset == existing.secondary_cbuf_offset &&
                   desc.secondary_shift_left == existing.secondary_shift_left &&
                   desc.count == existing.count && desc.size_shift == existing.size_shift;
        })};
        // TODO: Read this from TIC
        texture_descriptors[index].is_multisample |= desc.is_multisample;
        return index;
    }

    u32 Add(const ImageDescriptor& desc) {
        const u32 index{Add(image_descriptors, desc, [&desc](const auto& existing) {
            return desc.type == existing.type && desc.format == existing.format &&
                   desc.cbuf_index == existing.cbuf_index &&
                   desc.cbuf_offset == existing.cbuf_offset && desc.count == existing.count &&
                   desc.size_shift == existing.size_shift;
        })};
        image_descriptors[index].is_written |= desc.is_written;
        image_descriptors[index].is_read |= desc.is_read;
        return index;
    }

private:
    template <typename Descriptors, typename Descriptor, typename Func>
    static u32 Add(Descriptors& descriptors, const Descriptor& desc, Func&& pred) {
        // TODO: Handle arrays
        const auto it{std::ranges::find_if(descriptors, pred)};
        if (it != descriptors.end()) {
            return static_cast<u32>(std::distance(descriptors.begin(), it));
        }
        descriptors.push_back(desc);
        return static_cast<u32>(descriptors.size()) - 1;
    }

    TextureBufferDescriptors& texture_buffer_descriptors;
    ImageBufferDescriptors& image_buffer_descriptors;
    TextureDescriptors& texture_descriptors;
    ImageDescriptors& image_descriptors;
};

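// Divides the x/y sample coordinates by the queried texture dimensions; applied below when a
// Color2D implicit-LOD sample actually targets a Color2DRect texture.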
void PatchImageSampleImplicitLod(IR::Block& block, IR::Inst& inst) {
    IR::IREmitter ir{block, IR::Block::InstructionList::s_iterator_to(inst)};
    const auto info{inst.Flags<IR::TextureInstInfo>()};
    const IR::Value coord(inst.Arg(1));
    const IR::Value handle(ir.Imm32(0));
    const IR::U32 lod{ir.Imm32(0)};
    const IR::U1 skip_mips{ir.Imm1(true)};
    const IR::Value texture_size = ir.ImageQueryDimension(handle, lod, skip_mips, info);
    inst.SetArg(
        1, ir.CompositeConstruct(
               ir.FPMul(IR::F32(ir.CompositeExtract(coord, 0)),
                        ir.FPRecip(ir.ConvertUToF(32, 32, ir.CompositeExtract(texture_size, 0)))),
               ir.FPMul(IR::F32(ir.CompositeExtract(coord, 1)),
                        ir.FPRecip(ir.ConvertUToF(32, 32, ir.CompositeExtract(texture_size, 1))))));
}

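// Rewrites a texel fetch from an SNORM texture buffer into a manual conversion: the raw result
// is bit-cast to an integer, converted to float and scaled by 1 / max of the signed type. Used
// when the host cannot render to SNORM buffers.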
void PatchTexelFetch(IR::Block& block, IR::Inst& inst, TexturePixelFormat pixel_format) {
    const auto it{IR::Block::InstructionList::s_iterator_to(inst)};
    IR::IREmitter ir{block, IR::Block::InstructionList::s_iterator_to(inst)};
    auto get_max_value = [pixel_format]() -> float {
        switch (pixel_format) {
        case TexturePixelFormat::A8B8G8R8_SNORM:
        case TexturePixelFormat::R8G8_SNORM:
        case TexturePixelFormat::R8_SNORM:
            return 1.f / std::numeric_limits<char>::max();
        case TexturePixelFormat::R16G16B16A16_SNORM:
        case TexturePixelFormat::R16G16_SNORM:
        case TexturePixelFormat::R16_SNORM:
            return 1.f / std::numeric_limits<short>::max();
        default:
            throw InvalidArgument("Invalid texture pixel format");
        }
    };

    const IR::Value new_inst{&*block.PrependNewInst(it, inst)};
    const IR::F32 x(ir.CompositeExtract(new_inst, 0));
    const IR::F32 y(ir.CompositeExtract(new_inst, 1));
    const IR::F32 z(ir.CompositeExtract(new_inst, 2));
    const IR::F32 w(ir.CompositeExtract(new_inst, 3));
    const IR::F16F32F64 max_value(ir.Imm32(get_max_value()));
    const IR::Value converted =
        ir.CompositeConstruct(ir.FPMul(ir.ConvertSToF(32, 32, ir.BitCast<IR::U32>(x)), max_value),
                              ir.FPMul(ir.ConvertSToF(32, 32, ir.BitCast<IR::U32>(y)), max_value),
                              ir.FPMul(ir.ConvertSToF(32, 32, ir.BitCast<IR::U32>(z)), max_value),
                              ir.FPMul(ir.ConvertSToF(32, 32, ir.BitCast<IR::U32>(w)), max_value));
    inst.ReplaceUsesWith(converted);
}
} // Anonymous namespace

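// Main pass: gathers every texture instruction, rewrites it to its indexed opcode, registers the
// descriptor it references and patches the cases the host cannot express directly.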
void TexturePass(Environment& env, IR::Program& program, const HostTranslateInfo& host_info) {
    TextureInstVector to_replace;
    for (IR::Block* const block : program.post_order_blocks) {
        for (IR::Inst& inst : block->Instructions()) {
            if (!IsTextureInstruction(inst)) {
                continue;
            }
            to_replace.push_back(MakeInst(env, block, inst));
        }
    }
    // Sort instructions to visit textures by constant buffer index, then by offset
    std::ranges::sort(to_replace, [](const auto& lhs, const auto& rhs) {
        return lhs.cbuf.offset < rhs.cbuf.offset;
    });
    std::stable_sort(to_replace.begin(), to_replace.end(), [](const auto& lhs, const auto& rhs) {
        return lhs.cbuf.index < rhs.cbuf.index;
    });
    Descriptors descriptors{
        program.info.texture_buffer_descriptors,
        program.info.image_buffer_descriptors,
        program.info.texture_descriptors,
        program.info.image_descriptors,
    };
    for (TextureInst& texture_inst : to_replace) {
        // TODO: Handle arrays
        IR::Inst* const inst{texture_inst.inst};
        inst->ReplaceOpcode(IndexedInstruction(*inst));

        const auto& cbuf{texture_inst.cbuf};
        auto flags{inst->Flags<IR::TextureInstInfo>()};
        bool is_multisample{false};
        switch (inst->GetOpcode()) {
        case IR::Opcode::ImageQueryDimensions:
            flags.type.Assign(ReadTextureType(env, cbuf));
            inst->SetFlags(flags);
            break;
        case IR::Opcode::ImageSampleImplicitLod:
            if (flags.type != TextureType::Color2D) {
                break;
            }
            if (ReadTextureType(env, cbuf) == TextureType::Color2DRect) {
                PatchImageSampleImplicitLod(*texture_inst.block, *texture_inst.inst);
            }
            break;
        case IR::Opcode::ImageFetch:
            if (flags.type == TextureType::Color2D || flags.type == TextureType::Color2DRect ||
                flags.type == TextureType::ColorArray2D) {
                is_multisample = !inst->Arg(4).IsEmpty();
            } else {
                inst->SetArg(4, IR::U32{});
            }
            if (flags.type != TextureType::Color1D) {
                break;
            }
            if (ReadTextureType(env, cbuf) == TextureType::Buffer) {
                // Replace with the bound texture type only when it's a texture buffer
                // If the instruction is 1D and the bound type is 2D, don't change the code and let
                // the rasterizer robustness handle it
                // This happens on Fire Emblem: Three Houses
                flags.type.Assign(TextureType::Buffer);
            }
            break;
        default:
            break;
        }
        u32 index;
        switch (inst->GetOpcode()) {
        case IR::Opcode::ImageRead:
        case IR::Opcode::ImageAtomicIAdd32:
        case IR::Opcode::ImageAtomicSMin32:
        case IR::Opcode::ImageAtomicUMin32:
        case IR::Opcode::ImageAtomicSMax32:
        case IR::Opcode::ImageAtomicUMax32:
        case IR::Opcode::ImageAtomicInc32:
        case IR::Opcode::ImageAtomicDec32:
        case IR::Opcode::ImageAtomicAnd32:
        case IR::Opcode::ImageAtomicOr32:
        case IR::Opcode::ImageAtomicXor32:
        case IR::Opcode::ImageAtomicExchange32:
        case IR::Opcode::ImageWrite: {
            if (cbuf.has_secondary) {
                throw NotImplementedException("Unexpected separate sampler");
            }
            const bool is_written{inst->GetOpcode() != IR::Opcode::ImageRead};
            const bool is_read{inst->GetOpcode() != IR::Opcode::ImageWrite};
            if (flags.type == TextureType::Buffer) {
                index = descriptors.Add(ImageBufferDescriptor{
                    .format = flags.image_format,
                    .is_written = is_written,
                    .is_read = is_read,
                    .cbuf_index = cbuf.index,
                    .cbuf_offset = cbuf.offset,
                    .count = cbuf.count,
                    .size_shift = DESCRIPTOR_SIZE_SHIFT,
                });
            } else {
                index = descriptors.Add(ImageDescriptor{
                    .type = flags.type,
                    .format = flags.image_format,
                    .is_written = is_written,
                    .is_read = is_read,
                    .cbuf_index = cbuf.index,
                    .cbuf_offset = cbuf.offset,
                    .count = cbuf.count,
                    .size_shift = DESCRIPTOR_SIZE_SHIFT,
                });
            }
            break;
        }
        default:
            if (flags.type == TextureType::Buffer) {
                index = descriptors.Add(TextureBufferDescriptor{
                    .has_secondary = cbuf.has_secondary,
                    .cbuf_index = cbuf.index,
                    .cbuf_offset = cbuf.offset,
                    .shift_left = cbuf.shift_left,
                    .secondary_cbuf_index = cbuf.secondary_index,
                    .secondary_cbuf_offset = cbuf.secondary_offset,
                    .secondary_shift_left = cbuf.secondary_shift_left,
                    .count = cbuf.count,
                    .size_shift = DESCRIPTOR_SIZE_SHIFT,
                });
            } else {
                index = descriptors.Add(TextureDescriptor{
                    .type = flags.type,
                    .is_depth = flags.is_depth != 0,
                    .is_multisample = is_multisample,
                    .has_secondary = cbuf.has_secondary,
                    .cbuf_index = cbuf.index,
                    .cbuf_offset = cbuf.offset,
                    .shift_left = cbuf.shift_left,
                    .secondary_cbuf_index = cbuf.secondary_index,
                    .secondary_cbuf_offset = cbuf.secondary_offset,
                    .secondary_shift_left = cbuf.secondary_shift_left,
                    .count = cbuf.count,
                    .size_shift = DESCRIPTOR_SIZE_SHIFT,
                });
            }
            break;
        }
        flags.descriptor_index.Assign(index);
        inst->SetFlags(flags);

        if (cbuf.count > 1) {
            const auto insert_point{IR::Block::InstructionList::s_iterator_to(*inst)};
            IR::IREmitter ir{*texture_inst.block, insert_point};
            const IR::U32 shift{ir.Imm32(std::countr_zero(DESCRIPTOR_SIZE))};
            inst->SetArg(0, ir.UMin(ir.ShiftRightArithmetic(cbuf.dynamic_offset, shift),
                                    ir.Imm32(DESCRIPTOR_SIZE - 1)));
        } else {
            inst->SetArg(0, IR::Value{});
        }

        if (!host_info.support_snorm_render_buffer && inst->GetOpcode() == IR::Opcode::ImageFetch &&
            flags.type == TextureType::Buffer) {
            const auto pixel_format = ReadTexturePixelFormat(env, cbuf);
            if (pixel_format != TexturePixelFormat::OTHER) {
                PatchTexelFetch(*texture_inst.block, *texture_inst.inst, pixel_format);
            }
        }
    }
}

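// Merges the descriptors gathered for `source` into `base`, reusing the deduplicating Add()
// logic above.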
void JoinTextureInfo(Info& base, Info& source) {
    Descriptors descriptors{
        base.texture_buffer_descriptors,
        base.image_buffer_descriptors,
        base.texture_descriptors,
        base.image_descriptors,
    };
    for (auto& desc : source.texture_buffer_descriptors) {
        descriptors.Add(desc);
    }
    for (auto& desc : source.image_buffer_descriptors) {
        descriptors.Add(desc);
    }
    for (auto& desc : source.texture_descriptors) {
        descriptors.Add(desc);
    }
    for (auto& desc : source.image_descriptors) {
        descriptors.Add(desc);
    }
}

} // namespace Shader::Optimization