2021-05-26 04:16:20 +00:00
|
|
|
// Copyright 2021 yuzu Emulator Project
|
|
|
|
// Licensed under GPLv2 or any later version
|
|
|
|
// Refer to the license.txt file included.
|
|
|
|
|
|
|
|
#include <string_view>
|
|
|
|
|
|
|
|
#include "shader_recompiler/backend/glsl/emit_context.h"
|
2021-05-29 06:09:29 +00:00
|
|
|
#include "shader_recompiler/backend/glsl/emit_glsl_instructions.h"
|
2021-05-27 01:18:17 +00:00
|
|
|
#include "shader_recompiler/frontend/ir/modifiers.h"
|
2021-05-26 04:16:20 +00:00
|
|
|
#include "shader_recompiler/frontend/ir/value.h"
|
|
|
|
|
|
|
|
namespace Shader::Backend::GLSL {
|
2021-05-27 01:18:17 +00:00
|
|
|
namespace {
|
2021-05-29 05:06:29 +00:00
|
|
|
std::string Texture(EmitContext& ctx, const IR::TextureInstInfo& info,
|
2021-05-27 01:18:17 +00:00
|
|
|
[[maybe_unused]] const IR::Value& index) {
|
|
|
|
if (info.type == TextureType::Buffer) {
|
|
|
|
throw NotImplementedException("TextureType::Buffer");
|
|
|
|
} else {
|
|
|
|
return fmt::format("tex{}", ctx.texture_bindings.at(info.descriptor_index));
|
|
|
|
}
|
|
|
|
}
|
2021-05-29 05:06:29 +00:00
|
|
|
|
|
|
|
// Wraps |value| in the signed-integer constructor whose component count
// matches the offset vector expected for this texture type.
std::string CastToIntVec(std::string_view value, const IR::TextureInstInfo& info) {
    std::string_view constructor;
    switch (info.type) {
    case TextureType::Color1D:
        constructor = "int";
        break;
    case TextureType::ColorArray1D:
    case TextureType::Color2D:
    case TextureType::ColorArray2D:
        constructor = "ivec2";
        break;
    case TextureType::Color3D:
    case TextureType::ColorCube:
        constructor = "ivec3";
        break;
    case TextureType::ColorArrayCube:
        constructor = "ivec4";
        break;
    default:
        throw NotImplementedException("Offset type {}", info.type.Value());
    }
    return fmt::format("{}({})", constructor, value);
}
|
|
|
|
|
2021-05-29 19:03:28 +00:00
|
|
|
// Wraps |value| in the signed-integer constructor for texelFetch coordinates.
// Unlike CastToIntVec, array types gain one component for the layer index.
std::string TexelFetchCastToInt(std::string_view value, const IR::TextureInstInfo& info) {
    std::string_view constructor;
    switch (info.type) {
    case TextureType::Color1D:
        constructor = "int";
        break;
    case TextureType::ColorArray1D:
    case TextureType::Color2D:
        constructor = "ivec2";
        break;
    case TextureType::ColorArray2D:
    case TextureType::Color3D:
    case TextureType::ColorCube:
        constructor = "ivec3";
        break;
    case TextureType::ColorArrayCube:
        constructor = "ivec4";
        break;
    default:
        throw NotImplementedException("Offset type {}", info.type.Value());
    }
    return fmt::format("{}({})", constructor, value);
}
|
|
|
|
|
2021-05-29 18:21:25 +00:00
|
|
|
// Picks the float vector constructor used to pack texture coordinates together
// with the depth-compare reference for shadow sampler accesses.
std::string ShadowSamplerVecCast(TextureType type) {
    const bool wide{type == TextureType::ColorArray2D || type == TextureType::ColorCube ||
                    type == TextureType::ColorArrayCube};
    return wide ? "vec4" : "vec3";
}
|
|
|
|
|
2021-05-29 20:58:33 +00:00
|
|
|
// Builds the "ivec2[](...)" literal of four gather offsets for PTP
// (per-texel-pixel) gathers. Both sources must be immediate
// CompositeConstructU32x4 values; otherwise the emitter is stubbed and an
// empty string is returned.
std::string PtpOffsets(const IR::Value& offset, const IR::Value& offset2) {
    const std::array insts{offset.InstRecursive(), offset2.InstRecursive()};
    if (!insts[0]->AreAllArgsImmediates() || !insts[1]->AreAllArgsImmediates()) {
        // LOG_WARNING("Not all arguments in PTP are immediate, STUBBING");
        return "";
    }
    const IR::Opcode opcode{insts[0]->GetOpcode()};
    if (opcode != insts[1]->GetOpcode() || opcode != IR::Opcode::CompositeConstructU32x4) {
        throw LogicError("Invalid PTP arguments");
    }
    const auto read{[&insts](unsigned inst, unsigned arg) { return insts[inst]->Arg(arg).U32(); }};

    // Each composite contributes two ivec2 pairs: (arg0,arg1) and (arg2,arg3).
    std::string result{"ivec2[]("};
    for (unsigned inst = 0; inst < 2; ++inst) {
        for (unsigned arg = 0; arg < 4; arg += 2) {
            result += fmt::format("ivec2({},{}),", read(inst, arg), read(inst, arg + 1));
        }
    }
    result.back() = ')'; // replace the trailing comma
    return result;
}
|
|
|
|
|
2021-05-29 05:06:29 +00:00
|
|
|
// Looks up the sparse-residency pseudo-op attached to |inst|, invalidates it
// so it is not emitted separately, and returns it (nullptr when absent).
IR::Inst* PrepareSparse(IR::Inst& inst) {
    IR::Inst* const sparse{inst.GetAssociatedPseudoOperation(IR::Opcode::GetSparseFromOp)};
    if (sparse != nullptr) {
        sparse->Invalidate();
    }
    return sparse;
}
|
2021-05-27 01:18:17 +00:00
|
|
|
} // namespace
|
2021-05-26 04:16:20 +00:00
|
|
|
|
|
|
|
// Emits an implicit-LOD texture sample, optionally with a constant texel
// offset and/or an LOD bias, into a fresh F32x4 variable. When a sparse
// residency pseudo-op is attached, the ARB sparse-texture intrinsics are used
// and the residency bool is written instead.
void EmitImageSampleImplicitLod([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
                                [[maybe_unused]] const IR::Value& index,
                                [[maybe_unused]] std::string_view coords,
                                [[maybe_unused]] std::string_view bias_lc,
                                [[maybe_unused]] const IR::Value& offset) {
    const auto info{inst.Flags<IR::TextureInstInfo>()};
    if (info.has_lod_clamp) {
        throw NotImplementedException("Lod clamp samples");
    }
    const auto texture{Texture(ctx, info, index)};
    // Optional trailing ",bias" argument appended to the GLSL call.
    const auto bias{info.has_bias ? fmt::format(",{}", bias_lc) : ""};
    const auto texel{ctx.var_alloc.Define(inst, GlslVarType::F32x4)};
    const auto sparse_inst{PrepareSparse(inst)};
    if (!sparse_inst) {
        if (!offset.IsEmpty()) {
            ctx.Add("{}=textureOffset({},{},{}{});", texel, texture, coords,
                    CastToIntVec(ctx.var_alloc.Consume(offset), info), bias);
        } else {
            // Implicit derivatives only exist in fragment shaders; other
            // stages fall back to an explicit LOD of 0 (bias is dropped).
            if (ctx.stage == Stage::Fragment) {
                ctx.Add("{}=texture({},{}{});", texel, texture, coords, bias);
            } else {
                ctx.Add("{}=textureLod({},{},0.0);", texel, texture, coords);
            }
        }
        return;
    }
    // TODO: Query sparseTexels extension support
    if (!offset.IsEmpty()) {
        ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureOffsetARB({},{},{},{}{}));",
                  *sparse_inst, texture, coords, CastToIntVec(ctx.var_alloc.Consume(offset), info),
                  texel, bias);
    } else {
        ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureARB({},{},{}{}));", *sparse_inst,
                  texture, coords, texel, bias);
    }
}
|
|
|
|
|
|
|
|
void EmitImageSampleExplicitLod([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
|
|
|
|
[[maybe_unused]] const IR::Value& index,
|
|
|
|
[[maybe_unused]] std::string_view coords,
|
|
|
|
[[maybe_unused]] std::string_view lod_lc,
|
|
|
|
[[maybe_unused]] const IR::Value& offset) {
|
2021-05-28 17:55:07 +00:00
|
|
|
const auto info{inst.Flags<IR::TextureInstInfo>()};
|
|
|
|
if (info.has_bias) {
|
|
|
|
throw NotImplementedException("Bias texture samples");
|
|
|
|
}
|
|
|
|
if (info.has_lod_clamp) {
|
|
|
|
throw NotImplementedException("Lod clamp samples");
|
|
|
|
}
|
|
|
|
const auto texture{Texture(ctx, info, index)};
|
2021-05-30 21:27:00 +00:00
|
|
|
const auto texel{ctx.var_alloc.Define(inst, GlslVarType::F32x4)};
|
2021-05-29 05:06:29 +00:00
|
|
|
const auto sparse_inst{PrepareSparse(inst)};
|
|
|
|
if (!sparse_inst) {
|
|
|
|
if (!offset.IsEmpty()) {
|
|
|
|
ctx.Add("{}=textureLodOffset({},{},{},{});", texel, texture, coords, lod_lc,
|
2021-05-30 21:27:00 +00:00
|
|
|
CastToIntVec(ctx.var_alloc.Consume(offset), info));
|
2021-05-29 05:06:29 +00:00
|
|
|
} else {
|
|
|
|
ctx.Add("{}=textureLod({},{},{});", texel, texture, coords, lod_lc);
|
|
|
|
}
|
|
|
|
return;
|
|
|
|
}
|
2021-05-29 18:10:24 +00:00
|
|
|
// TODO: Query sparseTexels extension support
|
2021-05-28 17:55:07 +00:00
|
|
|
if (!offset.IsEmpty()) {
|
2021-05-29 05:06:29 +00:00
|
|
|
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTexelFetchOffsetARB({},{},int({}),{},{}));",
|
|
|
|
*sparse_inst, texture, CastToIntVec(coords, info), lod_lc,
|
2021-05-30 21:27:00 +00:00
|
|
|
CastToIntVec(ctx.var_alloc.Consume(offset), info), texel);
|
2021-05-28 17:55:07 +00:00
|
|
|
} else {
|
2021-05-29 05:06:29 +00:00
|
|
|
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureLodARB({},{},{},{}));", *sparse_inst,
|
|
|
|
texture, coords, lod_lc, texel);
|
2021-05-28 17:55:07 +00:00
|
|
|
}
|
2021-05-26 04:16:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
void EmitImageSampleDrefImplicitLod([[maybe_unused]] EmitContext& ctx,
|
|
|
|
[[maybe_unused]] IR::Inst& inst,
|
|
|
|
[[maybe_unused]] const IR::Value& index,
|
|
|
|
[[maybe_unused]] std::string_view coords,
|
|
|
|
[[maybe_unused]] std::string_view dref,
|
|
|
|
[[maybe_unused]] std::string_view bias_lc,
|
|
|
|
[[maybe_unused]] const IR::Value& offset) {
|
2021-05-29 18:21:25 +00:00
|
|
|
const auto info{inst.Flags<IR::TextureInstInfo>()};
|
2021-05-29 19:03:28 +00:00
|
|
|
const auto sparse_inst{PrepareSparse(inst)};
|
|
|
|
if (sparse_inst) {
|
|
|
|
throw NotImplementedException("Sparse texture samples");
|
|
|
|
}
|
2021-05-29 18:21:25 +00:00
|
|
|
if (info.has_bias) {
|
|
|
|
throw NotImplementedException("Bias texture samples");
|
|
|
|
}
|
|
|
|
if (info.has_lod_clamp) {
|
|
|
|
throw NotImplementedException("Lod clamp samples");
|
|
|
|
}
|
|
|
|
if (!offset.IsEmpty()) {
|
|
|
|
throw NotImplementedException("textureLodOffset");
|
|
|
|
}
|
|
|
|
const auto texture{Texture(ctx, info, index)};
|
|
|
|
const auto bias{info.has_bias ? fmt::format(",{}", bias_lc) : ""};
|
|
|
|
const auto cast{ShadowSamplerVecCast(info.type)};
|
|
|
|
if (ctx.stage == Stage::Fragment) {
|
|
|
|
ctx.AddF32("{}=texture({},{}({},{}){});", inst, texture, cast, coords, dref, bias);
|
|
|
|
} else {
|
|
|
|
ctx.AddF32("{}=textureLod({},{}({},{}),0.0);", inst, texture, cast, coords, dref);
|
|
|
|
}
|
2021-05-26 04:16:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Emits an explicit-LOD depth-compare (shadow) sample. The compare reference
// is packed into the coordinate vector except for cube arrays, which pass it
// as a separate argument.
void EmitImageSampleDrefExplicitLod([[maybe_unused]] EmitContext& ctx,
                                    [[maybe_unused]] IR::Inst& inst,
                                    [[maybe_unused]] const IR::Value& index,
                                    [[maybe_unused]] std::string_view coords,
                                    [[maybe_unused]] std::string_view dref,
                                    [[maybe_unused]] std::string_view lod_lc,
                                    [[maybe_unused]] const IR::Value& offset) {
    const auto info{inst.Flags<IR::TextureInstInfo>()};
    const auto sparse_inst{PrepareSparse(inst)};
    if (sparse_inst) {
        throw NotImplementedException("Sparse texture samples");
    }
    if (info.has_bias) {
        throw NotImplementedException("Bias texture samples");
    }
    if (info.has_lod_clamp) {
        throw NotImplementedException("Lod clamp samples");
    }
    if (!offset.IsEmpty()) {
        throw NotImplementedException("textureLodOffset");
    }
    const auto texture{Texture(ctx, info, index)};
    if (info.type == TextureType::ColorArrayCube) {
        // NOTE(review): core GLSL does not define textureLod for
        // samplerCubeArrayShadow — presumably relies on vendor/extension
        // behavior; confirm against the target drivers.
        ctx.AddF32("{}=textureLod({},{},{},{});", inst, texture, coords, dref, lod_lc);
    } else {
        // NOTE(review): the vec3 pack assumes 2D (or lower) coordinates plus
        // dref; array-2D/cube types would need vec4 — TODO confirm reachable
        // types here.
        ctx.AddF32("{}=textureLod({},vec3({},{}),{});", inst, texture, coords, dref, lod_lc);
    }
}
|
|
|
|
|
|
|
|
void EmitImageGather([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
|
|
|
|
[[maybe_unused]] const IR::Value& index,
|
|
|
|
[[maybe_unused]] std::string_view coords,
|
|
|
|
[[maybe_unused]] const IR::Value& offset,
|
|
|
|
[[maybe_unused]] const IR::Value& offset2) {
|
2021-05-29 20:58:33 +00:00
|
|
|
const auto info{inst.Flags<IR::TextureInstInfo>()};
|
|
|
|
const auto texture{Texture(ctx, info, index)};
|
2021-05-30 21:27:00 +00:00
|
|
|
const auto texel{ctx.var_alloc.Define(inst, GlslVarType::F32x4)};
|
2021-05-29 20:58:33 +00:00
|
|
|
const auto sparse_inst{PrepareSparse(inst)};
|
|
|
|
if (!sparse_inst) {
|
|
|
|
if (offset.IsEmpty()) {
|
|
|
|
ctx.Add("{}=textureGather({},{},int({}));", texel, texture, coords,
|
|
|
|
info.gather_component);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
if (offset2.IsEmpty()) {
|
|
|
|
ctx.Add("{}=textureGatherOffset({},{},{},int({}));", texel, texture, coords,
|
2021-05-30 21:27:00 +00:00
|
|
|
CastToIntVec(ctx.var_alloc.Consume(offset), info), info.gather_component);
|
2021-05-29 20:58:33 +00:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
// PTP
|
|
|
|
const auto offsets{PtpOffsets(offset, offset2)};
|
|
|
|
ctx.Add("{}=textureGatherOffsets({},{},{},int({}));", texel, texture, coords, offsets,
|
|
|
|
info.gather_component);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
// TODO: Query sparseTexels extension support
|
|
|
|
if (offset.IsEmpty()) {
|
|
|
|
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherARB({},{},{},int({})));",
|
|
|
|
*sparse_inst, texture, coords, texel, info.gather_component);
|
|
|
|
}
|
|
|
|
if (offset2.IsEmpty()) {
|
|
|
|
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetARB({},{},{},{},int({})));",
|
|
|
|
*sparse_inst, texture, CastToIntVec(coords, info),
|
2021-05-30 21:27:00 +00:00
|
|
|
CastToIntVec(ctx.var_alloc.Consume(offset), info), texel, info.gather_component);
|
2021-05-29 20:58:33 +00:00
|
|
|
}
|
|
|
|
// PTP
|
|
|
|
const auto offsets{PtpOffsets(offset, offset2)};
|
|
|
|
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetARB({},{},{},{},int({})));",
|
|
|
|
*sparse_inst, texture, CastToIntVec(coords, info), offsets, texel,
|
|
|
|
info.gather_component);
|
2021-05-26 04:16:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
void EmitImageGatherDref([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
|
|
|
|
[[maybe_unused]] const IR::Value& index,
|
|
|
|
[[maybe_unused]] std::string_view coords,
|
|
|
|
[[maybe_unused]] const IR::Value& offset,
|
|
|
|
[[maybe_unused]] const IR::Value& offset2,
|
|
|
|
[[maybe_unused]] std::string_view dref) {
|
2021-05-29 20:58:33 +00:00
|
|
|
const auto info{inst.Flags<IR::TextureInstInfo>()};
|
|
|
|
const auto texture{Texture(ctx, info, index)};
|
2021-05-30 21:27:00 +00:00
|
|
|
const auto texel{ctx.var_alloc.Define(inst, GlslVarType::F32x4)};
|
2021-05-29 20:58:33 +00:00
|
|
|
const auto sparse_inst{PrepareSparse(inst)};
|
|
|
|
if (!sparse_inst) {
|
|
|
|
if (offset.IsEmpty()) {
|
|
|
|
ctx.Add("{}=textureGather({},{},{});", texel, texture, coords, dref);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
if (offset2.IsEmpty()) {
|
|
|
|
ctx.Add("{}=textureGatherOffset({},{},{},{});", texel, texture, coords, dref,
|
2021-05-30 21:27:00 +00:00
|
|
|
CastToIntVec(ctx.var_alloc.Consume(offset), info));
|
2021-05-29 20:58:33 +00:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
// PTP
|
|
|
|
const auto offsets{PtpOffsets(offset, offset2)};
|
|
|
|
ctx.Add("{}=textureGatherOffsets({},{},{},{});", texel, texture, coords, dref, offsets);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
// TODO: Query sparseTexels extension support
|
|
|
|
if (offset.IsEmpty()) {
|
|
|
|
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherARB({},{},{},{}));", *sparse_inst,
|
|
|
|
texture, coords, dref, texel);
|
|
|
|
}
|
|
|
|
if (offset2.IsEmpty()) {
|
|
|
|
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetARB({},{},{},,{},{}));",
|
|
|
|
*sparse_inst, texture, CastToIntVec(coords, info), dref,
|
2021-05-30 21:27:00 +00:00
|
|
|
CastToIntVec(ctx.var_alloc.Consume(offset), info), texel);
|
2021-05-29 20:58:33 +00:00
|
|
|
}
|
|
|
|
// PTP
|
|
|
|
const auto offsets{PtpOffsets(offset, offset2)};
|
|
|
|
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetARB({},{},{},,{},{}));",
|
|
|
|
*sparse_inst, texture, CastToIntVec(coords, info), dref, offsets, texel);
|
2021-05-26 04:16:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
void EmitImageFetch([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
|
|
|
|
[[maybe_unused]] const IR::Value& index,
|
|
|
|
[[maybe_unused]] std::string_view coords,
|
|
|
|
[[maybe_unused]] std::string_view offset, [[maybe_unused]] std::string_view lod,
|
|
|
|
[[maybe_unused]] std::string_view ms) {
|
2021-05-29 19:03:28 +00:00
|
|
|
const auto info{inst.Flags<IR::TextureInstInfo>()};
|
|
|
|
if (info.has_bias) {
|
|
|
|
throw NotImplementedException("Bias texture samples");
|
|
|
|
}
|
|
|
|
if (info.has_lod_clamp) {
|
|
|
|
throw NotImplementedException("Lod clamp samples");
|
|
|
|
}
|
|
|
|
const auto texture{Texture(ctx, info, index)};
|
|
|
|
const auto sparse_inst{PrepareSparse(inst)};
|
2021-05-30 21:27:00 +00:00
|
|
|
const auto texel{ctx.var_alloc.Define(inst, GlslVarType::F32x4)};
|
2021-05-29 19:03:28 +00:00
|
|
|
if (!sparse_inst) {
|
|
|
|
if (!offset.empty()) {
|
|
|
|
ctx.Add("{}=texelFetchOffset({},{},int({}),{});", texel, texture,
|
|
|
|
TexelFetchCastToInt(coords, info), lod, TexelFetchCastToInt(offset, info));
|
|
|
|
} else {
|
|
|
|
ctx.Add("{}=texelFetch({},{},int({}));", texel, texture,
|
|
|
|
TexelFetchCastToInt(coords, info), lod);
|
|
|
|
}
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
// TODO: Query sparseTexels extension support
|
|
|
|
if (!offset.empty()) {
|
|
|
|
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTexelFetchOffsetARB({},{},int({}),{},{}));",
|
|
|
|
*sparse_inst, texture, CastToIntVec(coords, info), lod,
|
|
|
|
CastToIntVec(offset, info), texel);
|
|
|
|
} else {
|
|
|
|
ctx.AddU1("{}=sparseTexelsResidentARB(sparseTexelFetchARB({},{},{},{}));", *sparse_inst,
|
|
|
|
texture, CastToIntVec(coords, info), lod, texel);
|
|
|
|
}
|
2021-05-26 04:16:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Emits a uvec4 of (width[, height[, depth/layers]], mip count) for the given
// texture, zero-padding the unused size components.
void EmitImageQueryDimensions([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
                              [[maybe_unused]] const IR::Value& index,
                              [[maybe_unused]] std::string_view lod) {
    const auto info{inst.Flags<IR::TextureInstInfo>()};
    const auto texture{Texture(ctx, info, index)};
    switch (info.type) {
    case TextureType::Color1D:
        // One size component, two zero pads, then the level count.
        return ctx.AddU32x4(
            "{}=uvec4(uint(textureSize({},int({}))),0u,0u,uint(textureQueryLevels({})));", inst,
            texture, lod, texture);
    case TextureType::ColorArray1D:
    case TextureType::Color2D:
    case TextureType::ColorCube:
        // Two size components, one zero pad, then the level count.
        return ctx.AddU32x4(
            "{}=uvec4(uvec2(textureSize({},int({}))),0u,uint(textureQueryLevels({})));", inst,
            texture, lod, texture);
    case TextureType::ColorArray2D:
    case TextureType::Color3D:
    case TextureType::ColorArrayCube:
        // Three size components, then the level count.
        return ctx.AddU32x4(
            "{}=uvec4(uvec3(textureSize({},int({}))),uint(textureQueryLevels({})));", inst, texture,
            lod, texture);
    case TextureType::Buffer:
        throw NotImplementedException("Texture buffers");
    }
    throw LogicError("Unspecified image type {}", info.type.Value());
}
|
|
|
|
|
|
|
|
void EmitImageQueryLod([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
|
|
|
|
[[maybe_unused]] const IR::Value& index,
|
|
|
|
[[maybe_unused]] std::string_view coords) {
|
2021-05-30 01:12:52 +00:00
|
|
|
const auto info{inst.Flags<IR::TextureInstInfo>()};
|
|
|
|
const auto texture{Texture(ctx, info, index)};
|
|
|
|
return ctx.AddF32x4("{}=vec4(textureQueryLod({},{}),0.0,0.0);", inst, texture, coords);
|
2021-05-26 04:16:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Explicit-gradient sampling is not implemented in the GLSL backend yet.
void EmitImageGradient([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
                       [[maybe_unused]] const IR::Value& index,
                       [[maybe_unused]] std::string_view coords,
                       [[maybe_unused]] std::string_view derivates,
                       [[maybe_unused]] std::string_view offset,
                       [[maybe_unused]] std::string_view lod_clamp) {
    throw NotImplementedException("GLSL Instruction");
}

// Storage-image loads are not implemented in the GLSL backend yet.
void EmitImageRead([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
                   [[maybe_unused]] const IR::Value& index,
                   [[maybe_unused]] std::string_view coords) {
    throw NotImplementedException("GLSL Instruction");
}

// Storage-image stores are not implemented in the GLSL backend yet.
void EmitImageWrite([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
                    [[maybe_unused]] const IR::Value& index,
                    [[maybe_unused]] std::string_view coords,
                    [[maybe_unused]] std::string_view color) {
    throw NotImplementedException("GLSL Instruction");
}
|
|
|
|
|
|
|
|
// Bindless image instructions (handle read from memory rather than a bound
// descriptor) are not implemented in the GLSL backend.

void EmitBindlessImageSampleImplicitLod(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBindlessImageSampleExplicitLod(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBindlessImageSampleDrefImplicitLod(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBindlessImageSampleDrefExplicitLod(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBindlessImageGather(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBindlessImageGatherDref(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBindlessImageFetch(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBindlessImageQueryDimensions(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBindlessImageQueryLod(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBindlessImageGradient(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBindlessImageRead(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBindlessImageWrite(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}
|
|
|
|
|
|
|
|
// Bound-image instructions (pre-descriptor-pass form) are not implemented in
// the GLSL backend.

void EmitBoundImageSampleImplicitLod(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBoundImageSampleExplicitLod(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBoundImageSampleDrefImplicitLod(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBoundImageSampleDrefExplicitLod(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBoundImageGather(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBoundImageGatherDref(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBoundImageFetch(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBoundImageQueryDimensions(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBoundImageQueryLod(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBoundImageGradient(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBoundImageRead(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}

void EmitBoundImageWrite(EmitContext&) {
    throw NotImplementedException("GLSL Instruction");
}
|
|
|
|
|
|
|
|
} // namespace Shader::Backend::GLSL
|