2019-01-22 23:49:31 +00:00
|
|
|
// Copyright 2018 yuzu Emulator Project
|
|
|
|
// Licensed under GPLv2 or any later version
|
|
|
|
// Refer to the license.txt file included.
|
|
|
|
|
2019-07-12 00:54:07 +00:00
|
|
|
#include <bitset>
|
2019-03-06 01:25:01 +00:00
|
|
|
#include "common/assert.h"
|
2019-01-22 23:49:31 +00:00
|
|
|
#include "common/logging/log.h"
|
2019-04-22 23:05:43 +00:00
|
|
|
#include "core/core.h"
|
2019-01-22 23:49:31 +00:00
|
|
|
#include "video_core/engines/kepler_compute.h"
|
2019-04-22 23:05:43 +00:00
|
|
|
#include "video_core/engines/maxwell_3d.h"
|
2019-11-18 21:35:21 +00:00
|
|
|
#include "video_core/engines/shader_type.h"
|
2019-01-22 23:49:31 +00:00
|
|
|
#include "video_core/memory_manager.h"
|
2019-04-22 23:05:43 +00:00
|
|
|
#include "video_core/rasterizer_interface.h"
|
|
|
|
#include "video_core/renderer_base.h"
|
|
|
|
#include "video_core/textures/decoders.h"
|
2019-01-22 23:49:31 +00:00
|
|
|
|
|
|
|
namespace Tegra::Engines {
|
|
|
|
|
2019-04-22 23:05:43 +00:00
|
|
|
// Binds the compute engine to its owning system, the rasterizer that executes
// dispatches, and the GPU memory manager used for address translation. The
// upload state helper is wired directly to this engine's upload registers.
KeplerCompute::KeplerCompute(Core::System& system, VideoCore::RasterizerInterface& rasterizer,
                             MemoryManager& memory_manager)
    : system{system}, rasterizer{rasterizer},
      memory_manager{memory_manager}, upload_state{memory_manager, regs.upload} {}
|
2019-01-22 23:49:31 +00:00
|
|
|
|
|
|
|
// Defaulted: the engine owns no resources beyond its value members.
KeplerCompute::~KeplerCompute() = default;
|
|
|
|
|
|
|
|
// Dispatches a single method write coming from the GPU command processor.
// The argument is mirrored into the register file and side effects (inline
// uploads, compute launches) are triggered for the methods that have them.
void KeplerCompute::CallMethod(const GPU::MethodCall& method_call) {
    ASSERT_MSG(method_call.method < Regs::NUM_REGS,
               "Invalid KeplerCompute register, increase the size of the Regs structure");
    if (method_call.method >= Regs::NUM_REGS) {
        // ASSERT_MSG does not abort in release builds; bail out here so an
        // out-of-range method can never write past the register array.
        return;
    }

    regs.reg_array[method_call.method] = method_call.argument;

    switch (method_call.method) {
    case KEPLER_COMPUTE_REG_INDEX(exec_upload): {
        // Starts an inline memory upload; `linear` selects the addressing mode.
        upload_state.ProcessExec(regs.exec_upload.linear != 0);
        break;
    }
    case KEPLER_COMPUTE_REG_INDEX(data_upload): {
        const bool is_last_call = method_call.IsLastCall();
        upload_state.ProcessData(method_call.argument, is_last_call);
        if (is_last_call) {
            // The upload may have overwritten memory that cached GPU state
            // depends on, so notify the 3D engine.
            system.GPU().Maxwell3D().OnMemoryWrite();
        }
        break;
    }
    case KEPLER_COMPUTE_REG_INDEX(launch):
        // Writing the launch register kicks off a compute dispatch.
        ProcessLaunch();
        break;
    default:
        // Most registers are plain state written above; no side effects.
        break;
    }
}
|
|
|
|
|
2019-10-20 07:03:33 +00:00
|
|
|
// Reads the texture handle stored `offset` slots into the texture constant
// buffer of the current launch description and resolves it to TIC/TSC info.
Texture::FullTextureInfo KeplerCompute::GetTexture(std::size_t offset) const {
    const std::bitset<8> enabled_cbufs = launch_description.const_buffer_enable_mask.Value();
    ASSERT(enabled_cbufs[regs.tex_cb_index]);

    const auto& tex_cbuf = launch_description.const_buffer_config[regs.tex_cb_index];
    ASSERT(tex_cbuf.Address() != 0);

    const GPUVAddr handle_addr = tex_cbuf.Address() + offset * sizeof(Texture::TextureHandle);
    ASSERT(handle_addr < tex_cbuf.Address() + tex_cbuf.size);

    const Texture::TextureHandle handle{memory_manager.Read<u32>(handle_addr)};
    return GetTextureInfo(handle);
}
|
|
|
|
|
2019-10-20 07:03:33 +00:00
|
|
|
// Resolves a packed texture handle into its image (TIC) and sampler (TSC)
// descriptors, read from the engine's descriptor tables.
Texture::FullTextureInfo KeplerCompute::GetTextureInfo(Texture::TextureHandle tex_handle) const {
    const Texture::TICEntry tic = GetTICEntry(tex_handle.tic_id);
    const Texture::TSCEntry tsc = GetTSCEntry(tex_handle.tsc_id);
    return Texture::FullTextureInfo{tic, tsc};
}
|
|
|
|
|
2019-09-23 18:02:02 +00:00
|
|
|
// Reads a 32-bit value from the given constant buffer of the current launch
// description. Only the compute stage is valid on this engine.
u32 KeplerCompute::AccessConstBuffer32(ShaderType stage, u64 const_buffer, u64 offset) const {
    ASSERT(stage == ShaderType::Compute);
    const auto& buffer = launch_description.const_buffer_config[const_buffer];
    // Read through the memory manager rather than memcpy'ing from a raw host
    // pointer: this matches every other read in this engine and avoids
    // dereferencing a null pointer for addresses not backed by host memory.
    return memory_manager.Read<u32>(buffer.Address() + offset);
}
|
|
|
|
|
2019-09-25 13:53:18 +00:00
|
|
|
// Fetches the sampler for a bound (non-bindless) texture slot. The slot index
// is scaled to a byte offset into the engine's texture constant buffer.
SamplerDescriptor KeplerCompute::AccessBoundSampler(ShaderType stage, u64 offset) const {
    const u64 byte_offset = offset * sizeof(Texture::TextureHandle);
    return AccessBindlessSampler(stage, regs.tex_cb_index, byte_offset);
}
|
|
|
|
|
|
|
|
// Fetches a bindless sampler descriptor: the texture handle lives at `offset`
// bytes into the selected constant buffer, and its TIC/TSC entries are
// condensed into a SamplerDescriptor for the shader decompiler.
SamplerDescriptor KeplerCompute::AccessBindlessSampler(ShaderType stage, u64 const_buffer,
                                                       u64 offset) const {
    ASSERT(stage == ShaderType::Compute);
    const auto& cbuf = launch_description.const_buffer_config[const_buffer];
    const GPUVAddr handle_address = cbuf.Address() + offset;

    const Texture::TextureHandle handle{memory_manager.Read<u32>(handle_address)};
    const Texture::FullTextureInfo info = GetTextureInfo(handle);

    SamplerDescriptor descriptor = SamplerDescriptor::FromTicTexture(info.tic.texture_type.Value());
    descriptor.is_shadow.Assign(info.tsc.depth_compare_enabled.Value());
    return descriptor;
}
|
|
|
|
|
2020-01-03 20:16:29 +00:00
|
|
|
// The guest driver profile is owned by the rasterizer; forward to it.
VideoCore::GuestDriverProfile& KeplerCompute::AccessGuestDriverProfile() {
    auto& profile = rasterizer.AccessGuestDriverProfile();
    return profile;
}
|
|
|
|
|
2020-01-08 14:28:29 +00:00
|
|
|
// Const overload; same forwarding to the rasterizer-owned profile.
const VideoCore::GuestDriverProfile& KeplerCompute::AccessGuestDriverProfile() const {
    const auto& profile = rasterizer.AccessGuestDriverProfile();
    return profile;
}
|
|
|
|
|
2019-04-22 23:05:43 +00:00
|
|
|
void KeplerCompute::ProcessLaunch() {
|
|
|
|
const GPUVAddr launch_desc_loc = regs.launch_desc_loc.Address();
|
|
|
|
memory_manager.ReadBlockUnsafe(launch_desc_loc, &launch_description,
|
|
|
|
LaunchParams::NUM_LAUNCH_PARAMETERS * sizeof(u32));
|
|
|
|
|
2019-07-15 01:25:13 +00:00
|
|
|
const GPUVAddr code_addr = regs.code_loc.Address() + launch_description.program_start;
|
|
|
|
LOG_TRACE(HW_GPU, "Compute invocation launched at address 0x{:016x}", code_addr);
|
|
|
|
|
|
|
|
rasterizer.DispatchCompute(code_addr);
|
2019-04-22 23:05:43 +00:00
|
|
|
}
|
|
|
|
|
2019-07-12 00:54:07 +00:00
|
|
|
// Reads one texture image control (TIC) descriptor from the table pointed to
// by regs.tic.
Texture::TICEntry KeplerCompute::GetTICEntry(u32 tic_index) const {
    const GPUVAddr entry_address{regs.tic.Address() + tic_index * sizeof(Texture::TICEntry)};

    Texture::TICEntry entry;
    memory_manager.ReadBlockUnsafe(entry_address, &entry, sizeof(Texture::TICEntry));

    // TODO(Subv): Different data types for separate components are not supported
    const auto red_type{entry.r_type.Value()};
    DEBUG_ASSERT(red_type == entry.g_type.Value() && red_type == entry.b_type.Value() &&
                 red_type == entry.a_type.Value());

    return entry;
}
|
|
|
|
|
|
|
|
// Reads one texture sampler control (TSC) descriptor from the table pointed to
// by regs.tsc.
Texture::TSCEntry KeplerCompute::GetTSCEntry(u32 tsc_index) const {
    const GPUVAddr entry_address{regs.tsc.Address() + tsc_index * sizeof(Texture::TSCEntry)};
    Texture::TSCEntry entry;
    memory_manager.ReadBlockUnsafe(entry_address, &entry, sizeof(Texture::TSCEntry));
    return entry;
}
|
|
|
|
|
2019-01-22 23:49:31 +00:00
|
|
|
} // namespace Tegra::Engines
|