// Copyright 2018 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include "common/cityhash.h"
#include "common/microprofile.h"
#include "core/core.h"
#include "core/memory.h"
#include "video_core/dma_pusher.h"
#include "video_core/engines/maxwell_3d.h"
#include "video_core/gpu.h"
#include "video_core/memory_manager.h"

namespace Tegra {

DmaPusher::DmaPusher(Core::System& system_, GPU& gpu_) : gpu{gpu_}, system{system_} {}

DmaPusher::~DmaPusher() = default;

MICROPROFILE_DEFINE(DispatchCalls, "GPU", "Execute command buffer", MP_RGB(128, 128, 192));

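// Drains every queued command list: repeatedly decodes pushbuffer entries via Step() until the
// queue is empty (or emulation stops), then flushes the pending GPU work.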
void DmaPusher::DispatchCalls() {
    MICROPROFILE_SCOPE(DispatchCalls);

    gpu.SyncGuestHost();

    dma_pushbuffer_subindex = 0;

    dma_state.is_last_call = true;

    while (system.IsPoweredOn()) {
        if (!Step()) {
            break;
        }
    }
    gpu.FlushCommands();
    gpu.SyncGuestHost();
    gpu.OnCommandListEnd();
}

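// Processes a single command list entry from the queue: either consumes a prefetched command list
// directly or reads the command headers from GPU memory, then decodes each CommandHeader and
// dispatches the resulting method calls. Returns false when there is nothing left to do.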
bool DmaPusher::Step() {
    if (!ib_enable || dma_pushbuffer.empty()) {
        // pushbuffer empty and IB empty or nonexistent - nothing to do
        return false;
    }

    CommandList& command_list{dma_pushbuffer.front()};

    ASSERT_OR_EXECUTE(
        command_list.command_lists.size() || command_list.prefetch_command_list.size(), {
            // Somehow the command_list is empty; to avoid a crash,
            // ignore it and assume its size is 0.
            dma_pushbuffer.pop();
            dma_pushbuffer_subindex = 0;
            return true;
        });

    if (command_list.prefetch_command_list.size()) {
        // Prefetched command list from nvdrv, used for things like synchronization
        command_headers = std::move(command_list.prefetch_command_list);
        dma_pushbuffer.pop();
    } else {
        const CommandListHeader command_list_header{
            command_list.command_lists[dma_pushbuffer_subindex++]};
        const GPUVAddr dma_get = command_list_header.addr;

        if (dma_pushbuffer_subindex >= command_list.command_lists.size()) {
            // We've gone through the current list, remove it from the queue
            dma_pushbuffer.pop();
            dma_pushbuffer_subindex = 0;
        }

        if (command_list_header.size == 0) {
            return true;
        }

        // Push buffer non-empty, read the command headers from GPU memory
        command_headers.resize(command_list_header.size);
        gpu.MemoryManager().ReadBlockUnsafe(dma_get, command_headers.data(),
                                            command_list_header.size * sizeof(u32));
    }
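    // Decode each 32-bit CommandHeader. A header either continues the current method (data words
    // counted by dma_state.method_count) or starts a new one, with the submission mode selecting
    // how the method address advances between data words.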
    for (std::size_t index = 0; index < command_headers.size();) {
        const CommandHeader& command_header = command_headers[index];

        if (dma_state.method_count) {
            // Data word of methods command
            if (dma_state.non_incrementing) {
                const u32 max_write = static_cast<u32>(
                    std::min<std::size_t>(index + dma_state.method_count, command_headers.size()) -
                    index);
                CallMultiMethod(&command_header.argument, max_write);
                dma_state.method_count -= max_write;
                dma_state.is_last_call = true;
                index += max_write;
                continue;
            } else {
                dma_state.is_last_call = dma_state.method_count <= 1;
                CallMethod(command_header.argument);
            }

            if (!dma_state.non_incrementing) {
                dma_state.method++;
            }

            if (dma_increment_once) {
                dma_state.non_incrementing = true;
            }

            dma_state.method_count--;
        } else {
            // No command active - this is the first word of a new one
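            // Submission mode semantics: Increasing advances the method address after every data
            // word, NonIncreasing keeps writing to the same method, Inline carries its single
            // argument in the header itself, and IncreaseOnce advances the method only after the
            // first data word.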
            switch (command_header.mode) {
            case SubmissionMode::Increasing:
                SetState(command_header);
                dma_state.non_incrementing = false;
                dma_increment_once = false;
                break;
            case SubmissionMode::NonIncreasing:
                SetState(command_header);
                dma_state.non_incrementing = true;
                dma_increment_once = false;
                break;
            case SubmissionMode::Inline:
                dma_state.method = command_header.method;
                dma_state.subchannel = command_header.subchannel;
                CallMethod(command_header.arg_count);
                dma_state.non_incrementing = true;
                dma_increment_once = false;
                break;
            case SubmissionMode::IncreaseOnce:
                SetState(command_header);
                dma_state.non_incrementing = false;
                dma_increment_once = true;
                break;
            default:
                break;
            }
        }
        index++;
    }

    return true;
}

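// Latches the method address, subchannel and data-word count from a freshly decoded header.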
void DmaPusher::SetState(const CommandHeader& command_header) {
    dma_state.method = command_header.method;
    dma_state.subchannel = command_header.subchannel;
    dma_state.method_count = command_header.method_count;
}

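// Dispatches a single method call: methods below non_puller_methods are handled by the GPU puller
// itself, anything else is forwarded to the engine bound to the current subchannel.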
void DmaPusher::CallMethod(u32 argument) const {
    if (dma_state.method < non_puller_methods) {
        gpu.CallMethod(GPU::MethodCall{
            dma_state.method,
            argument,
            dma_state.subchannel,
            dma_state.method_count,
        });
    } else {
        subchannels[dma_state.subchannel]->CallMethod(dma_state.method, argument,
                                                      dma_state.is_last_call);
    }
}

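// Same routing as CallMethod, but submits a batch of arguments for one method in a single call.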
void DmaPusher::CallMultiMethod(const u32* base_start, u32 num_methods) const {
    if (dma_state.method < non_puller_methods) {
        gpu.CallMultiMethod(dma_state.method, dma_state.subchannel, base_start, num_methods,
                            dma_state.method_count);
    } else {
        subchannels[dma_state.subchannel]->CallMultiMethod(dma_state.method, base_start,
                                                           num_methods, dma_state.method_count);
    }
}

} // namespace Tegra