early-access version 3926

pineappleEA 2023-10-12 05:36:54 +02:00
parent 595fefba8d
commit 91ade31ddd
25 changed files with 484 additions and 405 deletions


@@ -1,7 +1,7 @@
 yuzu emulator early access
 =============
-This is the source code for early-access 3925.
+This is the source code for early-access 3926.
 ## Legal Notice


@@ -27,7 +27,7 @@ if (CMAKE_SYSTEM_NAME STREQUAL "Windows" OR ANDROID)
 set(CAN_BUILD_NX_TZDB false)
 endif()
-set(NX_TZDB_VERSION "220816")
+set(NX_TZDB_VERSION "221202")
 set(NX_TZDB_ARCHIVE "${CMAKE_CURRENT_BINARY_DIR}/${NX_TZDB_VERSION}.zip")
 set(NX_TZDB_ROMFS_DIR "${CMAKE_CURRENT_BINARY_DIR}/nx_tzdb")


@@ -16,7 +16,7 @@ namespace Service::Capture {
 void LoopProcess(Core::System& system) {
 auto server_manager = std::make_unique<ServerManager>(system);
-auto album_manager = std::make_shared<AlbumManager>();
+auto album_manager = std::make_shared<AlbumManager>(system);
 server_manager->RegisterNamedService(
 "caps:a", std::make_shared<IAlbumAccessorService>(system, album_manager));


@@ -8,12 +8,15 @@
 #include "common/fs/file.h"
 #include "common/fs/path_util.h"
 #include "common/logging/log.h"
+#include "core/core.h"
 #include "core/hle/service/caps/caps_manager.h"
 #include "core/hle/service/caps/caps_result.h"
+#include "core/hle/service/time/time_manager.h"
+#include "core/hle/service/time/time_zone_content_manager.h"
 namespace Service::Capture {
-AlbumManager::AlbumManager() {}
+AlbumManager::AlbumManager(Core::System& system_) : system{system_} {}
 AlbumManager::~AlbumManager() = default;
@@ -83,8 +86,36 @@ Result AlbumManager::GetAlbumFileList(std::vector<AlbumEntry>& out_entries, Albu
 }
 Result AlbumManager::GetAlbumFileList(std::vector<ApplicationAlbumFileEntry>& out_entries,
+ContentType contex_type, s64 start_posix_time,
+s64 end_posix_time, u64 aruid) {
+if (!is_mounted) {
+return ResultIsNotMounted;
+}
+std::vector<ApplicationAlbumEntry> album_entries;
+const auto start_date = ConvertToAlbumDateTime(start_posix_time);
+const auto end_date = ConvertToAlbumDateTime(end_posix_time);
+const auto result = GetAlbumFileList(album_entries, contex_type, start_date, end_date, aruid);
+if (result.IsError()) {
+return result;
+}
+for (const auto& album_entry : album_entries) {
+ApplicationAlbumFileEntry entry{
+.entry = album_entry,
+.datetime = album_entry.datetime,
+.unknown = {},
+};
+out_entries.push_back(entry);
+}
+return ResultSuccess;
+}
+Result AlbumManager::GetAlbumFileList(std::vector<ApplicationAlbumEntry>& out_entries,
 ContentType contex_type, AlbumFileDateTime start_date,
-AlbumFileDateTime end_date, u64 aruid) const {
+AlbumFileDateTime end_date, u64 aruid) {
 if (!is_mounted) {
 return ResultIsNotMounted;
 }
@@ -93,31 +124,25 @@ Result AlbumManager::GetAlbumFileList(std::vector<ApplicationAlbumFileEntry>& ou
 if (file_id.type != contex_type) {
 continue;
 }
 if (file_id.date > start_date) {
 continue;
 }
 if (file_id.date < end_date) {
 continue;
 }
 if (out_entries.size() >= SdAlbumFileLimit) {
 break;
 }
 const auto entry_size = Common::FS::GetSize(path);
-ApplicationAlbumFileEntry entry{.entry =
-{
+ApplicationAlbumEntry entry{
 .size = entry_size,
 .hash{},
 .datetime = file_id.date,
 .storage = file_id.storage,
 .content = contex_type,
 .unknown = 1,
-},
-.datetime = file_id.date,
-.unknown = {}};
+};
 out_entries.push_back(entry);
 }
@@ -274,12 +299,12 @@ Result AlbumManager::GetAlbumEntry(AlbumEntry& out_entry, const std::filesystem:
 .application_id = static_cast<u64>(std::stoll(application, 0, 16)),
 .date =
 {
-.year = static_cast<u16>(std::stoi(year)),
-.month = static_cast<u8>(std::stoi(month)),
-.day = static_cast<u8>(std::stoi(day)),
-.hour = static_cast<u8>(std::stoi(hour)),
-.minute = static_cast<u8>(std::stoi(minute)),
-.second = static_cast<u8>(std::stoi(second)),
+.year = static_cast<s16>(std::stoi(year)),
+.month = static_cast<s8>(std::stoi(month)),
+.day = static_cast<s8>(std::stoi(day)),
+.hour = static_cast<s8>(std::stoi(hour)),
+.minute = static_cast<s8>(std::stoi(minute)),
+.second = static_cast<s8>(std::stoi(second)),
 .unique_id = 0,
 },
 .storage = AlbumStorage::Sd,
@@ -339,4 +364,23 @@ Result AlbumManager::LoadImage(std::span<u8> out_image, const std::filesystem::p
 return ResultSuccess;
 }
+AlbumFileDateTime AlbumManager::ConvertToAlbumDateTime(u64 posix_time) const {
+Time::TimeZone::CalendarInfo calendar_date{};
+const auto& time_zone_manager =
+system.GetTimeManager().GetTimeZoneContentManager().GetTimeZoneManager();
+time_zone_manager.ToCalendarTimeWithMyRules(posix_time, calendar_date);
+return {
+.year = calendar_date.time.year,
+.month = calendar_date.time.month,
+.day = calendar_date.time.day,
+.hour = calendar_date.time.hour,
+.minute = calendar_date.time.minute,
+.second = calendar_date.time.second,
+.unique_id = 0,
+};
+}
 } // namespace Service::Capture
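
The ConvertToAlbumDateTime helper added above routes a POSIX timestamp through the emulated console's time-zone rules to obtain calendar fields. For reference, a minimal standalone sketch of the same conversion using only the C++ standard library (UTC instead of the guest time zone; the field widths mirror AlbumFileDateTime as declared in this commit, and everything else here is illustrative rather than yuzu's API):

#include <cstdint>
#include <ctime>

struct AlbumFileDateTime {
    std::int16_t year{};
    std::int8_t month{};
    std::int8_t day{};
    std::int8_t hour{};
    std::int8_t minute{};
    std::int8_t second{};
    std::int8_t unique_id{};
};

// Expand a POSIX timestamp into calendar fields. This sketch uses UTC via
// std::gmtime; yuzu instead asks the guest's time-zone manager for the rules.
AlbumFileDateTime ConvertToAlbumDateTime(std::uint64_t posix_time) {
    const std::time_t t = static_cast<std::time_t>(posix_time);
    const std::tm* const calendar = std::gmtime(&t);
    return {
        .year = static_cast<std::int16_t>(calendar->tm_year + 1900),
        .month = static_cast<std::int8_t>(calendar->tm_mon + 1),
        .day = static_cast<std::int8_t>(calendar->tm_mday),
        .hour = static_cast<std::int8_t>(calendar->tm_hour),
        .minute = static_cast<std::int8_t>(calendar->tm_min),
        .second = static_cast<std::int8_t>(calendar->tm_sec),
        .unique_id = 0,
    };
}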


@@ -37,7 +37,7 @@ namespace Service::Capture {
 class AlbumManager {
 public:
-explicit AlbumManager();
+explicit AlbumManager(Core::System& system_);
 ~AlbumManager();
 Result DeleteAlbumFile(const AlbumFileId& file_id);
@@ -45,8 +45,11 @@ public:
 Result GetAlbumFileList(std::vector<AlbumEntry>& out_entries, AlbumStorage storage,
 u8 flags) const;
 Result GetAlbumFileList(std::vector<ApplicationAlbumFileEntry>& out_entries,
+ContentType contex_type, s64 start_posix_time, s64 end_posix_time,
+u64 aruid);
+Result GetAlbumFileList(std::vector<ApplicationAlbumEntry>& out_entries,
 ContentType contex_type, AlbumFileDateTime start_date,
-AlbumFileDateTime end_date, u64 aruid) const;
+AlbumFileDateTime end_date, u64 aruid);
 Result GetAutoSavingStorage(bool& out_is_autosaving) const;
 Result LoadAlbumScreenShotImage(LoadAlbumScreenShotImageOutput& out_image_output,
 std::vector<u8>& out_image, const AlbumFileId& file_id,
@@ -65,8 +68,12 @@ private:
 Result LoadImage(std::span<u8> out_image, const std::filesystem::path& path, int width,
 int height, ScreenShotDecoderFlag flag) const;
+AlbumFileDateTime ConvertToAlbumDateTime(u64 posix_time) const;
 bool is_mounted{};
 std::unordered_map<AlbumFileId, std::filesystem::path> album_files;
+Core::System& system;
 };
 } // namespace Service::Capture


@@ -41,13 +41,13 @@ enum class ScreenShotDecoderFlag : u64 {
 // This is nn::capsrv::AlbumFileDateTime
 struct AlbumFileDateTime {
-u16 year{};
-u8 month{};
-u8 day{};
-u8 hour{};
-u8 minute{};
-u8 second{};
-u8 unique_id{};
+s16 year{};
+s8 month{};
+s8 day{};
+s8 hour{};
+s8 minute{};
+s8 second{};
+s8 unique_id{};
 friend constexpr bool operator==(const AlbumFileDateTime&, const AlbumFileDateTime&) = default;
 friend constexpr bool operator>(const AlbumFileDateTime& a, const AlbumFileDateTime& b) {


@@ -50,22 +50,35 @@ void IAlbumApplicationService::SetShimLibraryVersion(HLERequestContext& ctx) {
 void IAlbumApplicationService::GetAlbumFileList0AafeAruidDeprecated(HLERequestContext& ctx) {
 IPC::RequestParser rp{ctx};
-const auto pid{rp.Pop<s32>()};
-const auto content_type{rp.PopEnum<ContentType>()};
-const auto start_posix_time{rp.Pop<s64>()};
-const auto end_posix_time{rp.Pop<s64>()};
-const auto applet_resource_user_id{rp.Pop<u64>()};
+struct Parameters {
+ContentType content_type;
+INSERT_PADDING_BYTES(7);
+s64 start_posix_time;
+s64 end_posix_time;
+u64 applet_resource_user_id;
+};
+static_assert(sizeof(Parameters) == 0x20, "Parameters has incorrect size.");
+const auto parameters{rp.PopRaw<Parameters>()};
 LOG_WARNING(Service_Capture,
-"(STUBBED) called. pid={}, content_type={}, start_posix_time={}, "
-"end_posix_time={}, applet_resource_user_id={}",
-pid, content_type, start_posix_time, end_posix_time, applet_resource_user_id);
+"(STUBBED) called. content_type={}, start_posix_time={}, end_posix_time={}, "
+"applet_resource_user_id={}",
+parameters.content_type, parameters.start_posix_time, parameters.end_posix_time,
+parameters.applet_resource_user_id);
-// TODO: Translate posix to DateTime
+Result result = ResultSuccess;
+if (result.IsSuccess()) {
+result = manager->IsAlbumMounted(AlbumStorage::Sd);
+}
 std::vector<ApplicationAlbumFileEntry> entries;
-const Result result =
-manager->GetAlbumFileList(entries, content_type, {}, {}, applet_resource_user_id);
+if (result.IsSuccess()) {
+result = manager->GetAlbumFileList(entries, parameters.content_type,
+parameters.start_posix_time, parameters.end_posix_time,
+parameters.applet_resource_user_id);
+}
 if (!entries.empty()) {
 ctx.WriteBuffer(entries);
@@ -78,19 +91,38 @@ void IAlbumApplicationService::GetAlbumFileList0AafeAruidDeprecated(HLERequestCo
 void IAlbumApplicationService::GetAlbumFileList3AaeAruid(HLERequestContext& ctx) {
 IPC::RequestParser rp{ctx};
-const auto pid{rp.Pop<s32>()};
-const auto content_type{rp.PopEnum<ContentType>()};
-const auto start_date_time{rp.PopRaw<AlbumFileDateTime>()};
-const auto end_date_time{rp.PopRaw<AlbumFileDateTime>()};
-const auto applet_resource_user_id{rp.Pop<u64>()};
+struct Parameters {
+ContentType content_type;
+INSERT_PADDING_BYTES(1);
+AlbumFileDateTime start_date_time;
+AlbumFileDateTime end_date_time;
+INSERT_PADDING_BYTES(6);
+u64 applet_resource_user_id;
+};
+static_assert(sizeof(Parameters) == 0x20, "Parameters has incorrect size.");
+const auto parameters{rp.PopRaw<Parameters>()};
 LOG_WARNING(Service_Capture,
-"(STUBBED) called. pid={}, content_type={}, applet_resource_user_id={}", pid,
-content_type, applet_resource_user_id);
+"(STUBBED) called. content_type={}, start_date={}/{}/{}, "
+"end_date={}/{}/{}, applet_resource_user_id={}",
+parameters.content_type, parameters.start_date_time.year,
+parameters.start_date_time.month, parameters.start_date_time.day,
+parameters.end_date_time.year, parameters.end_date_time.month,
+parameters.end_date_time.day, parameters.applet_resource_user_id);
-std::vector<ApplicationAlbumFileEntry> entries;
-const Result result = manager->GetAlbumFileList(entries, content_type, start_date_time,
-end_date_time, applet_resource_user_id);
+Result result = ResultSuccess;
+if (result.IsSuccess()) {
+result = manager->IsAlbumMounted(AlbumStorage::Sd);
+}
+std::vector<ApplicationAlbumEntry> entries;
+if (result.IsSuccess()) {
+result =
+manager->GetAlbumFileList(entries, parameters.content_type, parameters.start_date_time,
+parameters.end_date_time, parameters.applet_resource_user_id);
+}
 if (!entries.empty()) {
 ctx.WriteBuffer(entries);
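
On the raw Parameters structs introduced in both handlers above: reading the whole 0x20-byte block with PopRaw relies on the field layout adding up, e.g. a 1-byte content type, 7 padding bytes to reach an 8-byte boundary, then three 8-byte values in the deprecated variant. A standalone layout sketch follows; the plain byte arrays stand in for yuzu's INSERT_PADDING_BYTES macro, and the 1-byte content type is an inference from the padding rather than something taken from the headers:

#include <cstddef>
#include <cstdint>

// Illustrative layout only; names mirror the diff, types are stand-ins.
struct Parameters {
    std::uint8_t content_type;             // offset 0x00 (1-byte ContentType assumed)
    std::uint8_t padding[7];               // offsets 0x01..0x07
    std::int64_t start_posix_time;         // offset 0x08
    std::int64_t end_posix_time;           // offset 0x10
    std::uint64_t applet_resource_user_id; // offset 0x18
};
static_assert(sizeof(Parameters) == 0x20, "Parameters has incorrect size.");
static_assert(offsetof(Parameters, applet_resource_user_id) == 0x18);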


@@ -118,6 +118,8 @@ public:
 void InsertUploadMemoryBarrier();
+void TransitionImageLayout(Image& image) {}
 FormatProperties FormatInfo(VideoCommon::ImageType type, GLenum internal_format) const;
 bool HasNativeBgr() const noexcept {


@@ -456,7 +456,7 @@ void BlitImageHelper::BlitColor(const Framebuffer* dst_framebuffer, VkImageView
 const VkPipeline pipeline = FindOrEmplaceColorPipeline(key);
 scheduler.RequestRenderpass(dst_framebuffer);
 scheduler.Record([this, dst_region, src_region, pipeline, layout, sampler,
-src_view](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+src_view](vk::CommandBuffer cmdbuf) {
 // TODO: Barriers
 const VkDescriptorSet descriptor_set = one_texture_descriptor_allocator.Commit();
 UpdateOneTextureDescriptorSet(device, descriptor_set, sampler, src_view);
@@ -481,8 +481,7 @@ void BlitImageHelper::BlitColor(const Framebuffer* dst_framebuffer, VkImageView
 const VkPipeline pipeline = FindOrEmplaceColorPipeline(key);
 scheduler.RequestOutsideRenderPassOperationContext();
 scheduler.Record([this, dst_framebuffer, src_image_view, src_image, src_sampler, dst_region,
-src_region, src_size, pipeline,
-layout](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+src_region, src_size, pipeline, layout](vk::CommandBuffer cmdbuf) {
 TransitionImageLayout(cmdbuf, src_image, VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL);
 BeginRenderPass(cmdbuf, dst_framebuffer);
 const VkDescriptorSet descriptor_set = one_texture_descriptor_allocator.Commit();
@@ -515,7 +514,7 @@ void BlitImageHelper::BlitDepthStencil(const Framebuffer* dst_framebuffer,
 const VkPipeline pipeline = FindOrEmplaceDepthStencilPipeline(key);
 scheduler.RequestRenderpass(dst_framebuffer);
 scheduler.Record([dst_region, src_region, pipeline, layout, sampler, src_depth_view,
-src_stencil_view, this](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+src_stencil_view, this](vk::CommandBuffer cmdbuf) {
 // TODO: Barriers
 const VkDescriptorSet descriptor_set = two_textures_descriptor_allocator.Commit();
 UpdateTwoTexturesDescriptorSet(device, descriptor_set, sampler, src_depth_view,
@@ -591,8 +590,8 @@ void BlitImageHelper::ClearColor(const Framebuffer* dst_framebuffer, u8 color_ma
 const VkPipeline pipeline = FindOrEmplaceClearColorPipeline(key);
 const VkPipelineLayout layout = *clear_color_pipeline_layout;
 scheduler.RequestRenderpass(dst_framebuffer);
-scheduler.Record([pipeline, layout, color_mask, clear_color,
-dst_region](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record(
+[pipeline, layout, color_mask, clear_color, dst_region](vk::CommandBuffer cmdbuf) {
 cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
 const std::array blend_color = {
 (color_mask & 0x1) ? 1.0f : 0.0f, (color_mask & 0x2) ? 1.0f : 0.0f,
@@ -618,8 +617,7 @@ void BlitImageHelper::ClearDepthStencil(const Framebuffer* dst_framebuffer, bool
 const VkPipeline pipeline = FindOrEmplaceClearStencilPipeline(key);
 const VkPipelineLayout layout = *clear_color_pipeline_layout;
 scheduler.RequestRenderpass(dst_framebuffer);
-scheduler.Record(
-[pipeline, layout, clear_depth, dst_region](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([pipeline, layout, clear_depth, dst_region](vk::CommandBuffer cmdbuf) {
 constexpr std::array blend_constants{0.0f, 0.0f, 0.0f, 0.0f};
 cmdbuf.SetBlendConstants(blend_constants.data());
 cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
@@ -638,8 +636,7 @@ void BlitImageHelper::Convert(VkPipeline pipeline, const Framebuffer* dst_frameb
 const VkExtent2D extent = GetConversionExtent(src_image_view);
 scheduler.RequestRenderpass(dst_framebuffer);
-scheduler.Record([pipeline, layout, sampler, src_view, extent, this](vk::CommandBuffer cmdbuf,
-vk::CommandBuffer) {
+scheduler.Record([pipeline, layout, sampler, src_view, extent, this](vk::CommandBuffer cmdbuf) {
 const VkOffset2D offset{
 .x = 0,
 .y = 0,
@@ -685,7 +682,7 @@ void BlitImageHelper::ConvertDepthStencil(VkPipeline pipeline, const Framebuffer
 scheduler.RequestRenderpass(dst_framebuffer);
 scheduler.Record([pipeline, layout, sampler, src_depth_view, src_stencil_view, extent,
-this](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+this](vk::CommandBuffer cmdbuf) {
 const VkOffset2D offset{
 .x = 0,
 .y = 0,


@@ -239,7 +239,7 @@ void Vulkan::RendererVulkan::RenderScreenshot(const Tegra::FramebufferConfig& fr
 memory_allocator.CreateBuffer(dst_buffer_info, MemoryUsage::Download);
 scheduler.RequestOutsideRenderPassOperationContext();
-scheduler.Record([&](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([&](vk::CommandBuffer cmdbuf) {
 const VkImageMemoryBarrier read_barrier{
 .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
 .pNext = nullptr,


@@ -202,8 +202,7 @@ void BlitScreen::Draw(const Tegra::FramebufferConfig& framebuffer,
 .depth = 1,
 },
 };
-scheduler.Record(
-[this, copy, index = image_index](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([this, copy, index = image_index](vk::CommandBuffer cmdbuf) {
 const VkImage image = *raw_images[index];
 const VkImageMemoryBarrier base_barrier{
 .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
@@ -232,8 +231,8 @@ void BlitScreen::Draw(const Tegra::FramebufferConfig& framebuffer,
 write_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
 write_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
-cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
-0, read_barrier);
+cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0,
+read_barrier);
 cmdbuf.CopyBufferToImage(*buffer, image, VK_IMAGE_LAYOUT_GENERAL, copy);
 cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT,
 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT |
@@ -252,7 +251,7 @@ void BlitScreen::Draw(const Tegra::FramebufferConfig& framebuffer,
 .height = (up_scale * framebuffer.height) >> down_shift,
 };
 scheduler.Record([this, index = image_index, size,
-anti_alias_pass](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+anti_alias_pass](vk::CommandBuffer cmdbuf) {
 const VkImageMemoryBarrier base_barrier{
 .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
 .pNext = nullptr,
@@ -377,7 +376,7 @@ void BlitScreen::Draw(const Tegra::FramebufferConfig& framebuffer,
 }
 scheduler.Record([this, host_framebuffer, index = image_index,
-size = render_area](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+size = render_area](vk::CommandBuffer cmdbuf) {
 const f32 bg_red = Settings::values.bg_red.GetValue() / 255.0f;
 const f32 bg_green = Settings::values.bg_green.GetValue() / 255.0f;
 const f32 bg_blue = Settings::values.bg_blue.GetValue() / 255.0f;


@@ -179,8 +179,7 @@ public:
 if (!host_visible) {
 scheduler.RequestOutsideRenderPassOperationContext();
 scheduler.Record([src_buffer = staging.buffer, src_offset = staging.offset,
-dst_buffer = *buffer,
-size_bytes](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+dst_buffer = *buffer, size_bytes](vk::CommandBuffer cmdbuf) {
 const VkBufferCopy copy{
 .srcOffset = src_offset,
 .dstOffset = 0,
@@ -211,8 +210,7 @@ public:
 const size_t sub_first_offset = static_cast<size_t>(first % 4) * GetQuadsNum(num_indices);
 const size_t offset =
 (sub_first_offset + GetQuadsNum(first)) * 6ULL * BytesPerIndex(index_type);
-scheduler.Record(
-[buffer_ = *buffer, index_type_, offset](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([buffer_ = *buffer, index_type_, offset](vk::CommandBuffer cmdbuf) {
 cmdbuf.BindIndexBuffer(buffer_, offset, index_type_);
 });
 }
@@ -402,16 +400,15 @@ void BufferCacheRuntime::CopyBuffer(VkBuffer dst_buffer, VkBuffer src_buffer,
 boost::container::small_vector<VkBufferCopy, 8> vk_copies(copies.size());
 std::ranges::transform(copies, vk_copies.begin(), MakeBufferCopy);
 if (src_buffer == staging_pool.StreamBuf() && can_reorder_upload) {
-scheduler.Record([src_buffer, dst_buffer, vk_copies](vk::CommandBuffer,
-vk::CommandBuffer upload_cmdbuf) {
+scheduler.RecordWithUploadBuffer([src_buffer, dst_buffer, vk_copies](
+vk::CommandBuffer, vk::CommandBuffer upload_cmdbuf) {
 upload_cmdbuf.CopyBuffer(src_buffer, dst_buffer, vk_copies);
 });
 return;
 }
 scheduler.RequestOutsideRenderPassOperationContext();
-scheduler.Record(
-[src_buffer, dst_buffer, vk_copies, barrier](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([src_buffer, dst_buffer, vk_copies, barrier](vk::CommandBuffer cmdbuf) {
 if (barrier) {
 cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
 VK_PIPELINE_STAGE_TRANSFER_BIT, 0, READ_BARRIER);
@@ -432,7 +429,7 @@ void BufferCacheRuntime::PreCopyBarrier() {
 .dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_TRANSFER_WRITE_BIT,
 };
 scheduler.RequestOutsideRenderPassOperationContext();
-scheduler.Record([](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([](vk::CommandBuffer cmdbuf) {
 cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
 0, READ_BARRIER);
 });
@@ -446,7 +443,7 @@ void BufferCacheRuntime::PostCopyBarrier() {
 .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
 };
 scheduler.RequestOutsideRenderPassOperationContext();
-scheduler.Record([](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([](vk::CommandBuffer cmdbuf) {
 cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
 0, WRITE_BARRIER);
 });
@@ -470,13 +467,12 @@ void BufferCacheRuntime::ClearBuffer(VkBuffer dest_buffer, u32 offset, size_t si
 };
 scheduler.RequestOutsideRenderPassOperationContext();
-scheduler.Record(
-[dest_buffer, offset, size, value](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
-cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
-VK_PIPELINE_STAGE_TRANSFER_BIT, 0, READ_BARRIER);
+scheduler.Record([dest_buffer, offset, size, value](vk::CommandBuffer cmdbuf) {
+cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
+0, READ_BARRIER);
 cmdbuf.FillBuffer(dest_buffer, offset, size, value);
-cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT,
-VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, WRITE_BARRIER);
+cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+0, WRITE_BARRIER);
 });
 }
@@ -502,8 +498,7 @@ void BufferCacheRuntime::BindIndexBuffer(PrimitiveTopology topology, IndexFormat
 ReserveNullBuffer();
 vk_buffer = *null_buffer;
 }
-scheduler.Record(
-[vk_buffer, vk_offset, vk_index_type](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([vk_buffer, vk_offset, vk_index_type](vk::CommandBuffer cmdbuf) {
 cmdbuf.BindIndexBuffer(vk_buffer, vk_offset, vk_index_type);
 });
 }
@@ -511,7 +506,7 @@ void BufferCacheRuntime::BindIndexBuffer(PrimitiveTopology topology, IndexFormat
 void BufferCacheRuntime::BindQuadIndexBuffer(PrimitiveTopology topology, u32 first, u32 count) {
 if (count == 0) {
 ReserveNullBuffer();
-scheduler.Record([this](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([this](vk::CommandBuffer cmdbuf) {
 cmdbuf.BindIndexBuffer(*null_buffer, 0, VK_INDEX_TYPE_UINT32);
 });
 return;
@@ -532,8 +527,7 @@ void BufferCacheRuntime::BindVertexBuffer(u32 index, VkBuffer buffer, u32 offset
 return;
 }
 if (device.IsExtExtendedDynamicStateSupported()) {
-scheduler.Record(
-[index, buffer, offset, size, stride](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([index, buffer, offset, size, stride](vk::CommandBuffer cmdbuf) {
 const VkDeviceSize vk_offset = buffer != VK_NULL_HANDLE ? offset : 0;
 const VkDeviceSize vk_size = buffer != VK_NULL_HANDLE ? size : VK_WHOLE_SIZE;
 const VkDeviceSize vk_stride = stride;
@@ -545,7 +539,7 @@ void BufferCacheRuntime::BindVertexBuffer(u32 index, VkBuffer buffer, u32 offset
 buffer = *null_buffer;
 offset = 0;
 }
-scheduler.Record([index, buffer, offset](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([index, buffer, offset](vk::CommandBuffer cmdbuf) {
 cmdbuf.BindVertexBuffer(index, buffer, offset);
 });
 }
@@ -567,8 +561,7 @@ void BufferCacheRuntime::BindVertexBuffers(VideoCommon::HostBindings<Buffer>& bi
 }
 if (device.IsExtExtendedDynamicStateSupported()) {
 scheduler.Record([this, bindings_ = std::move(bindings),
-buffer_handles_ = std::move(buffer_handles)](vk::CommandBuffer cmdbuf,
-vk::CommandBuffer) {
+buffer_handles_ = std::move(buffer_handles)](vk::CommandBuffer cmdbuf) {
 cmdbuf.BindVertexBuffers2EXT(bindings_.min_index,
 std::min(bindings_.max_index - bindings_.min_index,
 device.GetMaxVertexInputBindings()),
@@ -577,8 +570,7 @@ void BufferCacheRuntime::BindVertexBuffers(VideoCommon::HostBindings<Buffer>& bi
 });
 } else {
 scheduler.Record([this, bindings_ = std::move(bindings),
-buffer_handles_ = std::move(buffer_handles)](vk::CommandBuffer cmdbuf,
-vk::CommandBuffer) {
+buffer_handles_ = std::move(buffer_handles)](vk::CommandBuffer cmdbuf) {
 cmdbuf.BindVertexBuffers(bindings_.min_index,
 std::min(bindings_.max_index - bindings_.min_index,
 device.GetMaxVertexInputBindings()),
@@ -601,7 +593,7 @@ void BufferCacheRuntime::BindTransformFeedbackBuffer(u32 index, VkBuffer buffer,
 offset = 0;
 size = 0;
 }
-scheduler.Record([index, buffer, offset, size](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([index, buffer, offset, size](vk::CommandBuffer cmdbuf) {
 const VkDeviceSize vk_offset = offset;
 const VkDeviceSize vk_size = size;
 cmdbuf.BindTransformFeedbackBuffersEXT(index, 1, &buffer, &vk_offset, &vk_size);
@@ -617,8 +609,8 @@ void BufferCacheRuntime::BindTransformFeedbackBuffers(VideoCommon::HostBindings<
 for (u32 index = 0; index < bindings.buffers.size(); ++index) {
 buffer_handles.push_back(bindings.buffers[index]->Handle());
 }
-scheduler.Record([bindings_ = std::move(bindings), buffer_handles_ = std::move(buffer_handles)](
-vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([bindings_ = std::move(bindings),
+buffer_handles_ = std::move(buffer_handles)](vk::CommandBuffer cmdbuf) {
 cmdbuf.BindTransformFeedbackBuffersEXT(0, static_cast<u32>(buffer_handles_.size()),
 buffer_handles_.data(), bindings_.offsets.data(),
 bindings_.sizes.data());
@@ -649,7 +641,7 @@ void BufferCacheRuntime::ReserveNullBuffer() {
 }
 scheduler.RequestOutsideRenderPassOperationContext();
-scheduler.Record([buffer = *null_buffer](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([buffer = *null_buffer](vk::CommandBuffer cmdbuf) {
 cmdbuf.FillBuffer(buffer, 0, VK_WHOLE_SIZE, 0);
 });
 }
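
The renderer-side churn in this and the following files is one mechanical change: Scheduler::Record lambdas now take a single vk::CommandBuffer, while the reorderable-upload path seen in CopyBuffer above moves to RecordWithUploadBuffer and keeps the two-buffer form. A hypothetical mock of how such an interface could be layered is sketched below; it is an assumption for illustration only, not yuzu's actual scheduler:

#include <functional>
#include <utility>
#include <vector>

namespace vk { class CommandBuffer {}; } // stand-ins, not yuzu's Vulkan wrappers

class Scheduler {
public:
    // Regular work: the lambda sees only the primary command buffer.
    void Record(std::function<void(vk::CommandBuffer)> f) {
        recorded.emplace_back(
            [f](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { f(cmdbuf); });
    }
    // Reorderable uploads keep the two-buffer signature and use the second one.
    void RecordWithUploadBuffer(std::function<void(vk::CommandBuffer, vk::CommandBuffer)> f) {
        recorded.push_back(std::move(f));
    }
    // Replays everything against the real command buffers at submit time.
    void Flush(vk::CommandBuffer cmdbuf, vk::CommandBuffer upload_cmdbuf) {
        for (auto& f : recorded) {
            f(cmdbuf, upload_cmdbuf);
        }
        recorded.clear();
    }
private:
    std::vector<std::function<void(vk::CommandBuffer, vk::CommandBuffer)>> recorded;
};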


@@ -324,8 +324,7 @@ std::pair<VkBuffer, VkDeviceSize> Uint8Pass::Assemble(u32 num_vertices, VkBuffer
 const void* const descriptor_data{compute_pass_descriptor_queue.UpdateData()};
 scheduler.RequestOutsideRenderPassOperationContext();
-scheduler.Record(
-[this, descriptor_data, num_vertices](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([this, descriptor_data, num_vertices](vk::CommandBuffer cmdbuf) {
 static constexpr u32 DISPATCH_SIZE = 1024;
 static constexpr VkMemoryBarrier WRITE_BARRIER{
 .sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
@@ -384,7 +383,7 @@ std::pair<VkBuffer, VkDeviceSize> QuadIndexedPass::Assemble(
 scheduler.RequestOutsideRenderPassOperationContext();
 scheduler.Record([this, descriptor_data, num_tri_vertices, base_vertex, index_shift,
-is_strip](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+is_strip](vk::CommandBuffer cmdbuf) {
 static constexpr u32 DISPATCH_SIZE = 1024;
 static constexpr VkMemoryBarrier WRITE_BARRIER{
 .sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
@@ -424,7 +423,7 @@ void ConditionalRenderingResolvePass::Resolve(VkBuffer dst_buffer, VkBuffer src_
 const void* const descriptor_data{compute_pass_descriptor_queue.UpdateData()};
 scheduler.RequestOutsideRenderPassOperationContext();
-scheduler.Record([this, descriptor_data](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([this, descriptor_data](vk::CommandBuffer cmdbuf) {
 static constexpr VkMemoryBarrier read_barrier{
 .sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
 .pNext = nullptr,
@@ -484,7 +483,7 @@ void QueriesPrefixScanPass::Run(VkBuffer accumulation_buffer, VkBuffer dst_buffe
 scheduler.RequestOutsideRenderPassOperationContext();
 scheduler.Record([this, descriptor_data, min_accumulation_limit, max_accumulation_limit,
-runs_to_do, used_offset](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+runs_to_do, used_offset](vk::CommandBuffer cmdbuf) {
 static constexpr VkMemoryBarrier read_barrier{
 .sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
 .pNext = nullptr,
@@ -549,8 +548,8 @@ void ASTCDecoderPass::Assemble(Image& image, const StagingBufferRef& map,
 const VkImageAspectFlags aspect_mask = image.AspectMask();
 const VkImage vk_image = image.Handle();
 const bool is_initialized = image.ExchangeInitialization();
-scheduler.Record([vk_pipeline, vk_image, aspect_mask, is_initialized](vk::CommandBuffer cmdbuf,
-vk::CommandBuffer) {
+scheduler.Record([vk_pipeline, vk_image, aspect_mask,
+is_initialized](vk::CommandBuffer cmdbuf) {
 const VkImageMemoryBarrier image_barrier{
 .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
 .pNext = nullptr,
@@ -593,7 +592,7 @@ void ASTCDecoderPass::Assemble(Image& image, const StagingBufferRef& map,
 ASSERT(params.destination == (std::array<s32, 3>{0, 0, 0}));
 ASSERT(params.bytes_per_block_log2 == 4);
 scheduler.Record([this, num_dispatches_x, num_dispatches_y, num_dispatches_z, block_dims,
-params, descriptor_data](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+params, descriptor_data](vk::CommandBuffer cmdbuf) {
 const AstcPushConstants uniforms{
 .blocks_dims = block_dims,
 .layer_stride = params.layer_stride,
@@ -609,7 +608,7 @@ void ASTCDecoderPass::Assemble(Image& image, const StagingBufferRef& map,
 cmdbuf.Dispatch(num_dispatches_x, num_dispatches_y, num_dispatches_z);
 });
 }
-scheduler.Record([vk_image, aspect_mask](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([vk_image, aspect_mask](vk::CommandBuffer cmdbuf) {
 const VkImageMemoryBarrier image_barrier{
 .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
 .pNext = nullptr,
@@ -700,7 +699,7 @@ void MSAACopyPass::CopyImage(Image& dst_image, Image& src_image,
 };
 scheduler.Record([this, dst = dst_image.Handle(), msaa_pipeline, num_dispatches,
-descriptor_data](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+descriptor_data](vk::CommandBuffer cmdbuf) {
 const VkDescriptorSet set = descriptor_allocator.Commit();
 device.GetLogical().UpdateDescriptorSet(set, *descriptor_template, descriptor_data);
 cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_COMPUTE, msaa_pipeline);


@@ -199,15 +199,15 @@ void ComputePipeline::Configure(Tegra::Engines::KeplerCompute& kepler_compute,
 if (!is_built.load(std::memory_order::relaxed)) {
 // Wait for the pipeline to be built
-scheduler.Record([this](vk::CommandBuffer, vk::CommandBuffer) {
+scheduler.Record([this](vk::CommandBuffer) {
 std::unique_lock lock{build_mutex};
 build_condvar.wait(lock, [this] { return is_built.load(std::memory_order::relaxed); });
 });
 }
 const void* const descriptor_data{guest_descriptor_queue.UpdateData()};
 const bool is_rescaling = !info.texture_descriptors.empty() || !info.image_descriptors.empty();
-scheduler.Record([this, descriptor_data, is_rescaling, rescaling_data = rescaling.Data()](
-vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([this, descriptor_data, is_rescaling,
+rescaling_data = rescaling.Data()](vk::CommandBuffer cmdbuf) {
 cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
 if (!descriptor_set_layout) {
 return;


@@ -38,8 +38,7 @@ VkImageView FSR::Draw(Scheduler& scheduler, size_t image_index, VkImageView imag
 UpdateDescriptorSet(image_index, image_view);
-scheduler.Record([this, image_index, input_image_extent, crop_rect](vk::CommandBuffer cmdbuf,
-vk::CommandBuffer) {
+scheduler.Record([this, image_index, input_image_extent, crop_rect](vk::CommandBuffer cmdbuf) {
 const VkImageMemoryBarrier base_barrier{
 .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
 .pNext = nullptr,


@@ -493,7 +493,7 @@ void GraphicsPipeline::ConfigureDraw(const RescalingPushConstant& rescaling,
 if (!is_built.load(std::memory_order::relaxed)) {
 // Wait for the pipeline to be built
-scheduler.Record([this](vk::CommandBuffer, vk::CommandBuffer) {
+scheduler.Record([this](vk::CommandBuffer) {
 std::unique_lock lock{build_mutex};
 build_condvar.wait(lock, [this] { return is_built.load(std::memory_order::relaxed); });
 });
@@ -502,10 +502,10 @@ void GraphicsPipeline::ConfigureDraw(const RescalingPushConstant& rescaling,
 const bool update_rescaling{scheduler.UpdateRescaling(is_rescaling)};
 const bool bind_pipeline{scheduler.UpdateGraphicsPipeline(this)};
 const void* const descriptor_data{guest_descriptor_queue.UpdateData()};
-scheduler.Record(
-[this, descriptor_data, bind_pipeline, rescaling_data = rescaling.Data(), is_rescaling,
-update_rescaling, uses_render_area = render_area.uses_render_area,
-render_area_data = render_area.words](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([this, descriptor_data, bind_pipeline, rescaling_data = rescaling.Data(),
+is_rescaling, update_rescaling,
+uses_render_area = render_area.uses_render_area,
+render_area_data = render_area.words](vk::CommandBuffer cmdbuf) {
 if (bind_pipeline) {
 cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
 }
@@ -528,13 +528,12 @@ void GraphicsPipeline::ConfigureDraw(const RescalingPushConstant& rescaling,
 return;
 }
 if (uses_push_descriptor) {
-cmdbuf.PushDescriptorSetWithTemplateKHR(*descriptor_update_template,
-*pipeline_layout, 0, descriptor_data);
+cmdbuf.PushDescriptorSetWithTemplateKHR(*descriptor_update_template, *pipeline_layout,
+0, descriptor_data);
 } else {
 const VkDescriptorSet descriptor_set{descriptor_allocator.Commit()};
 const vk::Device& dev{device.GetLogical()};
-dev.UpdateDescriptorSet(descriptor_set, *descriptor_update_template,
-descriptor_data);
+dev.UpdateDescriptorSet(descriptor_set, *descriptor_update_template, descriptor_data);
 cmdbuf.BindDescriptorSets(VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline_layout, 0,
 descriptor_set, nullptr);
 }


@@ -165,7 +165,7 @@ void PresentManager::Present(Frame* frame) {
 return;
 }
-scheduler.Record([this, frame](vk::CommandBuffer, vk::CommandBuffer) {
+scheduler.Record([this, frame](vk::CommandBuffer) {
 std::unique_lock lock{queue_mutex};
 present_queue.push(frame);
 frame_cv.notify_one();


@@ -138,8 +138,7 @@ public:
 };
 accumulation_buffer = memory_allocator.CreateBuffer(buffer_ci, MemoryUsage::DeviceLocal);
 scheduler.RequestOutsideRenderPassOperationContext();
-scheduler.Record(
-[buffer = *accumulation_buffer](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([buffer = *accumulation_buffer](vk::CommandBuffer cmdbuf) {
 cmdbuf.FillBuffer(buffer, 0, 8, 0);
 });
 }
@@ -151,8 +150,8 @@ public:
 return;
 }
 ReserveHostQuery();
-scheduler.Record([query_pool = current_query_pool, query_index = current_bank_slot](
-vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([query_pool = current_query_pool,
+query_index = current_bank_slot](vk::CommandBuffer cmdbuf) {
 const bool use_precise = Settings::IsGPULevelHigh();
 cmdbuf.BeginQuery(query_pool, static_cast<u32>(query_index),
 use_precise ? VK_QUERY_CONTROL_PRECISE_BIT : 0);
@@ -164,8 +163,8 @@ public:
 if (!has_started) {
 return;
 }
-scheduler.Record([query_pool = current_query_pool, query_index = current_bank_slot](
-vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([query_pool = current_query_pool,
+query_index = current_bank_slot](vk::CommandBuffer cmdbuf) {
 cmdbuf.EndQuery(query_pool, static_cast<u32>(query_index));
 });
 has_started = false;
@@ -228,8 +227,8 @@ public:
 auto& resolve_buffer = buffers[resolve_buffer_index];
 VkQueryPool query_pool = bank->GetInnerPool();
 scheduler.RequestOutsideRenderPassOperationContext();
-scheduler.Record([start, amount, base_offset, query_pool, buffer = *resolve_buffer](
-vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([start, amount, base_offset, query_pool,
+buffer = *resolve_buffer](vk::CommandBuffer cmdbuf) {
 const VkBufferMemoryBarrier copy_query_pool_barrier{
 .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
 .pNext = nullptr,
@@ -292,8 +291,7 @@ public:
 } else {
 scheduler.RequestOutsideRenderPassOperationContext();
-scheduler.Record(
-[buffer = *accumulation_buffer](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([buffer = *accumulation_buffer](vk::CommandBuffer cmdbuf) {
 cmdbuf.FillBuffer(buffer, 0, 8, 0);
 });
 }
@@ -611,7 +609,7 @@ public:
 void Sync(StagingBufferRef& stagging_buffer, size_t extra_offset, size_t start, size_t size) {
 scheduler.RequestOutsideRenderPassOperationContext();
 scheduler.Record([this, dst_buffer = stagging_buffer.buffer, extra_offset, start,
-size](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+size](vk::CommandBuffer cmdbuf) {
 std::array<VkBufferCopy, 1> copy{VkBufferCopy{
 .srcOffset = start * QUERY_SIZE,
 .dstOffset = extra_offset,
@@ -796,7 +794,7 @@ public:
 .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
 };
 scheduler.RequestOutsideRenderPassOperationContext();
-scheduler.Record([](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([](vk::CommandBuffer cmdbuf) {
 cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT,
 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, WRITE_BARRIER);
 });
@@ -844,14 +842,13 @@ private:
 }
 has_flushed_end_pending = true;
 if (!has_started || buffers_count == 0) {
-scheduler.Record([](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([](vk::CommandBuffer cmdbuf) {
 cmdbuf.BeginTransformFeedbackEXT(0, 0, nullptr, nullptr);
 });
 UpdateBuffers();
 return;
 }
-scheduler.Record([this, total = static_cast<u32>(buffers_count)](vk::CommandBuffer cmdbuf,
-vk::CommandBuffer) {
+scheduler.Record([this, total = static_cast<u32>(buffers_count)](vk::CommandBuffer cmdbuf) {
 cmdbuf.BeginTransformFeedbackEXT(0, total, counter_buffers.data(), offsets.data());
 });
 UpdateBuffers();
@@ -865,12 +862,12 @@ private:
 has_flushed_end_pending = false;
 if (buffers_count == 0) {
-scheduler.Record([](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([](vk::CommandBuffer cmdbuf) {
 cmdbuf.EndTransformFeedbackEXT(0, 0, nullptr, nullptr);
 });
 } else {
-scheduler.Record([this, total = static_cast<u32>(buffers_count)](
-vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+scheduler.Record([this,
+total = static_cast<u32>(buffers_count)](vk::CommandBuffer cmdbuf) {
 cmdbuf.EndTransformFeedbackEXT(0, total, counter_buffers.data(), offsets.data());
 });
 }
@@ -922,7 +919,7 @@ private:
 scheduler.RequestOutsideRenderPassOperationContext();
 scheduler.Record([dst_buffer = current_bank->GetBuffer(),
 src_buffer = counter_buffers[stream], src_offset = offsets[stream],
-slot](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+slot](vk::CommandBuffer cmdbuf) {
 cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
 VK_PIPELINE_STAGE_TRANSFER_BIT, 0, READ_BARRIER);
 std::array<VkBufferCopy, 1> copy{VkBufferCopy{
@@ -1257,9 +1254,8 @@ void QueryCacheRuntime::PauseHostConditionalRendering() {
 return;
 }
 if (impl->is_hcr_running) {
-impl->scheduler.Record([](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
-cmdbuf.EndConditionalRenderingEXT();
-});
+impl->scheduler.Record(
+[](vk::CommandBuffer cmdbuf) { cmdbuf.EndConditionalRenderingEXT(); });
 }
 impl->is_hcr_running = false;
 }
@@ -1269,8 +1265,7 @@ void QueryCacheRuntime::ResumeHostConditionalRendering() {
 return;
 }
 if (!impl->is_hcr_running) {
-impl->scheduler.Record(
-[hcr_setup = impl->hcr_setup](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+impl->scheduler.Record([hcr_setup = impl->hcr_setup](vk::CommandBuffer cmdbuf) {
 cmdbuf.BeginConditionalRenderingEXT(hcr_setup);
 });
 }
@@ -1442,12 +1437,12 @@ void QueryCacheRuntime::Barriers(bool is_prebarrier) {
 .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
 };
 if (is_prebarrier) {
-impl->scheduler.Record([](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+impl->scheduler.Record([](vk::CommandBuffer cmdbuf) {
 cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
 VK_PIPELINE_STAGE_TRANSFER_BIT, 0, READ_BARRIER);
 });
 } else {
-impl->scheduler.Record([](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+impl->scheduler.Record([](vk::CommandBuffer cmdbuf) {
 cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_TRANSFER_BIT,
 VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, WRITE_BARRIER);
 });
@@ -1542,9 +1537,8 @@ void QueryCacheRuntime::SyncValues(std::span<SyncValuesType> values, VkBuffer ba
 }
 impl->scheduler.RequestOutsideRenderPassOperationContext();
-impl->scheduler.Record(
-[src_buffer, dst_buffers = std::move(impl->buffers_to_upload_to),
-vk_copies = std::move(impl->copies_setup)](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
+impl->scheduler.Record([src_buffer, dst_buffers = std::move(impl->buffers_to_upload_to),
+vk_copies = std::move(impl->copies_setup)](vk::CommandBuffer cmdbuf) {
 size_t size = dst_buffers.size();
 for (size_t i = 0; i < size; i++) {
 cmdbuf.CopyBuffer(src_buffer, dst_buffers[i].first, vk_copies[i]);


@ -216,7 +216,7 @@ void RasterizerVulkan::Draw(bool is_indexed, u32 instance_count) {
const auto& draw_state = maxwell3d->draw_manager->GetDrawState(); const auto& draw_state = maxwell3d->draw_manager->GetDrawState();
const u32 num_instances{instance_count}; const u32 num_instances{instance_count};
const DrawParams draw_params{MakeDrawParams(draw_state, num_instances, is_indexed)}; const DrawParams draw_params{MakeDrawParams(draw_state, num_instances, is_indexed)};
scheduler.Record([draw_params](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record([draw_params](vk::CommandBuffer cmdbuf) {
if (draw_params.is_indexed) { if (draw_params.is_indexed) {
cmdbuf.DrawIndexed(draw_params.num_vertices, draw_params.num_instances, cmdbuf.DrawIndexed(draw_params.num_vertices, draw_params.num_instances,
draw_params.first_index, draw_params.base_vertex, draw_params.first_index, draw_params.base_vertex,
@ -238,7 +238,7 @@ void RasterizerVulkan::DrawIndirect() {
const auto& offset = indirect_buffer.second; const auto& offset = indirect_buffer.second;
if (params.is_byte_count) { if (params.is_byte_count) {
scheduler.Record([buffer_obj = buffer->Handle(), offset, scheduler.Record([buffer_obj = buffer->Handle(), offset,
stride = params.stride](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { stride = params.stride](vk::CommandBuffer cmdbuf) {
cmdbuf.DrawIndirectByteCountEXT(1, 0, buffer_obj, offset, 0, cmdbuf.DrawIndirectByteCountEXT(1, 0, buffer_obj, offset, 0,
static_cast<u32>(stride)); static_cast<u32>(stride));
}); });
@ -250,7 +250,7 @@ void RasterizerVulkan::DrawIndirect() {
const auto& offset_base = count.second; const auto& offset_base = count.second;
scheduler.Record([draw_buffer_obj = draw_buffer->Handle(), scheduler.Record([draw_buffer_obj = draw_buffer->Handle(),
buffer_obj = buffer->Handle(), offset_base, offset, buffer_obj = buffer->Handle(), offset_base, offset,
params](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { params](vk::CommandBuffer cmdbuf) {
if (params.is_indexed) { if (params.is_indexed) {
cmdbuf.DrawIndexedIndirectCount( cmdbuf.DrawIndexedIndirectCount(
buffer_obj, offset, draw_buffer_obj, offset_base, buffer_obj, offset, draw_buffer_obj, offset_base,
@@ -263,8 +263,7 @@ void RasterizerVulkan::DrawIndirect() {
}); });
return; return;
} }
scheduler.Record([buffer_obj = buffer->Handle(), offset, params](vk::CommandBuffer cmdbuf, scheduler.Record([buffer_obj = buffer->Handle(), offset, params](vk::CommandBuffer cmdbuf) {
vk::CommandBuffer) {
if (params.is_indexed) { if (params.is_indexed) {
cmdbuf.DrawIndexedIndirect(buffer_obj, offset, cmdbuf.DrawIndexedIndirect(buffer_obj, offset,
static_cast<u32>(params.max_draw_counts), static_cast<u32>(params.max_draw_counts),
@@ -388,8 +387,7 @@ void RasterizerVulkan::Clear(u32 layer_count) {
if (regs.clear_surface.R && regs.clear_surface.G && regs.clear_surface.B && if (regs.clear_surface.R && regs.clear_surface.G && regs.clear_surface.B &&
regs.clear_surface.A) { regs.clear_surface.A) {
scheduler.Record([color_attachment, clear_value, clear_rect](vk::CommandBuffer cmdbuf, scheduler.Record([color_attachment, clear_value, clear_rect](vk::CommandBuffer cmdbuf) {
vk::CommandBuffer) {
const VkClearAttachment attachment{ const VkClearAttachment attachment{
.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT, .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
.colorAttachment = color_attachment, .colorAttachment = color_attachment,
@@ -436,7 +434,7 @@ void RasterizerVulkan::Clear(u32 layer_count) {
regs.stencil_front_func_mask, dst_region); regs.stencil_front_func_mask, dst_region);
} else { } else {
scheduler.Record([clear_depth = regs.clear_depth, clear_stencil = regs.clear_stencil, scheduler.Record([clear_depth = regs.clear_depth, clear_stencil = regs.clear_stencil,
clear_rect, aspect_flags](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { clear_rect, aspect_flags](vk::CommandBuffer cmdbuf) {
VkClearAttachment attachment; VkClearAttachment attachment;
attachment.aspectMask = aspect_flags; attachment.aspectMask = aspect_flags;
attachment.colorAttachment = 0; attachment.colorAttachment = 0;
@@ -468,16 +466,14 @@ void RasterizerVulkan::DispatchCompute() {
buffer_cache.ObtainBuffer(*indirect_address, 12, sync_info, post_op); buffer_cache.ObtainBuffer(*indirect_address, 12, sync_info, post_op);
scheduler.RequestOutsideRenderPassOperationContext(); scheduler.RequestOutsideRenderPassOperationContext();
scheduler.Record([indirect_buffer = buffer->Handle(), scheduler.Record([indirect_buffer = buffer->Handle(),
indirect_offset = offset](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { indirect_offset = offset](vk::CommandBuffer cmdbuf) {
cmdbuf.DispatchIndirect(indirect_buffer, indirect_offset); cmdbuf.DispatchIndirect(indirect_buffer, indirect_offset);
}); });
return; return;
} }
const std::array<u32, 3> dim{qmd.grid_dim_x, qmd.grid_dim_y, qmd.grid_dim_z}; const std::array<u32, 3> dim{qmd.grid_dim_x, qmd.grid_dim_y, qmd.grid_dim_z};
scheduler.RequestOutsideRenderPassOperationContext(); scheduler.RequestOutsideRenderPassOperationContext();
scheduler.Record([dim](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record([dim](vk::CommandBuffer cmdbuf) { cmdbuf.Dispatch(dim[0], dim[1], dim[2]); });
cmdbuf.Dispatch(dim[0], dim[1], dim[2]);
});
} }
void RasterizerVulkan::ResetCounter(VideoCommon::QueryType type) { void RasterizerVulkan::ResetCounter(VideoCommon::QueryType type) {
@@ -695,7 +691,7 @@ void RasterizerVulkan::WaitForIdle() {
query_cache.NotifyWFI(); query_cache.NotifyWFI();
scheduler.RequestOutsideRenderPassOperationContext(); scheduler.RequestOutsideRenderPassOperationContext();
scheduler.Record([event = *wfi_event, flags](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record([event = *wfi_event, flags](vk::CommandBuffer cmdbuf) {
cmdbuf.SetEvent(event, flags); cmdbuf.SetEvent(event, flags);
cmdbuf.WaitEvents(event, flags, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, {}, {}, {}); cmdbuf.WaitEvents(event, flags, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, {}, {}, {});
}); });
@@ -953,9 +949,7 @@ void RasterizerVulkan::UpdateViewportsState(Tegra::Engines::Maxwell3D::Regs& reg
.minDepth = 0.0f, .minDepth = 0.0f,
.maxDepth = 1.0f, .maxDepth = 1.0f,
}; };
scheduler.Record([viewport](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record([viewport](vk::CommandBuffer cmdbuf) { cmdbuf.SetViewport(0, viewport); });
cmdbuf.SetViewport(0, viewport);
});
return; return;
} }
const bool is_rescaling{texture_cache.IsRescaling()}; const bool is_rescaling{texture_cache.IsRescaling()};
@@ -970,7 +964,7 @@ void RasterizerVulkan::UpdateViewportsState(Tegra::Engines::Maxwell3D::Regs& reg
GetViewportState(device, regs, 12, scale), GetViewportState(device, regs, 13, scale), GetViewportState(device, regs, 12, scale), GetViewportState(device, regs, 13, scale),
GetViewportState(device, regs, 14, scale), GetViewportState(device, regs, 15, scale), GetViewportState(device, regs, 14, scale), GetViewportState(device, regs, 15, scale),
}; };
scheduler.Record([this, viewport_list](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record([this, viewport_list](vk::CommandBuffer cmdbuf) {
const u32 num_viewports = std::min<u32>(device.GetMaxViewports(), Maxwell::NumViewports); const u32 num_viewports = std::min<u32>(device.GetMaxViewports(), Maxwell::NumViewports);
const vk::Span<VkViewport> viewports(viewport_list.data(), num_viewports); const vk::Span<VkViewport> viewports(viewport_list.data(), num_viewports);
cmdbuf.SetViewport(0, viewports); cmdbuf.SetViewport(0, viewports);
@@ -1005,7 +999,7 @@ void RasterizerVulkan::UpdateScissorsState(Tegra::Engines::Maxwell3D::Regs& regs
GetScissorState(regs, 14, up_scale, down_shift), GetScissorState(regs, 14, up_scale, down_shift),
GetScissorState(regs, 15, up_scale, down_shift), GetScissorState(regs, 15, up_scale, down_shift),
}; };
scheduler.Record([this, scissor_list](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record([this, scissor_list](vk::CommandBuffer cmdbuf) {
const u32 num_scissors = std::min<u32>(device.GetMaxViewports(), Maxwell::NumViewports); const u32 num_scissors = std::min<u32>(device.GetMaxViewports(), Maxwell::NumViewports);
const vk::Span<VkRect2D> scissors(scissor_list.data(), num_scissors); const vk::Span<VkRect2D> scissors(scissor_list.data(), num_scissors);
cmdbuf.SetScissor(0, scissors); cmdbuf.SetScissor(0, scissors);
@@ -1038,10 +1032,9 @@ void RasterizerVulkan::UpdateDepthBias(Tegra::Engines::Maxwell3D::Regs& regs) {
units = static_cast<float>(static_cast<double>(units) * rescale_factor); units = static_cast<float>(static_cast<double>(units) * rescale_factor);
return false; return false;
})(); })();
scheduler.Record( scheduler.Record([constant = units, clamp = regs.depth_bias_clamp,
[constant = units, clamp = regs.depth_bias_clamp, factor = regs.slope_scale_depth_bias, factor = regs.slope_scale_depth_bias, force_unorm,
force_unorm, precise = device.HasExactDepthBiasControl()](vk::CommandBuffer cmdbuf) {
precise = device.HasExactDepthBiasControl()](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
if (force_unorm) { if (force_unorm) {
VkDepthBiasRepresentationInfoEXT info{ VkDepthBiasRepresentationInfoEXT info{
.sType = VK_STRUCTURE_TYPE_DEPTH_BIAS_REPRESENTATION_INFO_EXT, .sType = VK_STRUCTURE_TYPE_DEPTH_BIAS_REPRESENTATION_INFO_EXT,
@@ -1063,19 +1056,16 @@ void RasterizerVulkan::UpdateBlendConstants(Tegra::Engines::Maxwell3D::Regs& reg
} }
const std::array blend_color = {regs.blend_color.r, regs.blend_color.g, regs.blend_color.b, const std::array blend_color = {regs.blend_color.r, regs.blend_color.g, regs.blend_color.b,
regs.blend_color.a}; regs.blend_color.a};
scheduler.Record([blend_color](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record(
cmdbuf.SetBlendConstants(blend_color.data()); [blend_color](vk::CommandBuffer cmdbuf) { cmdbuf.SetBlendConstants(blend_color.data()); });
});
} }
void RasterizerVulkan::UpdateDepthBounds(Tegra::Engines::Maxwell3D::Regs& regs) { void RasterizerVulkan::UpdateDepthBounds(Tegra::Engines::Maxwell3D::Regs& regs) {
if (!state_tracker.TouchDepthBounds()) { if (!state_tracker.TouchDepthBounds()) {
return; return;
} }
scheduler.Record([min = regs.depth_bounds[0], scheduler.Record([min = regs.depth_bounds[0], max = regs.depth_bounds[1]](
max = regs.depth_bounds[1]](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { vk::CommandBuffer cmdbuf) { cmdbuf.SetDepthBounds(min, max); });
cmdbuf.SetDepthBounds(min, max);
});
} }
void RasterizerVulkan::UpdateStencilFaces(Tegra::Engines::Maxwell3D::Regs& regs) { void RasterizerVulkan::UpdateStencilFaces(Tegra::Engines::Maxwell3D::Regs& regs) {
@@ -1103,8 +1093,7 @@ void RasterizerVulkan::UpdateStencilFaces(Tegra::Engines::Maxwell3D::Regs& regs)
} }
} }
scheduler.Record([front_ref = regs.stencil_front_ref, back_ref = regs.stencil_back_ref, scheduler.Record([front_ref = regs.stencil_front_ref, back_ref = regs.stencil_back_ref,
two_sided = regs.stencil_two_side_enable](vk::CommandBuffer cmdbuf, two_sided = regs.stencil_two_side_enable](vk::CommandBuffer cmdbuf) {
vk::CommandBuffer) {
const bool set_back = two_sided && front_ref != back_ref; const bool set_back = two_sided && front_ref != back_ref;
// Front face // Front face
cmdbuf.SetStencilReference(set_back ? VK_STENCIL_FACE_FRONT_BIT cmdbuf.SetStencilReference(set_back ? VK_STENCIL_FACE_FRONT_BIT
@@ -1130,8 +1119,7 @@ void RasterizerVulkan::UpdateStencilFaces(Tegra::Engines::Maxwell3D::Regs& regs)
} }
scheduler.Record([front_write_mask = regs.stencil_front_mask, scheduler.Record([front_write_mask = regs.stencil_front_mask,
back_write_mask = regs.stencil_back_mask, back_write_mask = regs.stencil_back_mask,
two_sided = regs.stencil_two_side_enable](vk::CommandBuffer cmdbuf, two_sided = regs.stencil_two_side_enable](vk::CommandBuffer cmdbuf) {
vk::CommandBuffer) {
const bool set_back = two_sided && front_write_mask != back_write_mask; const bool set_back = two_sided && front_write_mask != back_write_mask;
// Front face // Front face
cmdbuf.SetStencilWriteMask(set_back ? VK_STENCIL_FACE_FRONT_BIT cmdbuf.SetStencilWriteMask(set_back ? VK_STENCIL_FACE_FRONT_BIT
@@ -1157,8 +1145,7 @@ void RasterizerVulkan::UpdateStencilFaces(Tegra::Engines::Maxwell3D::Regs& regs)
} }
scheduler.Record([front_test_mask = regs.stencil_front_func_mask, scheduler.Record([front_test_mask = regs.stencil_front_func_mask,
back_test_mask = regs.stencil_back_func_mask, back_test_mask = regs.stencil_back_func_mask,
two_sided = regs.stencil_two_side_enable](vk::CommandBuffer cmdbuf, two_sided = regs.stencil_two_side_enable](vk::CommandBuffer cmdbuf) {
vk::CommandBuffer) {
const bool set_back = two_sided && front_test_mask != back_test_mask; const bool set_back = two_sided && front_test_mask != back_test_mask;
// Front face // Front face
cmdbuf.SetStencilCompareMask(set_back ? VK_STENCIL_FACE_FRONT_BIT cmdbuf.SetStencilCompareMask(set_back ? VK_STENCIL_FACE_FRONT_BIT
@@ -1179,8 +1166,7 @@ void RasterizerVulkan::UpdateLineWidth(Tegra::Engines::Maxwell3D::Regs& regs) {
} }
const float width = const float width =
regs.line_anti_alias_enable ? regs.line_width_smooth : regs.line_width_aliased; regs.line_anti_alias_enable ? regs.line_width_smooth : regs.line_width_aliased;
scheduler.Record( scheduler.Record([width](vk::CommandBuffer cmdbuf) { cmdbuf.SetLineWidth(width); });
[width](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { cmdbuf.SetLineWidth(width); });
} }
void RasterizerVulkan::UpdateCullMode(Tegra::Engines::Maxwell3D::Regs& regs) { void RasterizerVulkan::UpdateCullMode(Tegra::Engines::Maxwell3D::Regs& regs) {
@@ -1188,7 +1174,7 @@ void RasterizerVulkan::UpdateCullMode(Tegra::Engines::Maxwell3D::Regs& regs) {
return; return;
} }
scheduler.Record([enabled = regs.gl_cull_test_enabled, scheduler.Record([enabled = regs.gl_cull_test_enabled,
cull_face = regs.gl_cull_face](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { cull_face = regs.gl_cull_face](vk::CommandBuffer cmdbuf) {
cmdbuf.SetCullModeEXT(enabled ? MaxwellToVK::CullFace(cull_face) : VK_CULL_MODE_NONE); cmdbuf.SetCullModeEXT(enabled ? MaxwellToVK::CullFace(cull_face) : VK_CULL_MODE_NONE);
}); });
} }
@@ -1202,7 +1188,7 @@ void RasterizerVulkan::UpdateDepthBoundsTestEnable(Tegra::Engines::Maxwell3D::Re
LOG_WARNING(Render_Vulkan, "Depth bounds is enabled but not supported"); LOG_WARNING(Render_Vulkan, "Depth bounds is enabled but not supported");
enabled = false; enabled = false;
} }
scheduler.Record([enable = enabled](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record([enable = enabled](vk::CommandBuffer cmdbuf) {
cmdbuf.SetDepthBoundsTestEnableEXT(enable); cmdbuf.SetDepthBoundsTestEnableEXT(enable);
}); });
} }
@@ -1211,8 +1197,7 @@ void RasterizerVulkan::UpdateDepthTestEnable(Tegra::Engines::Maxwell3D::Regs& re
if (!state_tracker.TouchDepthTestEnable()) { if (!state_tracker.TouchDepthTestEnable()) {
return; return;
} }
scheduler.Record( scheduler.Record([enable = regs.depth_test_enable](vk::CommandBuffer cmdbuf) {
[enable = regs.depth_test_enable](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
cmdbuf.SetDepthTestEnableEXT(enable); cmdbuf.SetDepthTestEnableEXT(enable);
}); });
} }
@@ -1221,8 +1206,7 @@ void RasterizerVulkan::UpdateDepthWriteEnable(Tegra::Engines::Maxwell3D::Regs& r
if (!state_tracker.TouchDepthWriteEnable()) { if (!state_tracker.TouchDepthWriteEnable()) {
return; return;
} }
scheduler.Record( scheduler.Record([enable = regs.depth_write_enabled](vk::CommandBuffer cmdbuf) {
[enable = regs.depth_write_enabled](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
cmdbuf.SetDepthWriteEnableEXT(enable); cmdbuf.SetDepthWriteEnableEXT(enable);
}); });
} }
@@ -1231,8 +1215,7 @@ void RasterizerVulkan::UpdatePrimitiveRestartEnable(Tegra::Engines::Maxwell3D::R
if (!state_tracker.TouchPrimitiveRestartEnable()) { if (!state_tracker.TouchPrimitiveRestartEnable()) {
return; return;
} }
scheduler.Record( scheduler.Record([enable = regs.primitive_restart.enabled](vk::CommandBuffer cmdbuf) {
[enable = regs.primitive_restart.enabled](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
cmdbuf.SetPrimitiveRestartEnableEXT(enable); cmdbuf.SetPrimitiveRestartEnableEXT(enable);
}); });
} }
@@ -1241,8 +1224,7 @@ void RasterizerVulkan::UpdateRasterizerDiscardEnable(Tegra::Engines::Maxwell3D::
if (!state_tracker.TouchRasterizerDiscardEnable()) { if (!state_tracker.TouchRasterizerDiscardEnable()) {
return; return;
} }
scheduler.Record( scheduler.Record([disable = regs.rasterize_enable](vk::CommandBuffer cmdbuf) {
[disable = regs.rasterize_enable](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
cmdbuf.SetRasterizerDiscardEnableEXT(disable == 0); cmdbuf.SetRasterizerDiscardEnableEXT(disable == 0);
}); });
} }
@@ -1278,16 +1260,15 @@ void RasterizerVulkan::UpdateDepthBiasEnable(Tegra::Engines::Maxwell3D::Regs& re
}; };
const u32 topology_index = static_cast<u32>(maxwell3d->draw_manager->GetDrawState().topology); const u32 topology_index = static_cast<u32>(maxwell3d->draw_manager->GetDrawState().topology);
const u32 enable = enabled_lut[POLYGON_OFFSET_ENABLE_LUT[topology_index]]; const u32 enable = enabled_lut[POLYGON_OFFSET_ENABLE_LUT[topology_index]];
scheduler.Record([enable](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record(
cmdbuf.SetDepthBiasEnableEXT(enable != 0); [enable](vk::CommandBuffer cmdbuf) { cmdbuf.SetDepthBiasEnableEXT(enable != 0); });
});
} }
void RasterizerVulkan::UpdateLogicOpEnable(Tegra::Engines::Maxwell3D::Regs& regs) { void RasterizerVulkan::UpdateLogicOpEnable(Tegra::Engines::Maxwell3D::Regs& regs) {
if (!state_tracker.TouchLogicOpEnable()) { if (!state_tracker.TouchLogicOpEnable()) {
return; return;
} }
scheduler.Record([enable = regs.logic_op.enable](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record([enable = regs.logic_op.enable](vk::CommandBuffer cmdbuf) {
cmdbuf.SetLogicOpEnableEXT(enable != 0); cmdbuf.SetLogicOpEnableEXT(enable != 0);
}); });
} }
@@ -1302,16 +1283,15 @@ void RasterizerVulkan::UpdateDepthClampEnable(Tegra::Engines::Maxwell3D::Regs& r
Maxwell::ViewportClipControl::GeometryClip::FrustumXYZ || Maxwell::ViewportClipControl::GeometryClip::FrustumXYZ ||
regs.viewport_clip_control.geometry_clip == regs.viewport_clip_control.geometry_clip ==
Maxwell::ViewportClipControl::GeometryClip::FrustumZ); Maxwell::ViewportClipControl::GeometryClip::FrustumZ);
scheduler.Record([is_enabled](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record(
cmdbuf.SetDepthClampEnableEXT(is_enabled); [is_enabled](vk::CommandBuffer cmdbuf) { cmdbuf.SetDepthClampEnableEXT(is_enabled); });
});
} }
void RasterizerVulkan::UpdateDepthCompareOp(Tegra::Engines::Maxwell3D::Regs& regs) { void RasterizerVulkan::UpdateDepthCompareOp(Tegra::Engines::Maxwell3D::Regs& regs) {
if (!state_tracker.TouchDepthCompareOp()) { if (!state_tracker.TouchDepthCompareOp()) {
return; return;
} }
scheduler.Record([func = regs.depth_test_func](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record([func = regs.depth_test_func](vk::CommandBuffer cmdbuf) {
cmdbuf.SetDepthCompareOpEXT(MaxwellToVK::ComparisonOp(func)); cmdbuf.SetDepthCompareOpEXT(MaxwellToVK::ComparisonOp(func));
}); });
} }
@@ -1326,9 +1306,8 @@ void RasterizerVulkan::UpdateFrontFace(Tegra::Engines::Maxwell3D::Regs& regs) {
front_face = front_face == VK_FRONT_FACE_CLOCKWISE ? VK_FRONT_FACE_COUNTER_CLOCKWISE front_face = front_face == VK_FRONT_FACE_CLOCKWISE ? VK_FRONT_FACE_COUNTER_CLOCKWISE
: VK_FRONT_FACE_CLOCKWISE; : VK_FRONT_FACE_CLOCKWISE;
} }
scheduler.Record([front_face](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record(
cmdbuf.SetFrontFaceEXT(front_face); [front_face](vk::CommandBuffer cmdbuf) { cmdbuf.SetFrontFaceEXT(front_face); });
});
} }
void RasterizerVulkan::UpdateStencilOp(Tegra::Engines::Maxwell3D::Regs& regs) { void RasterizerVulkan::UpdateStencilOp(Tegra::Engines::Maxwell3D::Regs& regs) {
@@ -1346,7 +1325,7 @@ void RasterizerVulkan::UpdateStencilOp(Tegra::Engines::Maxwell3D::Regs& regs) {
const Maxwell::StencilOp::Op back_zpass = regs.stencil_back_op.zpass; const Maxwell::StencilOp::Op back_zpass = regs.stencil_back_op.zpass;
const Maxwell::ComparisonOp back_compare = regs.stencil_back_op.func; const Maxwell::ComparisonOp back_compare = regs.stencil_back_op.func;
scheduler.Record([fail, zfail, zpass, compare, back_fail, back_zfail, back_zpass, scheduler.Record([fail, zfail, zpass, compare, back_fail, back_zfail, back_zpass,
back_compare](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { back_compare](vk::CommandBuffer cmdbuf) {
cmdbuf.SetStencilOpEXT(VK_STENCIL_FACE_FRONT_BIT, MaxwellToVK::StencilOp(fail), cmdbuf.SetStencilOpEXT(VK_STENCIL_FACE_FRONT_BIT, MaxwellToVK::StencilOp(fail),
MaxwellToVK::StencilOp(zpass), MaxwellToVK::StencilOp(zfail), MaxwellToVK::StencilOp(zpass), MaxwellToVK::StencilOp(zfail),
MaxwellToVK::ComparisonOp(compare)); MaxwellToVK::ComparisonOp(compare));
@@ -1357,8 +1336,7 @@ void RasterizerVulkan::UpdateStencilOp(Tegra::Engines::Maxwell3D::Regs& regs) {
}); });
} else { } else {
// Front face defines the stencil op of both faces // Front face defines the stencil op of both faces
scheduler.Record( scheduler.Record([fail, zfail, zpass, compare](vk::CommandBuffer cmdbuf) {
[fail, zfail, zpass, compare](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
cmdbuf.SetStencilOpEXT(VK_STENCIL_FACE_FRONT_AND_BACK, MaxwellToVK::StencilOp(fail), cmdbuf.SetStencilOpEXT(VK_STENCIL_FACE_FRONT_AND_BACK, MaxwellToVK::StencilOp(fail),
MaxwellToVK::StencilOp(zpass), MaxwellToVK::StencilOp(zfail), MaxwellToVK::StencilOp(zpass), MaxwellToVK::StencilOp(zfail),
MaxwellToVK::ComparisonOp(compare)); MaxwellToVK::ComparisonOp(compare));
@@ -1373,8 +1351,7 @@ void RasterizerVulkan::UpdateLogicOp(Tegra::Engines::Maxwell3D::Regs& regs) {
const auto op_value = static_cast<u32>(regs.logic_op.op); const auto op_value = static_cast<u32>(regs.logic_op.op);
auto op = op_value >= 0x1500 && op_value < 0x1510 ? static_cast<VkLogicOp>(op_value - 0x1500) auto op = op_value >= 0x1500 && op_value < 0x1510 ? static_cast<VkLogicOp>(op_value - 0x1500)
: VK_LOGIC_OP_NO_OP; : VK_LOGIC_OP_NO_OP;
scheduler.Record( scheduler.Record([op](vk::CommandBuffer cmdbuf) { cmdbuf.SetLogicOpEXT(op); });
[op](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { cmdbuf.SetLogicOpEXT(op); });
} }
void RasterizerVulkan::UpdateBlending(Tegra::Engines::Maxwell3D::Regs& regs) { void RasterizerVulkan::UpdateBlending(Tegra::Engines::Maxwell3D::Regs& regs) {
@@ -1400,7 +1377,7 @@ void RasterizerVulkan::UpdateBlending(Tegra::Engines::Maxwell3D::Regs& regs) {
current |= VK_COLOR_COMPONENT_A_BIT; current |= VK_COLOR_COMPONENT_A_BIT;
} }
} }
scheduler.Record([setup_masks](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record([setup_masks](vk::CommandBuffer cmdbuf) {
cmdbuf.SetColorWriteMaskEXT(0, setup_masks); cmdbuf.SetColorWriteMaskEXT(0, setup_masks);
}); });
} }
@@ -1410,7 +1387,7 @@ void RasterizerVulkan::UpdateBlending(Tegra::Engines::Maxwell3D::Regs& regs) {
std::ranges::transform( std::ranges::transform(
regs.blend.enable, setup_enables.begin(), regs.blend.enable, setup_enables.begin(),
[&](const auto& is_enabled) { return is_enabled != 0 ? VK_TRUE : VK_FALSE; }); [&](const auto& is_enabled) { return is_enabled != 0 ? VK_TRUE : VK_FALSE; });
scheduler.Record([setup_enables](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record([setup_enables](vk::CommandBuffer cmdbuf) {
cmdbuf.SetColorBlendEnableEXT(0, setup_enables); cmdbuf.SetColorBlendEnableEXT(0, setup_enables);
}); });
} }
@@ -1433,7 +1410,7 @@ void RasterizerVulkan::UpdateBlending(Tegra::Engines::Maxwell3D::Regs& regs) {
} }
blend_setup(regs.blend_per_target[index]); blend_setup(regs.blend_per_target[index]);
} }
scheduler.Record([setup_blends](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record([setup_blends](vk::CommandBuffer cmdbuf) {
cmdbuf.SetColorBlendEquationEXT(0, setup_blends); cmdbuf.SetColorBlendEquationEXT(0, setup_blends);
}); });
} }
@@ -1443,7 +1420,7 @@ void RasterizerVulkan::UpdateStencilTestEnable(Tegra::Engines::Maxwell3D::Regs&
if (!state_tracker.TouchStencilTestEnable()) { if (!state_tracker.TouchStencilTestEnable()) {
return; return;
} }
scheduler.Record([enable = regs.stencil_enable](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record([enable = regs.stencil_enable](vk::CommandBuffer cmdbuf) {
cmdbuf.SetStencilTestEnableEXT(enable); cmdbuf.SetStencilTestEnableEXT(enable);
}); });
} }
@@ -1501,7 +1478,7 @@ void RasterizerVulkan::UpdateVertexInput(Tegra::Engines::Maxwell3D::Regs& regs)
.divisor = is_instanced ? input_binding.frequency : 1, .divisor = is_instanced ? input_binding.frequency : 1,
}); });
} }
scheduler.Record([bindings, attributes](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record([bindings, attributes](vk::CommandBuffer cmdbuf) {
cmdbuf.SetVertexInputEXT(bindings, attributes); cmdbuf.SetVertexInputEXT(bindings, attributes);
}); });
} }
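Every rasterizer hunk above makes the same mechanical change: the recorded lambda now takes a single vk::CommandBuffer, since dynamic-state updates never touch the upload command buffer. For orientation, the wrapper calls used here (SetCullModeEXT, SetFrontFaceEXT, and so on) map onto VK_EXT_extended_dynamic_state entry points, which the loader does not export directly; the sketch below is an assumption-laden illustration against the C API, not yuzu code:

#include <vulkan/vulkan.h>

// Illustrative only: extended-dynamic-state commands are fetched per device
// before they can be recorded into a command buffer.
static void SetExampleDynamicState(VkDevice device, VkCommandBuffer cmdbuf) {
    const auto set_cull_mode = reinterpret_cast<PFN_vkCmdSetCullModeEXT>(
        vkGetDeviceProcAddr(device, "vkCmdSetCullModeEXT"));
    const auto set_front_face = reinterpret_cast<PFN_vkCmdSetFrontFaceEXT>(
        vkGetDeviceProcAddr(device, "vkCmdSetFrontFaceEXT"));
    if (set_cull_mode == nullptr || set_front_face == nullptr) {
        return; // VK_EXT_extended_dynamic_state unavailable; bake this state into the pipeline.
    }
    set_cull_mode(cmdbuf, VK_CULL_MODE_BACK_BIT);
    set_front_face(cmdbuf, VK_FRONT_FACE_COUNTER_CLOCKWISE);
}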

View file

@@ -103,8 +103,7 @@ void Scheduler::RequestRenderpass(const Framebuffer* framebuffer) {
state.framebuffer = framebuffer_handle; state.framebuffer = framebuffer_handle;
state.render_area = render_area; state.render_area = render_area;
Record( Record([renderpass, framebuffer_handle, render_area](vk::CommandBuffer cmdbuf) {
[renderpass, framebuffer_handle, render_area](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
const VkRenderPassBeginInfo renderpass_bi{ const VkRenderPassBeginInfo renderpass_bi{
.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO, .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
.pNext = nullptr, .pNext = nullptr,
@@ -221,8 +220,8 @@ u64 Scheduler::SubmitExecution(VkSemaphore signal_semaphore, VkSemaphore wait_se
InvalidateState(); InvalidateState();
const u64 signal_value = master_semaphore->NextTick(); const u64 signal_value = master_semaphore->NextTick();
Record([signal_semaphore, wait_semaphore, signal_value, this](vk::CommandBuffer cmdbuf, RecordWithUploadBuffer([signal_semaphore, wait_semaphore, signal_value,
vk::CommandBuffer upload_cmdbuf) { this](vk::CommandBuffer cmdbuf, vk::CommandBuffer upload_cmdbuf) {
upload_cmdbuf.End(); upload_cmdbuf.End();
cmdbuf.End(); cmdbuf.End();
@@ -286,7 +285,7 @@ void Scheduler::EndRenderPass() {
return; return;
} }
Record([num_images = num_renderpass_images, images = renderpass_images, Record([num_images = num_renderpass_images, images = renderpass_images,
ranges = renderpass_image_ranges](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { ranges = renderpass_image_ranges](vk::CommandBuffer cmdbuf) {
std::array<VkImageMemoryBarrier, 9> barriers; std::array<VkImageMemoryBarrier, 9> barriers;
for (size_t i = 0; i < num_images; ++i) { for (size_t i = 0; i < num_images; ++i) {
barriers[i] = VkImageMemoryBarrier{ barriers[i] = VkImageMemoryBarrier{

View file

@@ -80,7 +80,8 @@ public:
/// Send work to a separate thread. /// Send work to a separate thread.
template <typename T> template <typename T>
void Record(T&& command) { requires std::is_invocable_v<T, vk::CommandBuffer, vk::CommandBuffer>
void RecordWithUploadBuffer(T&& command) {
if (chunk->Record(command)) { if (chunk->Record(command)) {
return; return;
} }
@@ -88,6 +89,15 @@ public:
(void)chunk->Record(command); (void)chunk->Record(command);
} }
template <typename T>
requires std::is_invocable_v<T, vk::CommandBuffer>
void Record(T&& c) {
this->RecordWithUploadBuffer(
[command = std::move(c)](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
command(cmdbuf);
});
}
/// Returns the current command buffer tick. /// Returns the current command buffer tick.
[[nodiscard]] u64 CurrentTick() const noexcept { [[nodiscard]] u64 CurrentTick() const noexcept {
return master_semaphore->CurrentTick(); return master_semaphore->CurrentTick();
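The header hunk above splits the scheduler's recording API: RecordWithUploadBuffer keeps the two-command-buffer form for the one caller that stages uploads (SubmitExecution, in the vk_scheduler.cpp hunk further up), while the new single-parameter Record wraps it and forwards only the main command buffer. A call-site sketch, assuming a Scheduler& named scheduler is in scope; the lambda bodies are illustrative:

// Common case: only the main command buffer is needed.
scheduler.Record([](vk::CommandBuffer cmdbuf) {
    cmdbuf.SetLineWidth(1.0f);
});

// Upload-aware case: both buffers are handed to the callback; callers that do
// not stage uploads can simply ignore the second parameter.
scheduler.RecordWithUploadBuffer([](vk::CommandBuffer cmdbuf, vk::CommandBuffer upload_cmdbuf) {
    // ... record staging transfers into upload_cmdbuf, then work that consumes them into cmdbuf ...
});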

View file

@@ -103,7 +103,7 @@ void UploadImage(const Device& device, MemoryAllocator& allocator, Scheduler& sc
}}}; }}};
scheduler.RequestOutsideRenderPassOperationContext(); scheduler.RequestOutsideRenderPassOperationContext();
scheduler.Record([&](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record([&](vk::CommandBuffer cmdbuf) {
TransitionImageLayout(cmdbuf, *image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, TransitionImageLayout(cmdbuf, *image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
VK_IMAGE_LAYOUT_UNDEFINED); VK_IMAGE_LAYOUT_UNDEFINED);
cmdbuf.CopyBufferToImage(*upload_buffer, *image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, cmdbuf.CopyBufferToImage(*upload_buffer, *image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
@@ -672,7 +672,7 @@ void SMAA::UploadImages(Scheduler& scheduler) {
UploadImage(m_device, m_allocator, scheduler, m_static_images[Search], search_extent, UploadImage(m_device, m_allocator, scheduler, m_static_images[Search], search_extent,
VK_FORMAT_R8_UNORM, ARRAY_TO_SPAN(searchTexBytes)); VK_FORMAT_R8_UNORM, ARRAY_TO_SPAN(searchTexBytes));
scheduler.Record([&](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record([&](vk::CommandBuffer cmdbuf) {
for (auto& images : m_dynamic_images) { for (auto& images : m_dynamic_images) {
for (size_t i = 0; i < MaxDynamicImage; i++) { for (size_t i = 0; i < MaxDynamicImage; i++) {
ClearColorImage(cmdbuf, *images.images[i]); ClearColorImage(cmdbuf, *images.images[i]);
@@ -707,7 +707,7 @@ VkImageView SMAA::Draw(Scheduler& scheduler, size_t image_index, VkImage source_
UpdateDescriptorSets(source_image_view, image_index); UpdateDescriptorSets(source_image_view, image_index);
scheduler.RequestOutsideRenderPassOperationContext(); scheduler.RequestOutsideRenderPassOperationContext();
scheduler.Record([=, this](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { scheduler.Record([=, this](vk::CommandBuffer cmdbuf) {
TransitionImageLayout(cmdbuf, source_image, VK_IMAGE_LAYOUT_GENERAL); TransitionImageLayout(cmdbuf, source_image, VK_IMAGE_LAYOUT_GENERAL);
TransitionImageLayout(cmdbuf, edges_image, VK_IMAGE_LAYOUT_GENERAL); TransitionImageLayout(cmdbuf, edges_image, VK_IMAGE_LAYOUT_GENERAL);
BeginRenderPass(cmdbuf, m_renderpasses[EdgeDetection], edge_detection_framebuffer, BeginRenderPass(cmdbuf, m_renderpasses[EdgeDetection], edge_detection_framebuffer,

View file

@@ -710,7 +710,7 @@ void BlitScale(Scheduler& scheduler, VkImage src_image, VkImage dst_image, const
scheduler.RequestOutsideRenderPassOperationContext(); scheduler.RequestOutsideRenderPassOperationContext();
scheduler.Record([dst_image, src_image, extent, resources, aspect_mask, resolution, is_2d, scheduler.Record([dst_image, src_image, extent, resources, aspect_mask, resolution, is_2d,
vk_filter, up_scaling](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { vk_filter, up_scaling](vk::CommandBuffer cmdbuf) {
const VkOffset2D src_size{ const VkOffset2D src_size{
.x = static_cast<s32>(up_scaling ? extent.width : resolution.ScaleUp(extent.width)), .x = static_cast<s32>(up_scaling ? extent.width : resolution.ScaleUp(extent.width)),
.y = static_cast<s32>(is_2d && up_scaling ? extent.height .y = static_cast<s32>(is_2d && up_scaling ? extent.height
@@ -956,7 +956,7 @@ void TextureCacheRuntime::ReinterpretImage(Image& dst, Image& src,
const VkImage src_image = src.Handle(); const VkImage src_image = src.Handle();
scheduler.RequestOutsideRenderPassOperationContext(); scheduler.RequestOutsideRenderPassOperationContext();
scheduler.Record([dst_image, src_image, copy_buffer, src_aspect_mask, dst_aspect_mask, scheduler.Record([dst_image, src_image, copy_buffer, src_aspect_mask, dst_aspect_mask,
vk_in_copies, vk_out_copies](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { vk_in_copies, vk_out_copies](vk::CommandBuffer cmdbuf) {
RangedBarrierRange dst_range; RangedBarrierRange dst_range;
RangedBarrierRange src_range; RangedBarrierRange src_range;
for (const VkBufferImageCopy& copy : vk_in_copies) { for (const VkBufferImageCopy& copy : vk_in_copies) {
@@ -1105,7 +1105,7 @@ void TextureCacheRuntime::BlitImage(Framebuffer* dst_framebuffer, ImageView& dst
const bool is_resolve = is_src_msaa && !is_dst_msaa; const bool is_resolve = is_src_msaa && !is_dst_msaa;
scheduler.RequestOutsideRenderPassOperationContext(); scheduler.RequestOutsideRenderPassOperationContext();
scheduler.Record([filter, dst_region, src_region, dst_image, src_image, dst_layers, src_layers, scheduler.Record([filter, dst_region, src_region, dst_image, src_image, dst_layers, src_layers,
aspect_mask, is_resolve](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { aspect_mask, is_resolve](vk::CommandBuffer cmdbuf) {
const std::array read_barriers{ const std::array read_barriers{
VkImageMemoryBarrier{ VkImageMemoryBarrier{
.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
@@ -1244,8 +1244,7 @@ void TextureCacheRuntime::CopyImage(Image& dst, Image& src,
const VkImage dst_image = dst.Handle(); const VkImage dst_image = dst.Handle();
const VkImage src_image = src.Handle(); const VkImage src_image = src.Handle();
scheduler.RequestOutsideRenderPassOperationContext(); scheduler.RequestOutsideRenderPassOperationContext();
scheduler.Record([dst_image, src_image, aspect_mask, vk_copies](vk::CommandBuffer cmdbuf, scheduler.Record([dst_image, src_image, aspect_mask, vk_copies](vk::CommandBuffer cmdbuf) {
vk::CommandBuffer) {
RangedBarrierRange dst_range; RangedBarrierRange dst_range;
RangedBarrierRange src_range; RangedBarrierRange src_range;
for (const VkImageCopy& copy : vk_copies) { for (const VkImageCopy& copy : vk_copies) {
@@ -1407,7 +1406,7 @@ void Image::UploadMemory(VkBuffer buffer, VkDeviceSize offset,
const VkImageAspectFlags vk_aspect_mask = aspect_mask; const VkImageAspectFlags vk_aspect_mask = aspect_mask;
const bool is_initialized = std::exchange(initialized, true); const bool is_initialized = std::exchange(initialized, true);
scheduler->Record([src_buffer, vk_image, vk_aspect_mask, is_initialized, scheduler->Record([src_buffer, vk_image, vk_aspect_mask, is_initialized,
vk_copies](vk::CommandBuffer cmdbuf, vk::CommandBuffer) { vk_copies](vk::CommandBuffer cmdbuf) {
CopyBufferToImage(cmdbuf, src_buffer, vk_image, vk_aspect_mask, is_initialized, vk_copies); CopyBufferToImage(cmdbuf, src_buffer, vk_image, vk_aspect_mask, is_initialized, vk_copies);
}); });
if (is_rescaled) { if (is_rescaled) {
@@ -1446,8 +1445,7 @@ void Image::DownloadMemory(std::span<VkBuffer> buffers_span, std::span<VkDeviceS
} }
scheduler->RequestOutsideRenderPassOperationContext(); scheduler->RequestOutsideRenderPassOperationContext();
scheduler->Record([buffers = std::move(buffers_vector), image = *original_image, scheduler->Record([buffers = std::move(buffers_vector), image = *original_image,
aspect_mask_ = aspect_mask, aspect_mask_ = aspect_mask, vk_copies](vk::CommandBuffer cmdbuf) {
vk_copies](vk::CommandBuffer cmdbuf, vk::CommandBuffer) {
const VkImageMemoryBarrier read_barrier{ const VkImageMemoryBarrier read_barrier{
.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
.pNext = nullptr, .pNext = nullptr,
@@ -2015,4 +2013,32 @@ void TextureCacheRuntime::AccelerateImageUpload(
ASSERT(false); ASSERT(false);
} }
void TextureCacheRuntime::TransitionImageLayout(Image& image) {
if (!image.ExchangeInitialization()) {
VkImageMemoryBarrier barrier{
.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
.pNext = nullptr,
.srcAccessMask = VK_ACCESS_NONE,
.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED,
.newLayout = VK_IMAGE_LAYOUT_GENERAL,
.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
.image = image.Handle(),
.subresourceRange{
.aspectMask = image.AspectMask(),
.baseMipLevel = 0,
.levelCount = VK_REMAINING_MIP_LEVELS,
.baseArrayLayer = 0,
.layerCount = VK_REMAINING_ARRAY_LAYERS,
},
};
scheduler.RequestOutsideRenderPassOperationContext();
scheduler.Record([barrier = barrier](vk::CommandBuffer cmdbuf) {
cmdbuf.PipelineBarrier(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, barrier);
});
}
}
} // namespace Vulkan } // namespace Vulkan
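The TransitionImageLayout helper added above moves a not-yet-initialized image out of VK_IMAGE_LAYOUT_UNDEFINED into VK_IMAGE_LAYOUT_GENERAL across all mips and layers; RefreshContents calls it (last hunk below) so an MSAA image whose upload is skipped still ends up in a defined layout. A minimal standalone equivalent against the raw Vulkan C API, with hypothetical names:

#include <vulkan/vulkan.h>

// Hypothetical free-function variant of the transition recorded above:
// UNDEFINED -> GENERAL over every mip level and array layer of the image.
static void TransitionToGeneral(VkCommandBuffer cmdbuf, VkImage image,
                                VkImageAspectFlags aspect_mask) {
    const VkImageMemoryBarrier barrier{
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = nullptr,
        .srcAccessMask = VK_ACCESS_NONE,
        .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
        .oldLayout = VK_IMAGE_LAYOUT_UNDEFINED,
        .newLayout = VK_IMAGE_LAYOUT_GENERAL,
        .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .image = image,
        .subresourceRange{
            .aspectMask = aspect_mask,
            .baseMipLevel = 0,
            .levelCount = VK_REMAINING_MIP_LEVELS,
            .baseArrayLayer = 0,
            .layerCount = VK_REMAINING_ARRAY_LAYERS,
        },
    };
    vkCmdPipelineBarrier(cmdbuf, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                         VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, nullptr, 0, nullptr, 1,
                         &barrier);
}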

View file

@@ -92,6 +92,8 @@ public:
void InsertUploadMemoryBarrier() {} void InsertUploadMemoryBarrier() {}
void TransitionImageLayout(Image& image);
bool HasBrokenTextureViewFormats() const noexcept { bool HasBrokenTextureViewFormats() const noexcept {
// No known Vulkan driver has broken image views // No known Vulkan driver has broken image views
return false; return false;

View file

@@ -1016,6 +1016,7 @@ void TextureCache<P>::RefreshContents(Image& image, ImageId image_id) {
if (image.info.num_samples > 1 && !runtime.CanUploadMSAA()) { if (image.info.num_samples > 1 && !runtime.CanUploadMSAA()) {
LOG_WARNING(HW_GPU, "MSAA image uploads are not implemented"); LOG_WARNING(HW_GPU, "MSAA image uploads are not implemented");
runtime.TransitionImageLayout(image);
return; return;
} }
if (True(image.flags & ImageFlagBits::AsynchronousDecode)) { if (True(image.flags & ImageFlagBits::AsynchronousDecode)) {