Formatting fixes only

- Mostly remove camelCase usage in old code.
- Properly annotate Vulkan API imports with a _vk prefix to make it clear they are not regular variables (the pattern is illustrated in the sketch below).
This commit is contained in:
kd-11 2021-02-22 22:54:25 +03:00 committed by kd-11
parent 3063369322
commit d459da1378
7 changed files with 52 additions and 52 deletions
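As a brief illustration of the convention (this sketch is not part of the changed files; the class name example_device and the helper import_optional_endpoints are made-up names, while the PFN_* types and vkGetDeviceProcAddr are the real Vulkan symbols used in the diff), an extension entry point is stored in a _vk-prefixed member and resolved at runtime:

// Minimal sketch of the naming convention adopted by this commit.
// example_device and import_optional_endpoints are hypothetical names, for illustration only.
#include <vulkan/vulkan.h>

class example_device
{
public:
    // Dynamically imported extension entry points carry a _vk prefix so they read as
    // raw API function pointers rather than ordinary member variables.
    PFN_vkCmdBeginConditionalRenderingEXT _vkCmdBeginConditionalRenderingEXT = nullptr;
    PFN_vkCmdEndConditionalRenderingEXT _vkCmdEndConditionalRenderingEXT = nullptr;

    void import_optional_endpoints(VkDevice dev)
    {
        // Extension commands are not exported by the Vulkan loader, so they are
        // resolved through vkGetDeviceProcAddr and kept as member function pointers.
        _vkCmdBeginConditionalRenderingEXT = reinterpret_cast<PFN_vkCmdBeginConditionalRenderingEXT>(
            vkGetDeviceProcAddr(dev, "vkCmdBeginConditionalRenderingEXT"));
        _vkCmdEndConditionalRenderingEXT = reinterpret_cast<PFN_vkCmdEndConditionalRenderingEXT>(
            vkGetDeviceProcAddr(dev, "vkCmdEndConditionalRenderingEXT"));
    }
};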

@@ -835,7 +835,7 @@ void VKGSRender::emit_geometry(u32 sub_index)
         info.sType = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT;
         info.buffer = m_cond_render_buffer->value;
-        m_device->cmdBeginConditionalRenderingEXT(*m_current_command_buffer, &info);
+        m_device->_vkCmdBeginConditionalRenderingEXT(*m_current_command_buffer, &info);
         m_current_command_buffer->flags |= vk::command_buffer::cb_has_conditional_render;
     }
 }
@@ -1029,7 +1029,7 @@ void VKGSRender::end()
 if (m_current_command_buffer->flags & vk::command_buffer::cb_has_conditional_render)
 {
-    m_device->cmdEndConditionalRenderingEXT(*m_current_command_buffer);
+    m_device->_vkCmdEndConditionalRenderingEXT(*m_current_command_buffer);
     m_current_command_buffer->flags &= ~(vk::command_buffer::cb_has_conditional_render);
 }

@@ -1971,7 +1971,7 @@ void VKGSRender::close_and_submit_command_buffer(vk::fence* pFence, VkSemaphore
 if (m_current_command_buffer->flags & vk::command_buffer::cb_has_conditional_render)
 {
     ensure(m_render_pass_open);
-    m_device->cmdEndConditionalRenderingEXT(*m_current_command_buffer);
+    m_device->_vkCmdEndConditionalRenderingEXT(*m_current_command_buffer);
 }
 #endif

@@ -87,11 +87,11 @@ namespace vk
 u32 memory_type_index = memory_map.host_visible_coherent;
 VkFlags access_flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
-ensure(memory_map.getMemoryHostPointerPropertiesEXT);
+ensure(memory_map._vkGetMemoryHostPointerPropertiesEXT);
 VkMemoryHostPointerPropertiesEXT memory_properties{};
 memory_properties.sType = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT;
-CHECK_RESULT(memory_map.getMemoryHostPointerPropertiesEXT(dev, VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT, host_pointer, &memory_properties));
+CHECK_RESULT(memory_map._vkGetMemoryHostPointerPropertiesEXT(dev, VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT, host_pointer, &memory_properties));
 VkMemoryRequirements memory_reqs;
 vkGetBufferMemoryRequirements(m_device, value, &memory_reqs);

@@ -44,9 +44,9 @@ namespace vk
     features2.pNext = &driver_properties;
 }
-auto getPhysicalDeviceFeatures2KHR = reinterpret_cast<PFN_vkGetPhysicalDeviceFeatures2KHR>(vkGetInstanceProcAddr(parent, "vkGetPhysicalDeviceFeatures2KHR"));
-ensure(getPhysicalDeviceFeatures2KHR); // "vkGetInstanceProcAddress failed to find entry point!"
-getPhysicalDeviceFeatures2KHR(dev, &features2);
+auto _vkGetPhysicalDeviceFeatures2KHR = reinterpret_cast<PFN_vkGetPhysicalDeviceFeatures2KHR>(vkGetInstanceProcAddr(parent, "vkGetPhysicalDeviceFeatures2KHR"));
+ensure(_vkGetPhysicalDeviceFeatures2KHR); // "vkGetInstanceProcAddress failed to find entry point!"
+_vkGetPhysicalDeviceFeatures2KHR(dev, &features2);
 shader_types_support.allow_float64 = !!features2.features.shaderFloat64;
 shader_types_support.allow_float16 = !!shader_support_info.shaderFloat16;
@ -417,8 +417,8 @@ namespace vk
// Import optional function endpoints // Import optional function endpoints
if (pgpu->conditional_render_support) if (pgpu->conditional_render_support)
{ {
cmdBeginConditionalRenderingEXT = reinterpret_cast<PFN_vkCmdBeginConditionalRenderingEXT>(vkGetDeviceProcAddr(dev, "vkCmdBeginConditionalRenderingEXT")); _vkCmdBeginConditionalRenderingEXT = reinterpret_cast<PFN_vkCmdBeginConditionalRenderingEXT>(vkGetDeviceProcAddr(dev, "vkCmdBeginConditionalRenderingEXT"));
cmdEndConditionalRenderingEXT = reinterpret_cast<PFN_vkCmdEndConditionalRenderingEXT>(vkGetDeviceProcAddr(dev, "vkCmdEndConditionalRenderingEXT")); _vkCmdEndConditionalRenderingEXT = reinterpret_cast<PFN_vkCmdEndConditionalRenderingEXT>(vkGetDeviceProcAddr(dev, "vkCmdEndConditionalRenderingEXT"));
} }
memory_map = vk::get_memory_mapping(pdev); memory_map = vk::get_memory_mapping(pdev);
@@ -427,7 +427,7 @@ namespace vk
 if (pgpu->external_memory_host_support)
 {
-    memory_map.getMemoryHostPointerPropertiesEXT = reinterpret_cast<PFN_vkGetMemoryHostPointerPropertiesEXT>(vkGetDeviceProcAddr(dev, "vkGetMemoryHostPointerPropertiesEXT"));
+    memory_map._vkGetMemoryHostPointerPropertiesEXT = reinterpret_cast<PFN_vkGetMemoryHostPointerPropertiesEXT>(vkGetDeviceProcAddr(dev, "vkGetMemoryHostPointerPropertiesEXT"));
 }
 if (g_cfg.video.disable_vulkan_mem_allocator)

@@ -31,7 +31,7 @@ namespace vk
     u32 host_visible_coherent;
     u32 device_local;
-    PFN_vkGetMemoryHostPointerPropertiesEXT getMemoryHostPointerPropertiesEXT;
+    PFN_vkGetMemoryHostPointerPropertiesEXT _vkGetMemoryHostPointerPropertiesEXT;
 };
 class physical_device
@ -99,8 +99,8 @@ namespace vk
public: public:
// Exported device endpoints // Exported device endpoints
PFN_vkCmdBeginConditionalRenderingEXT cmdBeginConditionalRenderingEXT = nullptr; PFN_vkCmdBeginConditionalRenderingEXT _vkCmdBeginConditionalRenderingEXT = nullptr;
PFN_vkCmdEndConditionalRenderingEXT cmdEndConditionalRenderingEXT = nullptr; PFN_vkCmdEndConditionalRenderingEXT _vkCmdEndConditionalRenderingEXT = nullptr;
public: public:
render_device() = default; render_device() = default;

@@ -59,8 +59,8 @@ namespace vk
 VkInstance m_instance = VK_NULL_HANDLE;
 VkSurfaceKHR m_surface = VK_NULL_HANDLE;
-PFN_vkDestroyDebugReportCallbackEXT destroyDebugReportCallback = nullptr;
-PFN_vkCreateDebugReportCallbackEXT createDebugReportCallback = nullptr;
+PFN_vkDestroyDebugReportCallbackEXT _vkDestroyDebugReportCallback = nullptr;
+PFN_vkCreateDebugReportCallbackEXT _vkCreateDebugReportCallback = nullptr;
 VkDebugReportCallbackEXT m_debugger = nullptr;
 bool extensions_loaded = false;
@@ -83,7 +83,7 @@ namespace vk
 if (m_debugger)
 {
-    destroyDebugReportCallback(m_instance, m_debugger, nullptr);
+    _vkDestroyDebugReportCallback(m_instance, m_debugger, nullptr);
     m_debugger = nullptr;
 }
@@ -103,15 +103,15 @@ namespace vk
     PFN_vkDebugReportCallbackEXT callback = vk::dbgFunc;
-    createDebugReportCallback = reinterpret_cast<PFN_vkCreateDebugReportCallbackEXT>(vkGetInstanceProcAddr(m_instance, "vkCreateDebugReportCallbackEXT"));
-    destroyDebugReportCallback = reinterpret_cast<PFN_vkDestroyDebugReportCallbackEXT>(vkGetInstanceProcAddr(m_instance, "vkDestroyDebugReportCallbackEXT"));
+    _vkCreateDebugReportCallback = reinterpret_cast<PFN_vkCreateDebugReportCallbackEXT>(vkGetInstanceProcAddr(m_instance, "vkCreateDebugReportCallbackEXT"));
+    _vkDestroyDebugReportCallback = reinterpret_cast<PFN_vkDestroyDebugReportCallbackEXT>(vkGetInstanceProcAddr(m_instance, "vkDestroyDebugReportCallbackEXT"));
     VkDebugReportCallbackCreateInfoEXT dbgCreateInfo = {};
     dbgCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
     dbgCreateInfo.pfnCallback = callback;
     dbgCreateInfo.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT;
-    CHECK_RESULT(createDebugReportCallback(m_instance, &dbgCreateInfo, NULL, &m_debugger));
+    CHECK_RESULT(_vkCreateDebugReportCallback(m_instance, &dbgCreateInfo, NULL, &m_debugger));
 }
 #ifdef __clang__
 #pragma clang diagnostic push
@@ -217,7 +217,7 @@ namespace vk
 // Register some global states
 if (m_debugger)
 {
-    destroyDebugReportCallback(m_instance, m_debugger, nullptr);
+    _vkDestroyDebugReportCallback(m_instance, m_debugger, nullptr);
     m_debugger = nullptr;
 }
@@ -315,9 +315,9 @@ namespace vk
     vkGetPhysicalDeviceSurfaceSupportKHR(dev, index, m_surface, &supports_present[index]);
 }
-u32 graphicsQueueNodeIndex = UINT32_MAX;
-u32 presentQueueNodeIndex = UINT32_MAX;
-u32 transferQueueNodeIndex = UINT32_MAX;
+u32 graphics_queue_idx = UINT32_MAX;
+u32 present_queue_idx = UINT32_MAX;
+u32 transfer_queue_idx = UINT32_MAX;
 auto test_queue_family = [&](u32 index, u32 desired_flags)
 {
@@ -333,37 +333,37 @@ namespace vk
 for (u32 i = 0; i < device_queues; ++i)
 {
     // 1. Test for a present queue possibly one that also supports present
-    if (presentQueueNodeIndex == UINT32_MAX && supports_present[i])
+    if (present_queue_idx == UINT32_MAX && supports_present[i])
     {
-        presentQueueNodeIndex = i;
+        present_queue_idx = i;
         if (test_queue_family(i, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT))
         {
-            graphicsQueueNodeIndex = i;
+            graphics_queue_idx = i;
         }
     }
     // 2. Check for graphics support
-    else if (graphicsQueueNodeIndex == UINT32_MAX && test_queue_family(i, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT))
+    else if (graphics_queue_idx == UINT32_MAX && test_queue_family(i, VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT))
     {
-        graphicsQueueNodeIndex = i;
+        graphics_queue_idx = i;
         if (supports_present[i])
         {
-            presentQueueNodeIndex = i;
+            present_queue_idx = i;
         }
     }
     // 3. Check if transfer + compute is available
-    else if (transferQueueNodeIndex == UINT32_MAX && test_queue_family(i, VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT))
+    else if (transfer_queue_idx == UINT32_MAX && test_queue_family(i, VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT))
     {
-        transferQueueNodeIndex = i;
+        transfer_queue_idx = i;
     }
 }
-if (graphicsQueueNodeIndex == UINT32_MAX)
+if (graphics_queue_idx == UINT32_MAX)
 {
     rsx_log.fatal("Failed to find a suitable graphics queue");
     return nullptr;
 }
-if (graphicsQueueNodeIndex != presentQueueNodeIndex)
+if (graphics_queue_idx != present_queue_idx)
 {
     // Separate graphics and present, use headless fallback
     present_possible = false;
@@ -374,7 +374,7 @@ namespace vk
     //Native(sw) swapchain
     rsx_log.error("It is not possible for the currently selected GPU to present to the window (Likely caused by NVIDIA driver running the current display)");
     rsx_log.warning("Falling back to software present support (native windowing API)");
-    auto swapchain = new swapchain_NATIVE(dev, UINT32_MAX, graphicsQueueNodeIndex, transferQueueNodeIndex);
+    auto swapchain = new swapchain_NATIVE(dev, UINT32_MAX, graphics_queue_idx, transfer_queue_idx);
     swapchain->create(window_handle);
     return swapchain;
 }
@@ -411,7 +411,7 @@ namespace vk
         color_space = surfFormats[0].colorSpace;
-        return new swapchain_WSI(dev, presentQueueNodeIndex, graphicsQueueNodeIndex, transferQueueNodeIndex, format, m_surface, color_space, force_wm_reporting_off);
+        return new swapchain_WSI(dev, present_queue_idx, graphics_queue_idx, transfer_queue_idx, format, m_surface, color_space, force_wm_reporting_off);
     }
 };
 }

@@ -475,11 +475,11 @@ namespace vk
 VkColorSpaceKHR m_color_space = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
 VkSwapchainKHR m_vk_swapchain = nullptr;
-PFN_vkCreateSwapchainKHR createSwapchainKHR = nullptr;
-PFN_vkDestroySwapchainKHR destroySwapchainKHR = nullptr;
-PFN_vkGetSwapchainImagesKHR getSwapchainImagesKHR = nullptr;
-PFN_vkAcquireNextImageKHR acquireNextImageKHR = nullptr;
-PFN_vkQueuePresentKHR queuePresentKHR = nullptr;
+PFN_vkCreateSwapchainKHR _vkCreateSwapchainKHR = nullptr;
+PFN_vkDestroySwapchainKHR _vkDestroySwapchainKHR = nullptr;
+PFN_vkGetSwapchainImagesKHR _vkGetSwapchainImagesKHR = nullptr;
+PFN_vkAcquireNextImageKHR _vkAcquireNextImageKHR = nullptr;
+PFN_vkQueuePresentKHR _vkQueuePresentKHR = nullptr;
 bool m_wm_reports_flag = false;
@@ -487,13 +487,13 @@ namespace vk
 void init_swapchain_images(render_device& dev, u32 /*preferred_count*/ = 0) override
 {
     u32 nb_swap_images = 0;
-    getSwapchainImagesKHR(dev, m_vk_swapchain, &nb_swap_images, nullptr);
+    _vkGetSwapchainImagesKHR(dev, m_vk_swapchain, &nb_swap_images, nullptr);
     if (!nb_swap_images) fmt::throw_exception("Driver returned 0 images for swapchain");
     std::vector<VkImage> vk_images;
     vk_images.resize(nb_swap_images);
-    getSwapchainImagesKHR(dev, m_vk_swapchain, &nb_swap_images, vk_images.data());
+    _vkGetSwapchainImagesKHR(dev, m_vk_swapchain, &nb_swap_images, vk_images.data());
     swapchain_images.resize(nb_swap_images);
     for (u32 i = 0; i < nb_swap_images; ++i)
@@ -506,11 +506,11 @@ namespace vk
 swapchain_WSI(vk::physical_device& gpu, u32 present_queue, u32 graphics_queue, u32 transfer_queue, VkFormat format, VkSurfaceKHR surface, VkColorSpaceKHR color_space, bool force_wm_reporting_off)
     : WSI_swapchain_base(gpu, present_queue, graphics_queue, transfer_queue, format)
 {
-    createSwapchainKHR = reinterpret_cast<PFN_vkCreateSwapchainKHR>(vkGetDeviceProcAddr(dev, "vkCreateSwapchainKHR"));
-    destroySwapchainKHR = reinterpret_cast<PFN_vkDestroySwapchainKHR>(vkGetDeviceProcAddr(dev, "vkDestroySwapchainKHR"));
-    getSwapchainImagesKHR = reinterpret_cast<PFN_vkGetSwapchainImagesKHR>(vkGetDeviceProcAddr(dev, "vkGetSwapchainImagesKHR"));
-    acquireNextImageKHR = reinterpret_cast<PFN_vkAcquireNextImageKHR>(vkGetDeviceProcAddr(dev, "vkAcquireNextImageKHR"));
-    queuePresentKHR = reinterpret_cast<PFN_vkQueuePresentKHR>(vkGetDeviceProcAddr(dev, "vkQueuePresentKHR"));
+    _vkCreateSwapchainKHR = reinterpret_cast<PFN_vkCreateSwapchainKHR>(vkGetDeviceProcAddr(dev, "vkCreateSwapchainKHR"));
+    _vkDestroySwapchainKHR = reinterpret_cast<PFN_vkDestroySwapchainKHR>(vkGetDeviceProcAddr(dev, "vkDestroySwapchainKHR"));
+    _vkGetSwapchainImagesKHR = reinterpret_cast<PFN_vkGetSwapchainImagesKHR>(vkGetDeviceProcAddr(dev, "vkGetSwapchainImagesKHR"));
+    _vkAcquireNextImageKHR = reinterpret_cast<PFN_vkAcquireNextImageKHR>(vkGetDeviceProcAddr(dev, "vkAcquireNextImageKHR"));
+    _vkQueuePresentKHR = reinterpret_cast<PFN_vkQueuePresentKHR>(vkGetDeviceProcAddr(dev, "vkQueuePresentKHR"));
     m_surface = surface;
     m_color_space = color_space;
@@ -546,7 +546,7 @@ namespace vk
 {
     if (m_vk_swapchain)
     {
-        destroySwapchainKHR(pdev, m_vk_swapchain, nullptr);
+        _vkDestroySwapchainKHR(pdev, m_vk_swapchain, nullptr);
     }
     dev.destroy();
@@ -729,7 +729,7 @@ namespace vk
     rsx_log.notice("Swapchain: requesting full screen exclusive mode %d.", static_cast<int>(full_screen_exclusive_info.fullScreenExclusive));
 #endif
-    createSwapchainKHR(dev, &swap_info, nullptr, &m_vk_swapchain);
+    _vkCreateSwapchainKHR(dev, &swap_info, nullptr, &m_vk_swapchain);
     if (old_swapchain)
     {
@@ -738,7 +738,7 @@ namespace vk
         swapchain_images.clear();
     }
-    destroySwapchainKHR(dev, old_swapchain, nullptr);
+    _vkDestroySwapchainKHR(dev, old_swapchain, nullptr);
 }
 init_swapchain_images(dev);
@@ -770,7 +770,7 @@ namespace vk
     present.waitSemaphoreCount = 1;
     present.pWaitSemaphores = &semaphore;
-    return queuePresentKHR(dev.get_present_queue(), &present);
+    return _vkQueuePresentKHR(dev.get_present_queue(), &present);
 }
 VkImage get_image(u32 index) override