From 500b49d32911bebc3b7cd0423d9aef33aaabec8f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=97=B1=20PixelyIon?= Date: Mon, 23 Mar 2020 21:42:37 +0530 Subject: [PATCH] Add Vulkan-Hpp to libraries This commit adds Vulkan-Hpp as a library to the project. The headers are from a modified version of `VulkanHppGenerator`. They are broken into multiple files to avoid exceeding the Intellisense file size limit of Android Studio. --- app/CMakeLists.txt | 6 +- .../VulkanHpp/vulkan/internal/base.hpp | 778 ++ .../VulkanHpp/vulkan/internal/bitmasks.hpp | 3615 +++++++ .../VulkanHpp/vulkan/internal/cmds/Vk.hpp | 185 + .../cmds/VkAccelerationStructureNV.hpp | 40 + .../vulkan/internal/cmds/VkBuffer.hpp | 40 + .../vulkan/internal/cmds/VkBufferView.hpp | 40 + .../vulkan/internal/cmds/VkCommandBuffer.hpp | 1393 +++ .../vulkan/internal/cmds/VkCommandPool.hpp | 40 + .../cmds/VkDebugReportCallbackEXT.hpp | 40 + .../cmds/VkDebugUtilsMessengerEXT.hpp | 40 + .../vulkan/internal/cmds/VkDescriptorPool.hpp | 40 + .../vulkan/internal/cmds/VkDescriptorSet.hpp | 40 + .../internal/cmds/VkDescriptorSetLayout.hpp | 40 + .../cmds/VkDescriptorUpdateTemplate.hpp | 40 + .../vulkan/internal/cmds/VkDevice.hpp | 3996 ++++++++ .../vulkan/internal/cmds/VkDeviceMemory.hpp | 40 + .../vulkan/internal/cmds/VkDisplayKHR.hpp | 40 + .../vulkan/internal/cmds/VkDisplayModeKHR.hpp | 40 + .../vulkan/internal/cmds/VkEvent.hpp | 40 + .../vulkan/internal/cmds/VkFence.hpp | 40 + .../vulkan/internal/cmds/VkFramebuffer.hpp | 40 + .../vulkan/internal/cmds/VkImage.hpp | 40 + .../vulkan/internal/cmds/VkImageView.hpp | 40 + .../cmds/VkIndirectCommandsLayoutNVX.hpp | 40 + .../vulkan/internal/cmds/VkInstance.hpp | 746 ++ .../vulkan/internal/cmds/VkObjectTableNVX.hpp | 40 + .../cmds/VkPerformanceConfigurationINTEL.hpp | 40 + .../vulkan/internal/cmds/VkPhysicalDevice.hpp | 1859 ++++ .../vulkan/internal/cmds/VkPipeline.hpp | 40 + .../vulkan/internal/cmds/VkPipelineCache.hpp | 40 + .../vulkan/internal/cmds/VkPipelineLayout.hpp | 40 + .../vulkan/internal/cmds/VkQueryPool.hpp | 40 + .../vulkan/internal/cmds/VkQueue.hpp | 180 + .../vulkan/internal/cmds/VkRenderPass.hpp | 40 + .../vulkan/internal/cmds/VkSampler.hpp | 40 + .../cmds/VkSamplerYcbcrConversion.hpp | 40 + .../vulkan/internal/cmds/VkSemaphore.hpp | 40 + .../vulkan/internal/cmds/VkShaderModule.hpp | 40 + .../vulkan/internal/cmds/VkSurfaceKHR.hpp | 40 + .../vulkan/internal/cmds/VkSwapchainKHR.hpp | 40 + .../internal/cmds/VkValidationCacheEXT.hpp | 40 + .../VulkanHpp/vulkan/internal/commands.hpp | 74 + .../VulkanHpp/vulkan/internal/dispatch.hpp | 1968 ++++ .../vulkan/internal/dynamicDispatch.hpp | 1640 +++ .../VulkanHpp/vulkan/internal/enums.hpp | 5213 ++++++++++ .../VulkanHpp/vulkan/internal/exceptions.hpp | 497 + .../VulkanHpp/vulkan/internal/forward.hpp | 655 ++ .../VulkanHpp/vulkan/internal/handles.hpp | 6132 +++++++++++ .../VulkanHpp/vulkan/internal/objects.hpp | 251 + .../vulkan/internal/struct/VkAcceleration.hpp | 789 ++ .../vulkan/internal/struct/VkAcquire.hpp | 158 + .../vulkan/internal/struct/VkAllocation.hpp | 159 + .../vulkan/internal/struct/VkAndroid.hpp | 458 + .../vulkan/internal/struct/VkApplication.hpp | 161 + .../vulkan/internal/struct/VkAttachment.hpp | 857 ++ .../vulkan/internal/struct/VkBase.hpp | 174 + .../vulkan/internal/struct/VkBind.hpp | 1778 ++++ .../vulkan/internal/struct/VkBuffer.hpp | 956 ++ .../vulkan/internal/struct/VkCalibrated.hpp | 122 + .../vulkan/internal/struct/VkCheckpoint.hpp | 116 + .../vulkan/internal/struct/VkClear.hpp | 394 + .../vulkan/internal/struct/VkCmd.hpp | 
406 + .../vulkan/internal/struct/VkCoarse.hpp | 234 + .../vulkan/internal/struct/VkCommand.hpp | 523 + .../vulkan/internal/struct/VkComponent.hpp | 54 + .../vulkan/internal/struct/VkCompute.hpp | 472 + .../vulkan/internal/struct/VkConditional.hpp | 152 + .../vulkan/internal/struct/VkConformance.hpp | 153 + .../vulkan/internal/struct/VkCooperative.hpp | 209 + .../vulkan/internal/struct/VkCopy.hpp | 199 + .../vulkan/internal/struct/VkD3D.hpp | 169 + .../vulkan/internal/struct/VkDebug.hpp | 1055 ++ .../vulkan/internal/struct/VkDedicated.hpp | 296 + .../vulkan/internal/struct/VkDescriptor.hpp | 1351 +++ .../vulkan/internal/struct/VkDevice.hpp | 2200 ++++ .../vulkan/internal/struct/VkDispatch.hpp | 150 + .../vulkan/internal/struct/VkDisplay.hpp | 1324 +++ .../vulkan/internal/struct/VkDraw.hpp | 345 + .../vulkan/internal/struct/VkDrm.hpp | 203 + .../vulkan/internal/struct/VkEvent.hpp | 143 + .../vulkan/internal/struct/VkExport.hpp | 748 ++ .../vulkan/internal/struct/VkExtension.hpp | 137 + .../vulkan/internal/struct/VkExtent.hpp | 73 + .../vulkan/internal/struct/VkExternal.hpp | 856 ++ .../vulkan/internal/struct/VkFence.hpp | 321 + .../vulkan/internal/struct/VkFilter.hpp | 142 + .../vulkan/internal/struct/VkFormat.hpp | 206 + .../vulkan/internal/struct/VkFramebuffer.hpp | 520 + .../vulkan/internal/struct/VkGeometry.hpp | 79 + .../vulkan/internal/struct/VkGraphics.hpp | 2040 ++++ .../vulkan/internal/struct/VkHdr.hpp | 305 + .../vulkan/internal/struct/VkHeadless.hpp | 155 + .../VulkanHpp/vulkan/internal/struct/VkI.hpp | 169 + .../vulkan/internal/struct/VkImage.hpp | 2161 ++++ .../vulkan/internal/struct/VkImport.hpp | 970 ++ .../vulkan/internal/struct/VkIndirect.hpp | 289 + .../vulkan/internal/struct/VkInitialize.hpp | 160 + .../vulkan/internal/struct/VkInput.hpp | 172 + .../vulkan/internal/struct/VkInstance.hpp | 217 + .../vulkan/internal/struct/VkLayer.hpp | 167 + .../vulkan/internal/struct/VkMac.hpp | 177 + .../vulkan/internal/struct/VkMapped.hpp | 187 + .../vulkan/internal/struct/VkMemory.hpp | 1263 +++ .../vulkan/internal/struct/VkMetal.hpp | 180 + .../vulkan/internal/struct/VkMultisample.hpp | 156 + .../vulkan/internal/struct/VkObject.hpp | 831 ++ .../vulkan/internal/struct/VkOffset.hpp | 97 + .../vulkan/internal/struct/VkPast.hpp | 174 + .../vulkan/internal/struct/VkPerformance.hpp | 564 ++ .../vulkan/internal/struct/VkPhysical.hpp | 8924 +++++++++++++++++ .../vulkan/internal/struct/VkPipeline.hpp | 3258 ++++++ .../vulkan/internal/struct/VkPresent.hpp | 717 ++ .../vulkan/internal/struct/VkProtected.hpp | 176 + .../vulkan/internal/struct/VkPush.hpp | 103 + .../vulkan/internal/struct/VkQuery.hpp | 286 + .../vulkan/internal/struct/VkQueue.hpp | 302 + .../vulkan/internal/struct/VkRay.hpp | 387 + .../vulkan/internal/struct/VkRect.hpp | 108 + .../vulkan/internal/struct/VkRefresh.hpp | 165 + .../vulkan/internal/struct/VkRender.hpp | 1710 ++++ .../vulkan/internal/struct/VkSample.hpp | 111 + .../vulkan/internal/struct/VkSampler.hpp | 712 ++ .../vulkan/internal/struct/VkSemaphore.hpp | 359 + .../vulkan/internal/struct/VkShader.hpp | 452 + .../vulkan/internal/struct/VkShading.hpp | 114 + .../vulkan/internal/struct/VkShared.hpp | 177 + .../vulkan/internal/struct/VkSparse.hpp | 385 + .../internal/struct/VkSpecialization.hpp | 118 + .../vulkan/internal/struct/VkStencil.hpp | 118 + .../vulkan/internal/struct/VkStream.hpp | 206 + .../vulkan/internal/struct/VkSubmit.hpp | 260 + .../vulkan/internal/struct/VkSubpass.hpp | 356 + .../vulkan/internal/struct/VkSubresource.hpp | 122 + 
.../vulkan/internal/struct/VkSurface.hpp | 826 ++ .../vulkan/internal/struct/VkSwapchain.hpp | 512 + .../vulkan/internal/struct/VkTexture.hpp | 187 + .../vulkan/internal/struct/VkValidation.hpp | 415 + .../vulkan/internal/struct/VkVertex.hpp | 128 + .../VulkanHpp/vulkan/internal/struct/VkVi.hpp | 215 + .../vulkan/internal/struct/VkViewport.hpp | 130 + .../vulkan/internal/struct/VkWayland.hpp | 228 + .../vulkan/internal/struct/VkWin32.hpp | 514 + .../vulkan/internal/struct/VkWrite.hpp | 454 + .../VulkanHpp/vulkan/internal/struct/VkX.hpp | 133 + .../vulkan/internal/struct/VkXcb.hpp | 232 + .../vulkan/internal/struct/VkXlib.hpp | 233 + .../VulkanHpp/vulkan/internal/structs.hpp | 132 + app/libraries/VulkanHpp/vulkan/vulkan.hpp | 3 + 149 files changed, 83749 insertions(+), 3 deletions(-) create mode 100644 app/libraries/VulkanHpp/vulkan/internal/base.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/bitmasks.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/Vk.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkAccelerationStructureNV.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkBuffer.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkBufferView.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkCommandBuffer.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkCommandPool.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkDebugReportCallbackEXT.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkDebugUtilsMessengerEXT.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkDescriptorPool.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkDescriptorSet.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkDescriptorSetLayout.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkDescriptorUpdateTemplate.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkDevice.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkDeviceMemory.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkDisplayKHR.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkDisplayModeKHR.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkEvent.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkFence.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkFramebuffer.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkImage.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkImageView.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkIndirectCommandsLayoutNVX.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkInstance.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkObjectTableNVX.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkPerformanceConfigurationINTEL.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkPhysicalDevice.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkPipeline.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkPipelineCache.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkPipelineLayout.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkQueryPool.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkQueue.hpp create mode 100644 
app/libraries/VulkanHpp/vulkan/internal/cmds/VkRenderPass.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkSampler.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkSamplerYcbcrConversion.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkSemaphore.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkShaderModule.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkSurfaceKHR.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkSwapchainKHR.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/cmds/VkValidationCacheEXT.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/commands.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/dispatch.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/dynamicDispatch.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/enums.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/exceptions.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/forward.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/handles.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/objects.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkAcceleration.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkAcquire.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkAllocation.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkAndroid.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkApplication.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkAttachment.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkBase.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkBind.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkBuffer.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkCalibrated.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkCheckpoint.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkClear.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkCmd.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkCoarse.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkCommand.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkComponent.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkCompute.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkConditional.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkConformance.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkCooperative.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkCopy.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkD3D.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkDebug.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkDedicated.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkDescriptor.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkDevice.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkDispatch.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkDisplay.hpp create mode 100644 
app/libraries/VulkanHpp/vulkan/internal/struct/VkDraw.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkDrm.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkEvent.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkExport.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkExtension.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkExtent.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkExternal.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkFence.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkFilter.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkFormat.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkFramebuffer.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkGeometry.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkGraphics.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkHdr.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkHeadless.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkI.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkImage.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkImport.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkIndirect.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkInitialize.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkInput.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkInstance.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkLayer.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkMac.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkMapped.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkMemory.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkMetal.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkMultisample.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkObject.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkOffset.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkPast.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkPerformance.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkPhysical.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkPipeline.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkPresent.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkProtected.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkPush.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkQuery.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkQueue.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkRay.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkRect.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkRefresh.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkRender.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkSample.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkSampler.hpp create 
mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkSemaphore.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkShader.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkShading.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkShared.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkSparse.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkSpecialization.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkStencil.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkStream.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkSubmit.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkSubpass.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkSubresource.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkSurface.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkSwapchain.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkTexture.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkValidation.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkVertex.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkVi.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkViewport.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkWayland.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkWin32.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkWrite.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkX.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkXcb.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/struct/VkXlib.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/internal/structs.hpp create mode 100644 app/libraries/VulkanHpp/vulkan/vulkan.hpp diff --git a/app/CMakeLists.txt b/app/CMakeLists.txt index 65ec0e76..3776c93c 100644 --- a/app/CMakeLists.txt +++ b/app/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 3.8) -project(Skyline VERSION 0.3) +project(Skyline LANGUAGES CXX ASM VERSION 0.3) set(BUILD_TESTS OFF) set(BUILD_TESTING OFF) @@ -7,7 +7,6 @@ set(CMAKE_CXX_STANDARD 17) set(CMAKE_CXX_STANDARD_REQUIRED TRUE) set(source_DIR ${CMAKE_SOURCE_DIR}/src/main/cpp) set(CMAKE_CXX_FLAGS_RELEASE "-Ofast -flto=full -Wno-unused-command-line-argument") -enable_language(ASM) if (uppercase_CMAKE_BUILD_TYPE STREQUAL "RELEASE") add_compile_definitions(NDEBUG) endif() @@ -16,7 +15,8 @@ set(CMAKE_POLICY_DEFAULT_CMP0048 OLD) add_subdirectory("libraries/tinyxml2") add_subdirectory("libraries/fmt") add_subdirectory("libraries/oboe") -include_directories (libraries/oboe/include) +include_directories(libraries/oboe/include) +include_directories(libraries/VulkanHpp) set(CMAKE_POLICY_DEFAULT_CMP0048 NEW) include_directories(${source_DIR}/skyline) diff --git a/app/libraries/VulkanHpp/vulkan/internal/base.hpp b/app/libraries/VulkanHpp/vulkan/internal/base.hpp new file mode 100644 index 00000000..8e26dc67 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/base.hpp @@ -0,0 +1,778 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#if defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) +# if !defined(VULKAN_HPP_NO_SMART_HANDLE) +# define VULKAN_HPP_NO_SMART_HANDLE +# endif +#else +# include +# include +#endif + +#if !defined(VULKAN_HPP_ASSERT) +# include +# define VULKAN_HPP_ASSERT assert +#endif + +#if !defined(VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL) +# define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1 +#endif + +#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1 +# if defined(__linux__) || defined(__APPLE__) +# include +# endif + +# if defined(_WIN32) +# include +# endif +#endif + +#if 201711 <= __cpp_impl_three_way_comparison +# define VULKAN_HPP_HAS_SPACESHIP_OPERATOR +#endif +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) +# include +#endif + +static_assert( VK_HEADER_VERSION == 121 , "Wrong VK_HEADER_VERSION!" ); + +// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default. +// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION +#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) +# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION ) +# define VULKAN_HPP_TYPESAFE_CONVERSION +# endif +#endif + +// includes through some other header +// this results in major(x) being resolved to gnu_dev_major(x) +// which is an expression in a constructor initializer list. +#if defined(major) + #undef major +#endif +#if defined(minor) + #undef minor +#endif + +// Windows defines MemoryBarrier which is deprecated and collides +// with the VULKAN_HPP_NAMESPACE::MemoryBarrier struct. 
+#if defined(MemoryBarrier) + #undef MemoryBarrier +#endif + +#if !defined(VULKAN_HPP_HAS_UNRESTRICTED_UNIONS) +# if defined(__clang__) +# if __has_feature(cxx_unrestricted_unions) +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# elif defined(__GNUC__) +# define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) +# if 40600 <= GCC_VERSION +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# elif defined(_MSC_VER) +# if 1900 <= _MSC_VER +# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS +# endif +# endif +#endif + +#if !defined(VULKAN_HPP_INLINE) +# if defined(__clang__) +# if __has_attribute(always_inline) +# define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__ +# else +# define VULKAN_HPP_INLINE inline +# endif +# elif defined(__GNUC__) +# define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__ +# elif defined(_MSC_VER) +# define VULKAN_HPP_INLINE inline +# else +# define VULKAN_HPP_INLINE inline +# endif +#endif + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) +# define VULKAN_HPP_TYPESAFE_EXPLICIT +#else +# define VULKAN_HPP_TYPESAFE_EXPLICIT explicit +#endif + +#if defined(__cpp_constexpr) +# define VULKAN_HPP_CONSTEXPR constexpr +# if __cpp_constexpr >= 201304 +# define VULKAN_HPP_CONSTEXPR_14 constexpr +# else +# define VULKAN_HPP_CONSTEXPR_14 +# endif +# define VULKAN_HPP_CONST_OR_CONSTEXPR constexpr +#else +# define VULKAN_HPP_CONSTEXPR +# define VULKAN_HPP_CONSTEXPR_14 +# define VULKAN_HPP_CONST_OR_CONSTEXPR const +#endif + +#if !defined(VULKAN_HPP_NOEXCEPT) +# if defined(_MSC_VER) && (_MSC_VER <= 1800) +# define VULKAN_HPP_NOEXCEPT +# else +# define VULKAN_HPP_NOEXCEPT noexcept +# define VULKAN_HPP_HAS_NOEXCEPT 1 +# endif +#endif + +#if !defined(VULKAN_HPP_NAMESPACE) +#define VULKAN_HPP_NAMESPACE vk +#endif + +#define VULKAN_HPP_STRINGIFY2(text) #text +#define VULKAN_HPP_STRINGIFY(text) VULKAN_HPP_STRINGIFY2(text) +#define VULKAN_HPP_NAMESPACE_STRING VULKAN_HPP_STRINGIFY(VULKAN_HPP_NAMESPACE) + +namespace VULKAN_HPP_NAMESPACE +{ +#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE) + template + class ArrayProxy + { + public: + VULKAN_HPP_CONSTEXPR ArrayProxy(std::nullptr_t) VULKAN_HPP_NOEXCEPT + : m_count(0) + , m_ptr(nullptr) + {} + + ArrayProxy(typename std::remove_reference::type & ptr) VULKAN_HPP_NOEXCEPT + : m_count(1) + , m_ptr(&ptr) + {} + + ArrayProxy(uint32_t count, T * ptr) VULKAN_HPP_NOEXCEPT + : m_count(count) + , m_ptr(ptr) + {} + + template + ArrayProxy(std::array::type, N> & data) VULKAN_HPP_NOEXCEPT + : m_count(N) + , m_ptr(data.data()) + {} + + template + ArrayProxy(std::array::type, N> const& data) VULKAN_HPP_NOEXCEPT + : m_count(N) + , m_ptr(data.data()) + {} + + template ::type>> + ArrayProxy(std::vector::type, Allocator> & data) VULKAN_HPP_NOEXCEPT + : m_count(static_cast(data.size())) + , m_ptr(data.data()) + {} + + template ::type>> + ArrayProxy(std::vector::type, Allocator> const& data) VULKAN_HPP_NOEXCEPT + : m_count(static_cast(data.size())) + , m_ptr(data.data()) + {} + + ArrayProxy(std::initializer_list::type> const& data) VULKAN_HPP_NOEXCEPT + : m_count(static_cast(data.end() - data.begin())) + , m_ptr(data.begin()) + {} + + const T * begin() const VULKAN_HPP_NOEXCEPT + { + return m_ptr; + } + + const T * end() const VULKAN_HPP_NOEXCEPT + { + return m_ptr + m_count; + } + + const T & front() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT(m_count && m_ptr); + return *m_ptr; + } + + const T & back() const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT(m_count && m_ptr); + return *(m_ptr + m_count - 1); + } 
+ + bool empty() const VULKAN_HPP_NOEXCEPT + { + return (m_count == 0); + } + + uint32_t size() const VULKAN_HPP_NOEXCEPT + { + return m_count; + } + + T * data() const VULKAN_HPP_NOEXCEPT + { + return m_ptr; + } + + private: + uint32_t m_count; + T * m_ptr; + }; +#endif + + template struct FlagTraits + { + enum { allFlags = 0 }; + }; + + template + class Flags + { + public: + using MaskType = typename std::underlying_type::type; + + // constructors + VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT + : m_mask(0) + {} + + VULKAN_HPP_CONSTEXPR Flags(BitType bit) VULKAN_HPP_NOEXCEPT + : m_mask(static_cast(bit)) + {} + + VULKAN_HPP_CONSTEXPR Flags(Flags const& rhs) VULKAN_HPP_NOEXCEPT + : m_mask(rhs.m_mask) + {} + + VULKAN_HPP_CONSTEXPR explicit Flags(MaskType flags) VULKAN_HPP_NOEXCEPT + : m_mask(flags) + {} + + // relational operators +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>(Flags const&) const = default; +#else + VULKAN_HPP_CONSTEXPR bool operator<(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + { + return m_mask < rhs.m_mask; + } + + VULKAN_HPP_CONSTEXPR bool operator<=(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + { + return m_mask <= rhs.m_mask; + } + + VULKAN_HPP_CONSTEXPR bool operator>(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + { + return m_mask > rhs.m_mask; + } + + VULKAN_HPP_CONSTEXPR bool operator>=(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + { + return m_mask >= rhs.m_mask; + } + + VULKAN_HPP_CONSTEXPR bool operator==(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + { + return m_mask == rhs.m_mask; + } + + VULKAN_HPP_CONSTEXPR bool operator!=(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + { + return m_mask != rhs.m_mask; + } +#endif + + // logical operator + VULKAN_HPP_CONSTEXPR bool operator!() const VULKAN_HPP_NOEXCEPT + { + return !m_mask; + } + + // bitwise operators + VULKAN_HPP_CONSTEXPR Flags operator&(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + { + return Flags(m_mask & rhs.m_mask); + } + + VULKAN_HPP_CONSTEXPR Flags operator|(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + { + return Flags(m_mask | rhs.m_mask); + } + + VULKAN_HPP_CONSTEXPR Flags operator^(Flags const& rhs) const VULKAN_HPP_NOEXCEPT + { + return Flags(m_mask ^ rhs.m_mask); + } + + VULKAN_HPP_CONSTEXPR Flags operator~() const VULKAN_HPP_NOEXCEPT + { + return Flags(m_mask ^ FlagTraits::allFlags); + } + + // assignment operators + Flags & operator=(Flags const& rhs) VULKAN_HPP_NOEXCEPT + { + m_mask = rhs.m_mask; + return *this; + } + + Flags & operator|=(Flags const& rhs) VULKAN_HPP_NOEXCEPT + { + m_mask |= rhs.m_mask; + return *this; + } + + Flags & operator&=(Flags const& rhs) VULKAN_HPP_NOEXCEPT + { + m_mask &= rhs.m_mask; + return *this; + } + + Flags & operator^=(Flags const& rhs) VULKAN_HPP_NOEXCEPT + { + m_mask ^= rhs.m_mask; + return *this; + } + + // cast operators + explicit VULKAN_HPP_CONSTEXPR operator bool() const VULKAN_HPP_NOEXCEPT + { + return !!m_mask; + } + + explicit VULKAN_HPP_CONSTEXPR operator MaskType() const VULKAN_HPP_NOEXCEPT + { + return m_mask; + } + + private: + MaskType m_mask; + }; + +#if !defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + // relational operators only needed for pre C++20 + template + VULKAN_HPP_CONSTEXPR bool operator<(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + { + return flags > bit; + } + + template + VULKAN_HPP_CONSTEXPR bool operator<=(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + { + return flags >= bit; + } + + template + VULKAN_HPP_CONSTEXPR bool operator>(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + { 
+ return flags < bit; + } + + template + VULKAN_HPP_CONSTEXPR bool operator>=(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + { + return flags <= bit; + } + + template + VULKAN_HPP_CONSTEXPR bool operator==(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + { + return flags == bit; + } + + template + VULKAN_HPP_CONSTEXPR bool operator!=(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + { + return flags != bit; + } +#endif + + // bitwise operators + template + VULKAN_HPP_CONSTEXPR Flags operator&(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + { + return flags & bit; + } + + template + VULKAN_HPP_CONSTEXPR Flags operator|(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + { + return flags | bit; + } + + template + VULKAN_HPP_CONSTEXPR Flags operator^(BitType bit, Flags const& flags) VULKAN_HPP_NOEXCEPT + { + return flags ^ bit; + } + + template + class Optional + { + public: + Optional(RefType & reference) VULKAN_HPP_NOEXCEPT { m_ptr = &reference; } + Optional(RefType * ptr) VULKAN_HPP_NOEXCEPT { m_ptr = ptr; } + Optional(std::nullptr_t) VULKAN_HPP_NOEXCEPT { m_ptr = nullptr; } + + operator RefType*() const VULKAN_HPP_NOEXCEPT { return m_ptr; } + RefType const* operator->() const VULKAN_HPP_NOEXCEPT { return m_ptr; } + explicit operator bool() const VULKAN_HPP_NOEXCEPT { return !!m_ptr; } + + private: + RefType *m_ptr; + }; + + template struct isStructureChainValid { enum { value = false }; }; + + template + struct TypeList + { + using list = P; + using last = T; + }; + + template + struct extendCheck + { + static const bool valid = isStructureChainValid::value || extendCheck::valid; + }; + + template + struct extendCheck,X> + { + static const bool valid = isStructureChainValid::value; + }; + + template + struct extendCheck + { + static const bool valid = true; + }; + + template + struct isPartOfStructureChain + { + static const bool valid = false; + }; + + template + struct isPartOfStructureChain + { + static const bool valid = std::is_same::value || isPartOfStructureChain::valid; + }; + + template + class StructureChainElement + { + public: + explicit operator Element&() VULKAN_HPP_NOEXCEPT { return value; } + explicit operator const Element&() const VULKAN_HPP_NOEXCEPT { return value; } + private: + Element value; + }; + + template + class StructureChain : private StructureChainElement... + { + public: + StructureChain() VULKAN_HPP_NOEXCEPT + { + link(); + } + + StructureChain(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT + { + linkAndCopy(rhs); + } + + StructureChain(StructureElements const &... 
elems) VULKAN_HPP_NOEXCEPT + { + linkAndCopyElements(elems...); + } + + StructureChain& operator=(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT + { + linkAndCopy(rhs); + return *this; + } + + template ClassType& get() VULKAN_HPP_NOEXCEPT { return static_cast(*this);} + + template const ClassType& get() const VULKAN_HPP_NOEXCEPT { return static_cast(*this);} + + template + std::tuple get() + { + return std::tie(get(), get(), get()...); + } + + template + std::tuple get() const + { + return std::tie(get(), get(), get()...); + } + + template + void unlink() VULKAN_HPP_NOEXCEPT + { + static_assert(isPartOfStructureChain::valid, "Can't unlink Structure that's not part of this StructureChain!"); + static_assert(!std::is_same>::type>::value, "It's not allowed to unlink the first element!"); + VkBaseOutStructure * ptr = reinterpret_cast(&get()); + VULKAN_HPP_ASSERT(ptr != nullptr); + VkBaseOutStructure ** ppNext = &(reinterpret_cast(this)->pNext); + VULKAN_HPP_ASSERT(*ppNext != nullptr); + while (*ppNext != ptr) + { + ppNext = &(*ppNext)->pNext; + VULKAN_HPP_ASSERT(*ppNext != nullptr); // fires, if the ClassType member has already been unlinked ! + } + VULKAN_HPP_ASSERT(*ppNext == ptr); + *ppNext = (*ppNext)->pNext; + } + + template + void relink() VULKAN_HPP_NOEXCEPT + { + static_assert(isPartOfStructureChain::valid, "Can't relink Structure that's not part of this StructureChain!"); + static_assert(!std::is_same>::type>::value, "It's not allowed to have the first element unlinked!"); + VkBaseOutStructure * ptr = reinterpret_cast(&get()); + VULKAN_HPP_ASSERT(ptr != nullptr); + VkBaseOutStructure ** ppNext = &(reinterpret_cast(this)->pNext); + VULKAN_HPP_ASSERT(*ppNext != nullptr); +#if !defined(NDEBUG) + while (*ppNext) + { + VULKAN_HPP_ASSERT(*ppNext != ptr); // fires, if the ClassType member has not been unlinked before + ppNext = &(*ppNext)->pNext; + } + ppNext = &(reinterpret_cast(this)->pNext); +#endif + ptr->pNext = *ppNext; + *ppNext = ptr; + } + + private: + template + void link() VULKAN_HPP_NOEXCEPT + { + static_assert(extendCheck::valid, "The structure chain is not valid!"); + } + + template + void link() VULKAN_HPP_NOEXCEPT + { + static_assert(extendCheck::valid, "The structure chain is not valid!"); + X& x = static_cast(*this); + Y& y = static_cast(*this); + x.pNext = &y; + link, Y, Z...>(); + } + + template + void linkAndCopy(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT + { + static_assert(extendCheck::valid, "The structure chain is not valid!"); + static_cast(*this) = static_cast(rhs); + } + + template + void linkAndCopy(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT + { + static_assert(extendCheck::valid, "The structure chain is not valid!"); + X& x = static_cast(*this); + Y& y = static_cast(*this); + x = static_cast(rhs); + x.pNext = &y; + linkAndCopy, Y, Z...>(rhs); + } + + template + void linkAndCopyElements(X const &xelem) VULKAN_HPP_NOEXCEPT + { + static_assert(extendCheck::valid, "The structure chain is not valid!"); + static_cast(*this) = xelem; + } + + template + void linkAndCopyElements(X const &xelem, Y const &yelem, Z const &... 
zelem) VULKAN_HPP_NOEXCEPT + { + static_assert(extendCheck::valid, "The structure chain is not valid!"); + X& x = static_cast(*this); + Y& y = static_cast(*this); + x = xelem; + x.pNext = &y; + linkAndCopyElements, Y, Z...>(yelem, zelem...); + } + }; + +#if !defined(VULKAN_HPP_NO_SMART_HANDLE) + template class UniqueHandleTraits; + + template + class UniqueHandle : public UniqueHandleTraits::deleter + { + private: + using Deleter = typename UniqueHandleTraits::deleter; + + public: + using element_type = Type; + + UniqueHandle() + : Deleter() + , m_value() + {} + + explicit UniqueHandle( Type const& value, Deleter const& deleter = Deleter() ) VULKAN_HPP_NOEXCEPT + : Deleter( deleter) + , m_value( value ) + {} + + UniqueHandle( UniqueHandle const& ) = delete; + + UniqueHandle( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT + : Deleter( std::move( static_cast( other ) ) ) + , m_value( other.release() ) + {} + + ~UniqueHandle() VULKAN_HPP_NOEXCEPT + { + if ( m_value ) this->destroy( m_value ); + } + + UniqueHandle & operator=( UniqueHandle const& ) = delete; + + UniqueHandle & operator=( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT + { + reset( other.release() ); + *static_cast(this) = std::move( static_cast(other) ); + return *this; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_value.operator bool(); + } + + Type const* operator->() const VULKAN_HPP_NOEXCEPT + { + return &m_value; + } + + Type * operator->() VULKAN_HPP_NOEXCEPT + { + return &m_value; + } + + Type const& operator*() const VULKAN_HPP_NOEXCEPT + { + return m_value; + } + + Type & operator*() VULKAN_HPP_NOEXCEPT + { + return m_value; + } + + const Type & get() const VULKAN_HPP_NOEXCEPT + { + return m_value; + } + + Type & get() VULKAN_HPP_NOEXCEPT + { + return m_value; + } + + void reset( Type const& value = Type() ) VULKAN_HPP_NOEXCEPT + { + if ( m_value != value ) + { + if ( m_value ) this->destroy( m_value ); + m_value = value; + } + } + + Type release() VULKAN_HPP_NOEXCEPT + { + Type value = m_value; + m_value = nullptr; + return value; + } + + void swap( UniqueHandle & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap(m_value, rhs.m_value); + std::swap(static_cast(*this), static_cast(rhs)); + } + + private: + Type m_value; + }; + + template + VULKAN_HPP_INLINE std::vector uniqueToRaw(std::vector const& handles) + { + std::vector newBuffer(handles.size()); + std::transform(handles.begin(), handles.end(), newBuffer.begin(), [](UniqueType const& handle) { return handle.get(); }); + return newBuffer; + } + + template + VULKAN_HPP_INLINE void swap( UniqueHandle & lhs, UniqueHandle & rhs ) VULKAN_HPP_NOEXCEPT + { + lhs.swap( rhs ); + } +#endif +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/bitmasks.hpp b/app/libraries/VulkanHpp/vulkan/internal/bitmasks.hpp new file mode 100644 index 00000000..a7d6daba --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/bitmasks.hpp @@ -0,0 +1,3615 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "enums.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + using AccessFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) | VkFlags(AccessFlagBits::eIndexRead) | VkFlags(AccessFlagBits::eVertexAttributeRead) | VkFlags(AccessFlagBits::eUniformRead) | VkFlags(AccessFlagBits::eInputAttachmentRead) | VkFlags(AccessFlagBits::eShaderRead) | VkFlags(AccessFlagBits::eShaderWrite) | VkFlags(AccessFlagBits::eColorAttachmentRead) | VkFlags(AccessFlagBits::eColorAttachmentWrite) | VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) | VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) | VkFlags(AccessFlagBits::eTransferRead) | VkFlags(AccessFlagBits::eTransferWrite) | VkFlags(AccessFlagBits::eHostRead) | VkFlags(AccessFlagBits::eHostWrite) | VkFlags(AccessFlagBits::eMemoryRead) | VkFlags(AccessFlagBits::eMemoryWrite) | VkFlags(AccessFlagBits::eTransformFeedbackWriteEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterReadEXT) | VkFlags(AccessFlagBits::eTransformFeedbackCounterWriteEXT) | VkFlags(AccessFlagBits::eConditionalRenderingReadEXT) | VkFlags(AccessFlagBits::eCommandProcessReadNVX) | VkFlags(AccessFlagBits::eCommandProcessWriteNVX) | VkFlags(AccessFlagBits::eColorAttachmentReadNoncoherentEXT) | VkFlags(AccessFlagBits::eShadingRateImageReadNV) | VkFlags(AccessFlagBits::eAccelerationStructureReadNV) | VkFlags(AccessFlagBits::eAccelerationStructureWriteNV) | VkFlags(AccessFlagBits::eFragmentDensityMapReadEXT) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator|( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccessFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator&( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccessFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator^( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return AccessFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator~( AccessFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( AccessFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( AccessFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & AccessFlagBits::eIndirectCommandRead ) 
result += "IndirectCommandRead | "; + if ( value & AccessFlagBits::eIndexRead ) result += "IndexRead | "; + if ( value & AccessFlagBits::eVertexAttributeRead ) result += "VertexAttributeRead | "; + if ( value & AccessFlagBits::eUniformRead ) result += "UniformRead | "; + if ( value & AccessFlagBits::eInputAttachmentRead ) result += "InputAttachmentRead | "; + if ( value & AccessFlagBits::eShaderRead ) result += "ShaderRead | "; + if ( value & AccessFlagBits::eShaderWrite ) result += "ShaderWrite | "; + if ( value & AccessFlagBits::eColorAttachmentRead ) result += "ColorAttachmentRead | "; + if ( value & AccessFlagBits::eColorAttachmentWrite ) result += "ColorAttachmentWrite | "; + if ( value & AccessFlagBits::eDepthStencilAttachmentRead ) result += "DepthStencilAttachmentRead | "; + if ( value & AccessFlagBits::eDepthStencilAttachmentWrite ) result += "DepthStencilAttachmentWrite | "; + if ( value & AccessFlagBits::eTransferRead ) result += "TransferRead | "; + if ( value & AccessFlagBits::eTransferWrite ) result += "TransferWrite | "; + if ( value & AccessFlagBits::eHostRead ) result += "HostRead | "; + if ( value & AccessFlagBits::eHostWrite ) result += "HostWrite | "; + if ( value & AccessFlagBits::eMemoryRead ) result += "MemoryRead | "; + if ( value & AccessFlagBits::eMemoryWrite ) result += "MemoryWrite | "; + if ( value & AccessFlagBits::eTransformFeedbackWriteEXT ) result += "TransformFeedbackWriteEXT | "; + if ( value & AccessFlagBits::eTransformFeedbackCounterReadEXT ) result += "TransformFeedbackCounterReadEXT | "; + if ( value & AccessFlagBits::eTransformFeedbackCounterWriteEXT ) result += "TransformFeedbackCounterWriteEXT | "; + if ( value & AccessFlagBits::eConditionalRenderingReadEXT ) result += "ConditionalRenderingReadEXT | "; + if ( value & AccessFlagBits::eCommandProcessReadNVX ) result += "CommandProcessReadNVX | "; + if ( value & AccessFlagBits::eCommandProcessWriteNVX ) result += "CommandProcessWriteNVX | "; + if ( value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) result += "ColorAttachmentReadNoncoherentEXT | "; + if ( value & AccessFlagBits::eShadingRateImageReadNV ) result += "ShadingRateImageReadNV | "; + if ( value & AccessFlagBits::eAccelerationStructureReadNV ) result += "AccelerationStructureReadNV | "; + if ( value & AccessFlagBits::eAccelerationStructureWriteNV ) result += "AccelerationStructureWriteNV | "; + if ( value & AccessFlagBits::eFragmentDensityMapReadEXT ) result += "FragmentDensityMapReadEXT | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + enum class AndroidSurfaceCreateFlagBitsKHR : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } + + using AndroidSurfaceCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + using AttachmentDescriptionFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(AttachmentDescriptionFlagBits::eMayAlias) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return AttachmentDescriptionFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator&( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return 
AttachmentDescriptionFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator^( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return AttachmentDescriptionFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( AttachmentDescriptionFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & AttachmentDescriptionFlagBits::eMayAlias ) result += "MayAlias | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using BufferCreateFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) | VkFlags(BufferCreateFlagBits::eSparseResidency) | VkFlags(BufferCreateFlagBits::eSparseAliased) | VkFlags(BufferCreateFlagBits::eProtected) | VkFlags(BufferCreateFlagBits::eDeviceAddressCaptureReplayEXT) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator|( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return BufferCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator&( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return BufferCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator^( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return BufferCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator~( BufferCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( BufferCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( BufferCreateFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & BufferCreateFlagBits::eSparseBinding ) result += "SparseBinding | "; + if ( value & BufferCreateFlagBits::eSparseResidency ) result += "SparseResidency | "; + if ( value & BufferCreateFlagBits::eSparseAliased ) result += "SparseAliased | "; + if ( value & BufferCreateFlagBits::eProtected ) result += "Protected | "; + if ( value & BufferCreateFlagBits::eDeviceAddressCaptureReplayEXT ) result += "DeviceAddressCaptureReplayEXT | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using BufferUsageFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) | VkFlags(BufferUsageFlagBits::eTransferDst) | VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) | VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) | VkFlags(BufferUsageFlagBits::eUniformBuffer) | VkFlags(BufferUsageFlagBits::eStorageBuffer) | VkFlags(BufferUsageFlagBits::eIndexBuffer) | VkFlags(BufferUsageFlagBits::eVertexBuffer) | VkFlags(BufferUsageFlagBits::eIndirectBuffer) | VkFlags(BufferUsageFlagBits::eTransformFeedbackBufferEXT) | VkFlags(BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT) | VkFlags(BufferUsageFlagBits::eConditionalRenderingEXT) | VkFlags(BufferUsageFlagBits::eRayTracingNV) | VkFlags(BufferUsageFlagBits::eShaderDeviceAddressEXT) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator|( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return BufferUsageFlags( bit0 ) | bit1; 
+ } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator&( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return BufferUsageFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator^( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return BufferUsageFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator~( BufferUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( BufferUsageFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & BufferUsageFlagBits::eTransferSrc ) result += "TransferSrc | "; + if ( value & BufferUsageFlagBits::eTransferDst ) result += "TransferDst | "; + if ( value & BufferUsageFlagBits::eUniformTexelBuffer ) result += "UniformTexelBuffer | "; + if ( value & BufferUsageFlagBits::eStorageTexelBuffer ) result += "StorageTexelBuffer | "; + if ( value & BufferUsageFlagBits::eUniformBuffer ) result += "UniformBuffer | "; + if ( value & BufferUsageFlagBits::eStorageBuffer ) result += "StorageBuffer | "; + if ( value & BufferUsageFlagBits::eIndexBuffer ) result += "IndexBuffer | "; + if ( value & BufferUsageFlagBits::eVertexBuffer ) result += "VertexBuffer | "; + if ( value & BufferUsageFlagBits::eIndirectBuffer ) result += "IndirectBuffer | "; + if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT ) result += "TransformFeedbackBufferEXT | "; + if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) result += "TransformFeedbackCounterBufferEXT | "; + if ( value & BufferUsageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | "; + if ( value & BufferUsageFlagBits::eRayTracingNV ) result += "RayTracingNV | "; + if ( value & BufferUsageFlagBits::eShaderDeviceAddressEXT ) result += "ShaderDeviceAddressEXT | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using BufferViewCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlags ) + { + return "{}"; + } + + using BuildAccelerationStructureFlagsNV = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(BuildAccelerationStructureFlagBitsNV::eAllowUpdate) | VkFlags(BuildAccelerationStructureFlagBitsNV::eAllowCompaction) | VkFlags(BuildAccelerationStructureFlagBitsNV::ePreferFastTrace) | VkFlags(BuildAccelerationStructureFlagBitsNV::ePreferFastBuild) | VkFlags(BuildAccelerationStructureFlagBitsNV::eLowMemory) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsNV operator|( BuildAccelerationStructureFlagBitsNV bit0, BuildAccelerationStructureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return BuildAccelerationStructureFlagsNV( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsNV operator&( BuildAccelerationStructureFlagBitsNV bit0, BuildAccelerationStructureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return BuildAccelerationStructureFlagsNV( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsNV operator^( BuildAccelerationStructureFlagBitsNV bit0, BuildAccelerationStructureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT + { + return BuildAccelerationStructureFlagsNV( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsNV operator~( BuildAccelerationStructureFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT + { + 
return ~( BuildAccelerationStructureFlagsNV( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsNV value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & BuildAccelerationStructureFlagBitsNV::eAllowUpdate ) result += "AllowUpdate | "; + if ( value & BuildAccelerationStructureFlagBitsNV::eAllowCompaction ) result += "AllowCompaction | "; + if ( value & BuildAccelerationStructureFlagBitsNV::ePreferFastTrace ) result += "PreferFastTrace | "; + if ( value & BuildAccelerationStructureFlagBitsNV::ePreferFastBuild ) result += "PreferFastBuild | "; + if ( value & BuildAccelerationStructureFlagBitsNV::eLowMemory ) result += "LowMemory | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using ColorComponentFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(ColorComponentFlagBits::eR) | VkFlags(ColorComponentFlagBits::eG) | VkFlags(ColorComponentFlagBits::eB) | VkFlags(ColorComponentFlagBits::eA) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ColorComponentFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator&( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ColorComponentFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator^( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ColorComponentFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator~( ColorComponentFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ColorComponentFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ColorComponentFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & ColorComponentFlagBits::eR ) result += "R | "; + if ( value & ColorComponentFlagBits::eG ) result += "G | "; + if ( value & ColorComponentFlagBits::eB ) result += "B | "; + if ( value & ColorComponentFlagBits::eA ) result += "A | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using CommandBufferResetFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(CommandBufferResetFlagBits::eReleaseResources) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandBufferResetFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator&( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandBufferResetFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator^( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return CommandBufferResetFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( CommandBufferResetFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & CommandBufferResetFlagBits::eReleaseResources ) result += "ReleaseResources | "; + return "{ " + 
+  using CommandBufferUsageFlags = Flags<CommandBufferUsageFlagBits, VkCommandBufferUsageFlags>;
+
+  template <> struct FlagTraits<CommandBufferUsageFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(CommandBufferUsageFlagBits::eOneTimeSubmit) | VkFlags(CommandBufferUsageFlagBits::eRenderPassContinue) | VkFlags(CommandBufferUsageFlagBits::eSimultaneousUse)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandBufferUsageFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator&( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandBufferUsageFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator^( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandBufferUsageFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( CommandBufferUsageFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlags value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & CommandBufferUsageFlagBits::eOneTimeSubmit ) result += "OneTimeSubmit | ";
+    if ( value & CommandBufferUsageFlagBits::eRenderPassContinue ) result += "RenderPassContinue | ";
+    if ( value & CommandBufferUsageFlagBits::eSimultaneousUse ) result += "SimultaneousUse | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  using CommandPoolCreateFlags = Flags<CommandPoolCreateFlagBits, VkCommandPoolCreateFlags>;
+
+  template <> struct FlagTraits<CommandPoolCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(CommandPoolCreateFlagBits::eTransient) | VkFlags(CommandPoolCreateFlagBits::eResetCommandBuffer) | VkFlags(CommandPoolCreateFlagBits::eProtected)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandPoolCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator&( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandPoolCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator^( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandPoolCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( CommandPoolCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlags value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & CommandPoolCreateFlagBits::eTransient ) result += "Transient | ";
+    if ( value & CommandPoolCreateFlagBits::eResetCommandBuffer ) result += "ResetCommandBuffer | ";
+    if ( value & CommandPoolCreateFlagBits::eProtected ) result += "Protected | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  using CommandPoolResetFlags = Flags<CommandPoolResetFlagBits, VkCommandPoolResetFlags>;
+
+  template <> struct FlagTraits<CommandPoolResetFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(CommandPoolResetFlagBits::eReleaseResources)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandPoolResetFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator&( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandPoolResetFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator^( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CommandPoolResetFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( CommandPoolResetFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlags value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & CommandPoolResetFlagBits::eReleaseResources ) result += "ReleaseResources | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
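+  // Bitmask types whose FlagBits enum has no bits defined yet (reserved for
+  // future use) get a stub enum and to_string overloads returning "(void)"
+  // for the bits and "{}" for the mask, but no FlagTraits or operators.
+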
+  enum class CommandPoolTrimFlagBits : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlagBits )
+  {
+    return "(void)";
+  }
+
+  using CommandPoolTrimFlags = Flags<CommandPoolTrimFlagBits, VkCommandPoolTrimFlags>;
+
+  using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlags )
+  {
+    return "{}";
+  }
+
+  using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR, VkCompositeAlphaFlagsKHR>;
+
+  template <> struct FlagTraits<CompositeAlphaFlagBitsKHR>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(CompositeAlphaFlagBitsKHR::eOpaque) | VkFlags(CompositeAlphaFlagBitsKHR::ePreMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::ePostMultiplied) | VkFlags(CompositeAlphaFlagBitsKHR::eInherit)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CompositeAlphaFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator&( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CompositeAlphaFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator^( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CompositeAlphaFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( CompositeAlphaFlagsKHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & CompositeAlphaFlagBitsKHR::eOpaque ) result += "Opaque | ";
+    if ( value & CompositeAlphaFlagBitsKHR::ePreMultiplied ) result += "PreMultiplied | ";
+    if ( value & CompositeAlphaFlagBitsKHR::ePostMultiplied ) result += "PostMultiplied | ";
+    if ( value & CompositeAlphaFlagBitsKHR::eInherit ) result += "Inherit | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  using ConditionalRenderingFlagsEXT = Flags<ConditionalRenderingFlagBitsEXT, VkConditionalRenderingFlagsEXT>;
+
+  template <> struct FlagTraits<ConditionalRenderingFlagBitsEXT>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ConditionalRenderingFlagBitsEXT::eInverted)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator|( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ConditionalRenderingFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator&( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ConditionalRenderingFlagsEXT( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator^( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ConditionalRenderingFlagsEXT( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator~( ConditionalRenderingFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ConditionalRenderingFlagsEXT( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagsEXT value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ConditionalRenderingFlagBitsEXT::eInverted ) result += "Inverted | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  using CullModeFlags = Flags<CullModeFlagBits, VkCullModeFlags>;
+
+  template <> struct FlagTraits<CullModeFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(CullModeFlagBits::eNone) | VkFlags(CullModeFlagBits::eFront) | VkFlags(CullModeFlagBits::eBack) | VkFlags(CullModeFlagBits::eFrontAndBack)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator|( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CullModeFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator&( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CullModeFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator^( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return CullModeFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator~( CullModeFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( CullModeFlags( bits ) );
+  }
+
+  // Note: eNone is zero and eFrontAndBack is eFront | eBack, so to_string
+  // only needs to test the two single bits.
+  VULKAN_HPP_INLINE std::string to_string( CullModeFlags value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & CullModeFlagBits::eFront ) result += "Front | ";
+    if ( value & CullModeFlagBits::eBack ) result += "Back | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT, VkDebugReportFlagsEXT>;
+
+  template <> struct FlagTraits<DebugReportFlagBitsEXT>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DebugReportFlagBitsEXT::eInformation) | VkFlags(DebugReportFlagBitsEXT::eWarning) | VkFlags(DebugReportFlagBitsEXT::ePerformanceWarning) | VkFlags(DebugReportFlagBitsEXT::eError) | VkFlags(DebugReportFlagBitsEXT::eDebug)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugReportFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator&( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugReportFlagsEXT( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator^( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugReportFlagsEXT( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DebugReportFlagsEXT( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DebugReportFlagsEXT value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DebugReportFlagBitsEXT::eInformation ) result += "Information | ";
+    if ( value & DebugReportFlagBitsEXT::eWarning ) result += "Warning | ";
+    if ( value & DebugReportFlagBitsEXT::ePerformanceWarning ) result += "PerformanceWarning | ";
+    if ( value & DebugReportFlagBitsEXT::eError ) result += "Error | ";
+    if ( value & DebugReportFlagBitsEXT::eDebug ) result += "Debug | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  using DebugUtilsMessageSeverityFlagsEXT = Flags<DebugUtilsMessageSeverityFlagBitsEXT, VkDebugUtilsMessageSeverityFlagsEXT>;
+
+  template <> struct FlagTraits<DebugUtilsMessageSeverityFlagBitsEXT>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eVerbose) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eInfo) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eWarning) | VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eError)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator|( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugUtilsMessageSeverityFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator&( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugUtilsMessageSeverityFlagsEXT( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator^( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugUtilsMessageSeverityFlagsEXT( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator~( DebugUtilsMessageSeverityFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DebugUtilsMessageSeverityFlagsEXT( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagsEXT value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) result += "Verbose | ";
+    if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) result += "Info | ";
+    if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) result += "Warning | ";
+    if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eError ) result += "Error | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
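+  // Example (illustrative): a typical severity mask for a debug messenger,
+  //   DebugUtilsMessageSeverityFlagBitsEXT::eWarning | DebugUtilsMessageSeverityFlagBitsEXT::eError,
+  // stringifies via the overload above as "{ Warning | Error }".
+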
+  using DebugUtilsMessageTypeFlagsEXT = Flags<DebugUtilsMessageTypeFlagBitsEXT, VkDebugUtilsMessageTypeFlagsEXT>;
+
+  template <> struct FlagTraits<DebugUtilsMessageTypeFlagBitsEXT>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eGeneral) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eValidation) | VkFlags(DebugUtilsMessageTypeFlagBitsEXT::ePerformance)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator|( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugUtilsMessageTypeFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator&( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugUtilsMessageTypeFlagsEXT( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator^( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DebugUtilsMessageTypeFlagsEXT( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator~( DebugUtilsMessageTypeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DebugUtilsMessageTypeFlagsEXT( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagsEXT value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) result += "General | ";
+    if ( value & DebugUtilsMessageTypeFlagBitsEXT::eValidation ) result += "Validation | ";
+    if ( value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) result += "Performance | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DebugUtilsMessengerCallbackDataFlagBitsEXT : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using DebugUtilsMessengerCallbackDataFlagsEXT = Flags<DebugUtilsMessengerCallbackDataFlagBitsEXT, VkDebugUtilsMessengerCallbackDataFlagsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagsEXT )
+  {
+    return "{}";
+  }
+
+  enum class DebugUtilsMessengerCreateFlagBitsEXT : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using DebugUtilsMessengerCreateFlagsEXT = Flags<DebugUtilsMessengerCreateFlagBitsEXT, VkDebugUtilsMessengerCreateFlagsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagsEXT )
+  {
+    return "{}";
+  }
+
+  using DependencyFlags = Flags<DependencyFlagBits, VkDependencyFlags>;
+
+  template <> struct FlagTraits<DependencyFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DependencyFlagBits::eByRegion) | VkFlags(DependencyFlagBits::eDeviceGroup) | VkFlags(DependencyFlagBits::eViewLocal)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator|( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DependencyFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator&( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DependencyFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator^( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DependencyFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator~( DependencyFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DependencyFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DependencyFlags value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DependencyFlagBits::eByRegion ) result += "ByRegion | ";
+    if ( value & DependencyFlagBits::eDeviceGroup ) result += "DeviceGroup | ";
+    if ( value & DependencyFlagBits::eViewLocal ) result += "ViewLocal | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  using DescriptorBindingFlagsEXT = Flags<DescriptorBindingFlagBitsEXT, VkDescriptorBindingFlagsEXT>;
+
+  template <> struct FlagTraits<DescriptorBindingFlagBitsEXT>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DescriptorBindingFlagBitsEXT::eUpdateAfterBind) | VkFlags(DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending) | VkFlags(DescriptorBindingFlagBitsEXT::ePartiallyBound) | VkFlags(DescriptorBindingFlagBitsEXT::eVariableDescriptorCount)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlagsEXT operator|( DescriptorBindingFlagBitsEXT bit0, DescriptorBindingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorBindingFlagsEXT( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlagsEXT operator&( DescriptorBindingFlagBitsEXT bit0, DescriptorBindingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorBindingFlagsEXT( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlagsEXT operator^( DescriptorBindingFlagBitsEXT bit0, DescriptorBindingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorBindingFlagsEXT( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlagsEXT operator~( DescriptorBindingFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DescriptorBindingFlagsEXT( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagsEXT value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DescriptorBindingFlagBitsEXT::eUpdateAfterBind ) result += "UpdateAfterBind | ";
+    if ( value & DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending ) result += "UpdateUnusedWhilePending | ";
+    if ( value & DescriptorBindingFlagBitsEXT::ePartiallyBound ) result += "PartiallyBound | ";
+    if ( value & DescriptorBindingFlagBitsEXT::eVariableDescriptorCount ) result += "VariableDescriptorCount | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits, VkDescriptorPoolCreateFlags>;
+
+  template <> struct FlagTraits<DescriptorPoolCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet) | VkFlags(DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorPoolCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator&( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorPoolCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator^( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorPoolCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DescriptorPoolCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlags value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) result += "FreeDescriptorSet | ";
+    if ( value & DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT ) result += "UpdateAfterBindEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DescriptorPoolResetFlagBits : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlagBits )
+  {
+    return "(void)";
+  }
+
+  using DescriptorPoolResetFlags = Flags<DescriptorPoolResetFlagBits, VkDescriptorPoolResetFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlags )
+  {
+    return "{}";
+  }
+
+  using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits, VkDescriptorSetLayoutCreateFlags>;
+
+  template <> struct FlagTraits<DescriptorSetLayoutCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR) | VkFlags(DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorSetLayoutCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator&( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorSetLayoutCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator^( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DescriptorSetLayoutCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator~( DescriptorSetLayoutCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DescriptorSetLayoutCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlags value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) result += "PushDescriptorKHR | ";
+    if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT ) result += "UpdateAfterBindPoolEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DescriptorUpdateTemplateCreateFlagBits : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using DescriptorUpdateTemplateCreateFlags = Flags<DescriptorUpdateTemplateCreateFlagBits, VkDescriptorUpdateTemplateCreateFlags>;
+
+  using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlags )
+  {
+    return "{}";
+  }
+
+  using DeviceCreateFlags = Flags<DeviceCreateFlagBits, VkDeviceCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlags )
+  {
+    return "{}";
+  }
+
+  using DeviceGroupPresentModeFlagsKHR = Flags<DeviceGroupPresentModeFlagBitsKHR, VkDeviceGroupPresentModeFlagsKHR>;
+
+  template <> struct FlagTraits<DeviceGroupPresentModeFlagBitsKHR>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocal) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eRemote) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eSum) | VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator|( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceGroupPresentModeFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator&( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceGroupPresentModeFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator^( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceGroupPresentModeFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator~( DeviceGroupPresentModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DeviceGroupPresentModeFlagsKHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagsKHR value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocal ) result += "Local | ";
+    if ( value & DeviceGroupPresentModeFlagBitsKHR::eRemote ) result += "Remote | ";
+    if ( value & DeviceGroupPresentModeFlagBitsKHR::eSum ) result += "Sum | ";
+    if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice ) result += "LocalMultiDevice | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits, VkDeviceQueueCreateFlags>;
+
+  template <> struct FlagTraits<DeviceQueueCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DeviceQueueCreateFlagBits::eProtected)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceQueueCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator&( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceQueueCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator^( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DeviceQueueCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator~( DeviceQueueCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DeviceQueueCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlags value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DeviceQueueCreateFlagBits::eProtected ) result += "Protected | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DisplayModeCreateFlagBitsKHR : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+  using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR, VkDisplayModeCreateFlagsKHR>;
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagsKHR )
+  {
+    return "{}";
+  }
+
+  using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR, VkDisplayPlaneAlphaFlagsKHR>;
+
+  template <> struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(DisplayPlaneAlphaFlagBitsKHR::eOpaque) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::eGlobal) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixel) | VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator&( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DisplayPlaneAlphaFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator^( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return DisplayPlaneAlphaFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( DisplayPlaneAlphaFlagsKHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagsKHR value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & DisplayPlaneAlphaFlagBitsKHR::eOpaque ) result += "Opaque | ";
+    if ( value & DisplayPlaneAlphaFlagBitsKHR::eGlobal ) result += "Global | ";
+    if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) result += "PerPixel | ";
+    if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied ) result += "PerPixelPremultiplied | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class DisplaySurfaceCreateFlagBitsKHR : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagBitsKHR )
+  {
+    return "(void)";
+  }
+
+  using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR, VkDisplaySurfaceCreateFlagsKHR>;
+
+  VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagsKHR )
+  {
+    return "{}";
+  }
+
+  enum class EventCreateFlagBits : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  using EventCreateFlags = Flags<EventCreateFlagBits, VkEventCreateFlags>;
+
+  VULKAN_HPP_INLINE std::string to_string( EventCreateFlags )
+  {
+    return "{}";
+  }
+
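+  // Flag types promoted to core from a KHR extension keep a ...KHR alias
+  // (ExternalFenceFeatureFlagsKHR below, and similar aliases elsewhere in
+  // this header) so code written against the extension names still compiles.
+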
+  using ExternalFenceFeatureFlags = Flags<ExternalFenceFeatureFlagBits, VkExternalFenceFeatureFlags>;
+
+  template <> struct FlagTraits<ExternalFenceFeatureFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ExternalFenceFeatureFlagBits::eExportable) | VkFlags(ExternalFenceFeatureFlagBits::eImportable)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator|( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalFenceFeatureFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator&( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalFenceFeatureFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator^( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalFenceFeatureFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator~( ExternalFenceFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalFenceFeatureFlags( bits ) );
+  }
+
+  using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlags value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalFenceFeatureFlagBits::eExportable ) result += "Exportable | ";
+    if ( value & ExternalFenceFeatureFlagBits::eImportable ) result += "Importable | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  using ExternalFenceHandleTypeFlags = Flags<ExternalFenceHandleTypeFlagBits, VkExternalFenceHandleTypeFlags>;
+
+  template <> struct FlagTraits<ExternalFenceHandleTypeFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalFenceHandleTypeFlagBits::eSyncFd)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator|( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalFenceHandleTypeFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator&( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalFenceHandleTypeFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator^( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalFenceHandleTypeFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator~( ExternalFenceHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalFenceHandleTypeFlags( bits ) );
+  }
+
+  using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlags value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | ";
+    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | ";
+    if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
+    if ( value & ExternalFenceHandleTypeFlagBits::eSyncFd ) result += "SyncFd | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  using ExternalMemoryFeatureFlags = Flags<ExternalMemoryFeatureFlagBits, VkExternalMemoryFeatureFlags>;
+
+  template <> struct FlagTraits<ExternalMemoryFeatureFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ExternalMemoryFeatureFlagBits::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBits::eExportable) | VkFlags(ExternalMemoryFeatureFlagBits::eImportable)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator|( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryFeatureFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator&( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryFeatureFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator^( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryFeatureFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator~( ExternalMemoryFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalMemoryFeatureFlags( bits ) );
+  }
+
+  using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlags value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalMemoryFeatureFlagBits::eDedicatedOnly ) result += "DedicatedOnly | ";
+    if ( value & ExternalMemoryFeatureFlagBits::eExportable ) result += "Exportable | ";
+    if ( value & ExternalMemoryFeatureFlagBits::eImportable ) result += "Importable | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV, VkExternalMemoryFeatureFlagsNV>;
+
+  template <> struct FlagTraits<ExternalMemoryFeatureFlagBitsNV>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eExportable) | VkFlags(ExternalMemoryFeatureFlagBitsNV::eImportable)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator&( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryFeatureFlagsNV( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator^( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryFeatureFlagsNV( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalMemoryFeatureFlagsNV( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagsNV value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) result += "DedicatedOnly | ";
+    if ( value & ExternalMemoryFeatureFlagBitsNV::eExportable ) result += "Exportable | ";
+    if ( value & ExternalMemoryFeatureFlagBitsNV::eImportable ) result += "Importable | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  using ExternalMemoryHandleTypeFlags = Flags<ExternalMemoryHandleTypeFlagBits, VkExternalMemoryHandleTypeFlags>;
+
+  template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11Texture) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Heap) | VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Resource) | VkFlags(ExternalMemoryHandleTypeFlagBits::eDmaBufEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT) | VkFlags(ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator|( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryHandleTypeFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator&( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryHandleTypeFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator^( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryHandleTypeFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator~( ExternalMemoryHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalMemoryHandleTypeFlags( bits ) );
+  }
+
+  using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlags value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) result += "D3D11Texture | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) result += "D3D11TextureKmt | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) result += "D3D12Heap | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) result += "D3D12Resource | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT ) result += "DmaBufEXT | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID ) result += "AndroidHardwareBufferANDROID | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) result += "HostAllocationEXT | ";
+    if ( value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT ) result += "HostMappedForeignMemoryEXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV, VkExternalMemoryHandleTypeFlagsNV>;
+
+  template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBitsNV>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) | VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator&( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryHandleTypeFlagsNV( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator^( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalMemoryHandleTypeFlagsNV( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator~( ExternalMemoryHandleTypeFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalMemoryHandleTypeFlagsNV( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagsNV value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) result += "OpaqueWin32 | ";
+    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
+    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) result += "D3D11Image | ";
+    if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt ) result += "D3D11ImageKmt | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  using ExternalSemaphoreFeatureFlags = Flags<ExternalSemaphoreFeatureFlagBits, VkExternalSemaphoreFeatureFlags>;
+
+  template <> struct FlagTraits<ExternalSemaphoreFeatureFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ExternalSemaphoreFeatureFlagBits::eExportable) | VkFlags(ExternalSemaphoreFeatureFlagBits::eImportable)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator|( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalSemaphoreFeatureFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator&( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalSemaphoreFeatureFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator^( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalSemaphoreFeatureFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator~( ExternalSemaphoreFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalSemaphoreFeatureFlags( bits ) );
+  }
+
+  using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlags value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalSemaphoreFeatureFlagBits::eExportable ) result += "Exportable | ";
+    if ( value & ExternalSemaphoreFeatureFlagBits::eImportable ) result += "Importable | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  using ExternalSemaphoreHandleTypeFlags = Flags<ExternalSemaphoreHandleTypeFlagBits, VkExternalSemaphoreHandleTypeFlags>;
+
+  template <> struct FlagTraits<ExternalSemaphoreHandleTypeFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence) | VkFlags(ExternalSemaphoreHandleTypeFlagBits::eSyncFd)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator|( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalSemaphoreHandleTypeFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator&( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalSemaphoreHandleTypeFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator^( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ExternalSemaphoreHandleTypeFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator~( ExternalSemaphoreHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ExternalSemaphoreHandleTypeFlags( bits ) );
+  }
+
+  using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlags value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | ";
+    if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | ";
+    if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
+    if ( value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) result += "D3D12Fence | ";
+    if ( value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd ) result += "SyncFd | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
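+  // Example (illustrative, not part of the generated code): a fence created
+  // with FenceCreateFlagBits::eSignaled below starts out signaled, e.g.
+  //   vk::Fence f = device.createFence( vk::FenceCreateInfo( vk::FenceCreateFlagBits::eSignaled ) );
+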
+  using FenceCreateFlags = Flags<FenceCreateFlagBits, VkFenceCreateFlags>;
+
+  template <> struct FlagTraits<FenceCreateFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(FenceCreateFlagBits::eSignaled)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator|( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FenceCreateFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator&( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FenceCreateFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator^( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FenceCreateFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator~( FenceCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( FenceCreateFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( FenceCreateFlags value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & FenceCreateFlagBits::eSignaled ) result += "Signaled | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  using FenceImportFlags = Flags<FenceImportFlagBits, VkFenceImportFlags>;
+
+  template <> struct FlagTraits<FenceImportFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(FenceImportFlagBits::eTemporary)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator|( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FenceImportFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator&( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FenceImportFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator^( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return FenceImportFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator~( FenceImportFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( FenceImportFlags( bits ) );
+  }
+
+  using FenceImportFlagsKHR = FenceImportFlags;
+
+  VULKAN_HPP_INLINE std::string to_string( FenceImportFlags value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & FenceImportFlagBits::eTemporary ) result += "Temporary | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
"StorageTexelBufferAtomic | "; + if ( value & FormatFeatureFlagBits::eVertexBuffer ) result += "VertexBuffer | "; + if ( value & FormatFeatureFlagBits::eColorAttachment ) result += "ColorAttachment | "; + if ( value & FormatFeatureFlagBits::eColorAttachmentBlend ) result += "ColorAttachmentBlend | "; + if ( value & FormatFeatureFlagBits::eDepthStencilAttachment ) result += "DepthStencilAttachment | "; + if ( value & FormatFeatureFlagBits::eBlitSrc ) result += "BlitSrc | "; + if ( value & FormatFeatureFlagBits::eBlitDst ) result += "BlitDst | "; + if ( value & FormatFeatureFlagBits::eSampledImageFilterLinear ) result += "SampledImageFilterLinear | "; + if ( value & FormatFeatureFlagBits::eTransferSrc ) result += "TransferSrc | "; + if ( value & FormatFeatureFlagBits::eTransferDst ) result += "TransferDst | "; + if ( value & FormatFeatureFlagBits::eMidpointChromaSamples ) result += "MidpointChromaSamples | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) result += "SampledImageYcbcrConversionLinearFilter | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) result += "SampledImageYcbcrConversionSeparateReconstructionFilter | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) result += "SampledImageYcbcrConversionChromaReconstructionExplicit | "; + if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | "; + if ( value & FormatFeatureFlagBits::eDisjoint ) result += "Disjoint | "; + if ( value & FormatFeatureFlagBits::eCositedChromaSamples ) result += "CositedChromaSamples | "; + if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG ) result += "SampledImageFilterCubicIMG | "; + if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT ) result += "SampledImageFilterMinmaxEXT | "; + if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using FramebufferCreateFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(FramebufferCreateFlagBits::eImagelessKHR) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FramebufferCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator&( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FramebufferCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator^( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return FramebufferCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator~( FramebufferCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( FramebufferCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & FramebufferCreateFlagBits::eImagelessKHR ) result += "ImagelessKHR | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using GeometryFlagsNV = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = 
+  using GeometryFlagsNV = Flags<GeometryFlagBitsNV, VkGeometryFlagsNV>;
+
+  template <> struct FlagTraits<GeometryFlagBitsNV>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(GeometryFlagBitsNV::eOpaque) | VkFlags(GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsNV operator|( GeometryFlagBitsNV bit0, GeometryFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return GeometryFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsNV operator&( GeometryFlagBitsNV bit0, GeometryFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return GeometryFlagsNV( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsNV operator^( GeometryFlagBitsNV bit0, GeometryFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return GeometryFlagsNV( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsNV operator~( GeometryFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( GeometryFlagsNV( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( GeometryFlagsNV value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & GeometryFlagBitsNV::eOpaque ) result += "Opaque | ";
+    if ( value & GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation ) result += "NoDuplicateAnyHitInvocation | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  using GeometryInstanceFlagsNV = Flags<GeometryInstanceFlagBitsNV, VkGeometryInstanceFlagsNV>;
+
+  template <> struct FlagTraits<GeometryInstanceFlagBitsNV>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(GeometryInstanceFlagBitsNV::eTriangleCullDisable) | VkFlags(GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise) | VkFlags(GeometryInstanceFlagBitsNV::eForceOpaque) | VkFlags(GeometryInstanceFlagBitsNV::eForceNoOpaque)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsNV operator|( GeometryInstanceFlagBitsNV bit0, GeometryInstanceFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return GeometryInstanceFlagsNV( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsNV operator&( GeometryInstanceFlagBitsNV bit0, GeometryInstanceFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return GeometryInstanceFlagsNV( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsNV operator^( GeometryInstanceFlagBitsNV bit0, GeometryInstanceFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return GeometryInstanceFlagsNV( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsNV operator~( GeometryInstanceFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( GeometryInstanceFlagsNV( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsNV value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & GeometryInstanceFlagBitsNV::eTriangleCullDisable ) result += "TriangleCullDisable | ";
+    if ( value & GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise ) result += "TriangleFrontCounterclockwise | ";
+    if ( value & GeometryInstanceFlagBitsNV::eForceOpaque ) result += "ForceOpaque | ";
+    if ( value & GeometryInstanceFlagBitsNV::eForceNoOpaque ) result += "ForceNoOpaque | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  enum class HeadlessSurfaceCreateFlagBitsEXT : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagBitsEXT )
+  {
+    return "(void)";
+  }
+
+  using HeadlessSurfaceCreateFlagsEXT = Flags<HeadlessSurfaceCreateFlagBitsEXT, VkHeadlessSurfaceCreateFlagsEXT>;
+
+  VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagsEXT )
+  {
+    return "{}";
+  }
+
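+  // Platform-specific surface flag types are only compiled in when the
+  // corresponding VK_USE_PLATFORM_* macro is defined, as with the MoltenVK
+  // iOS surface type below.
+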
+#ifdef VK_USE_PLATFORM_IOS_MVK
+  enum class IOSSurfaceCreateFlagBitsMVK : VkFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagBitsMVK )
+  {
+    return "(void)";
+  }
+
+  using IOSSurfaceCreateFlagsMVK = Flags<IOSSurfaceCreateFlagBitsMVK, VkIOSSurfaceCreateFlagsMVK>;
+
+  VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagsMVK )
+  {
+    return "{}";
+  }
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+  using ImageAspectFlags = Flags<ImageAspectFlagBits, VkImageAspectFlags>;
+
+  template <> struct FlagTraits<ImageAspectFlagBits>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(ImageAspectFlagBits::eColor) | VkFlags(ImageAspectFlagBits::eDepth) | VkFlags(ImageAspectFlagBits::eStencil) | VkFlags(ImageAspectFlagBits::eMetadata) | VkFlags(ImageAspectFlagBits::ePlane0) | VkFlags(ImageAspectFlagBits::ePlane1) | VkFlags(ImageAspectFlagBits::ePlane2) | VkFlags(ImageAspectFlagBits::eMemoryPlane0EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane1EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane2EXT) | VkFlags(ImageAspectFlagBits::eMemoryPlane3EXT)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator|( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageAspectFlags( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator&( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageAspectFlags( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator^( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return ImageAspectFlags( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator~( ImageAspectFlagBits bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( ImageAspectFlags( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & ImageAspectFlagBits::eColor ) result += "Color | ";
+    if ( value & ImageAspectFlagBits::eDepth ) result += "Depth | ";
+    if ( value & ImageAspectFlagBits::eStencil ) result += "Stencil | ";
+    if ( value & ImageAspectFlagBits::eMetadata ) result += "Metadata | ";
+    if ( value & ImageAspectFlagBits::ePlane0 ) result += "Plane0 | ";
+    if ( value & ImageAspectFlagBits::ePlane1 ) result += "Plane1 | ";
+    if ( value & ImageAspectFlagBits::ePlane2 ) result += "Plane2 | ";
+    if ( value & ImageAspectFlagBits::eMemoryPlane0EXT ) result += "MemoryPlane0EXT | ";
+    if ( value & ImageAspectFlagBits::eMemoryPlane1EXT ) result += "MemoryPlane1EXT | ";
+    if ( value & ImageAspectFlagBits::eMemoryPlane2EXT ) result += "MemoryPlane2EXT | ";
+    if ( value & ImageAspectFlagBits::eMemoryPlane3EXT ) result += "MemoryPlane3EXT | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
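+  // Example (illustrative): combining aspect bits for a depth-stencil image,
+  //   vk::ImageAspectFlags aspects = vk::ImageAspectFlagBits::eDepth | vk::ImageAspectFlagBits::eStencil;
+  //   vk::to_string( aspects ) == "{ Depth | Stencil }"
+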
ImageCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator&( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator^( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator~( ImageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ImageCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ImageCreateFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & ImageCreateFlagBits::eSparseBinding ) result += "SparseBinding | "; + if ( value & ImageCreateFlagBits::eSparseResidency ) result += "SparseResidency | "; + if ( value & ImageCreateFlagBits::eSparseAliased ) result += "SparseAliased | "; + if ( value & ImageCreateFlagBits::eMutableFormat ) result += "MutableFormat | "; + if ( value & ImageCreateFlagBits::eCubeCompatible ) result += "CubeCompatible | "; + if ( value & ImageCreateFlagBits::eAlias ) result += "Alias | "; + if ( value & ImageCreateFlagBits::eSplitInstanceBindRegions ) result += "SplitInstanceBindRegions | "; + if ( value & ImageCreateFlagBits::e2DArrayCompatible ) result += "2DArrayCompatible | "; + if ( value & ImageCreateFlagBits::eBlockTexelViewCompatible ) result += "BlockTexelViewCompatible | "; + if ( value & ImageCreateFlagBits::eExtendedUsage ) result += "ExtendedUsage | "; + if ( value & ImageCreateFlagBits::eProtected ) result += "Protected | "; + if ( value & ImageCreateFlagBits::eDisjoint ) result += "Disjoint | "; + if ( value & ImageCreateFlagBits::eCornerSampledNV ) result += "CornerSampledNV | "; + if ( value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) result += "SampleLocationsCompatibleDepthEXT | "; + if ( value & ImageCreateFlagBits::eSubsampledEXT ) result += "SubsampledEXT | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + +#ifdef VK_USE_PLATFORM_FUCHSIA + enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagBitsFUCHSIA ) + { + return "(void)"; + } + + using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags; + + VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagsFUCHSIA ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + using ImageUsageFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(ImageUsageFlagBits::eTransferSrc) | VkFlags(ImageUsageFlagBits::eTransferDst) | VkFlags(ImageUsageFlagBits::eSampled) | VkFlags(ImageUsageFlagBits::eStorage) | VkFlags(ImageUsageFlagBits::eColorAttachment) | VkFlags(ImageUsageFlagBits::eDepthStencilAttachment) | VkFlags(ImageUsageFlagBits::eTransientAttachment) | VkFlags(ImageUsageFlagBits::eInputAttachment) | VkFlags(ImageUsageFlagBits::eShadingRateImageNV) | VkFlags(ImageUsageFlagBits::eFragmentDensityMapEXT) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator|( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageUsageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator&( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageUsageFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator^( 
ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageUsageFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator~( ImageUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ImageUsageFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ImageUsageFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & ImageUsageFlagBits::eTransferSrc ) result += "TransferSrc | "; + if ( value & ImageUsageFlagBits::eTransferDst ) result += "TransferDst | "; + if ( value & ImageUsageFlagBits::eSampled ) result += "Sampled | "; + if ( value & ImageUsageFlagBits::eStorage ) result += "Storage | "; + if ( value & ImageUsageFlagBits::eColorAttachment ) result += "ColorAttachment | "; + if ( value & ImageUsageFlagBits::eDepthStencilAttachment ) result += "DepthStencilAttachment | "; + if ( value & ImageUsageFlagBits::eTransientAttachment ) result += "TransientAttachment | "; + if ( value & ImageUsageFlagBits::eInputAttachment ) result += "InputAttachment | "; + if ( value & ImageUsageFlagBits::eShadingRateImageNV ) result += "ShadingRateImageNV | "; + if ( value & ImageUsageFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using ImageViewCreateFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageViewCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator&( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageViewCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator^( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ImageViewCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator~( ImageViewCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ImageViewCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) result += "FragmentDensityMapDynamicEXT | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using IndirectCommandsLayoutUsageFlagsNVX = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions) | VkFlags(IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNVX operator|( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT + { + return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNVX operator&( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT + { + return 
IndirectCommandsLayoutUsageFlagsNVX( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNVX operator^( IndirectCommandsLayoutUsageFlagBitsNVX bit0, IndirectCommandsLayoutUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT + { + return IndirectCommandsLayoutUsageFlagsNVX( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNVX operator~( IndirectCommandsLayoutUsageFlagBitsNVX bits ) VULKAN_HPP_NOEXCEPT + { + return ~( IndirectCommandsLayoutUsageFlagsNVX( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNVX value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences ) result += "UnorderedSequences | "; + if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences ) result += "SparseSequences | "; + if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions ) result += "EmptyExecutions | "; + if ( value & IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences ) result += "IndexedSequences | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using InstanceCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlags ) + { + return "{}"; + } + +#ifdef VK_USE_PLATFORM_MACOS_MVK + enum class MacOSSurfaceCreateFlagBitsMVK : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagBitsMVK ) + { + return "(void)"; + } + + using MacOSSurfaceCreateFlagsMVK = Flags; + + VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagsMVK ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + using MemoryAllocateFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(MemoryAllocateFlagBits::eDeviceMask) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator|( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryAllocateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator&( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryAllocateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator^( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryAllocateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator~( MemoryAllocateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( MemoryAllocateFlags( bits ) ); + } + + using MemoryAllocateFlagsKHR = MemoryAllocateFlags; + + VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & MemoryAllocateFlagBits::eDeviceMask ) result += "DeviceMask | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using MemoryHeapFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(MemoryHeapFlagBits::eDeviceLocal) | VkFlags(MemoryHeapFlagBits::eMultiInstance) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryHeapFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator&( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryHeapFlags( 
bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator^( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryHeapFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator~( MemoryHeapFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( MemoryHeapFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & MemoryHeapFlagBits::eDeviceLocal ) result += "DeviceLocal | "; + if ( value & MemoryHeapFlagBits::eMultiInstance ) result += "MultiInstance | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + enum class MemoryMapFlagBits : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits ) + { + return "(void)"; + } + + using MemoryMapFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags ) + { + return "{}"; + } + + using MemoryPropertyFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(MemoryPropertyFlagBits::eDeviceLocal) | VkFlags(MemoryPropertyFlagBits::eHostVisible) | VkFlags(MemoryPropertyFlagBits::eHostCoherent) | VkFlags(MemoryPropertyFlagBits::eHostCached) | VkFlags(MemoryPropertyFlagBits::eLazilyAllocated) | VkFlags(MemoryPropertyFlagBits::eProtected) | VkFlags(MemoryPropertyFlagBits::eDeviceCoherentAMD) | VkFlags(MemoryPropertyFlagBits::eDeviceUncachedAMD) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryPropertyFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator&( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryPropertyFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator^( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return MemoryPropertyFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( MemoryPropertyFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & MemoryPropertyFlagBits::eDeviceLocal ) result += "DeviceLocal | "; + if ( value & MemoryPropertyFlagBits::eHostVisible ) result += "HostVisible | "; + if ( value & MemoryPropertyFlagBits::eHostCoherent ) result += "HostCoherent | "; + if ( value & MemoryPropertyFlagBits::eHostCached ) result += "HostCached | "; + if ( value & MemoryPropertyFlagBits::eLazilyAllocated ) result += "LazilyAllocated | "; + if ( value & MemoryPropertyFlagBits::eProtected ) result += "Protected | "; + if ( value & MemoryPropertyFlagBits::eDeviceCoherentAMD ) result += "DeviceCoherentAMD | "; + if ( value & MemoryPropertyFlagBits::eDeviceUncachedAMD ) result += "DeviceUncachedAMD | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + +#ifdef VK_USE_PLATFORM_METAL_EXT + enum class MetalSurfaceCreateFlagBitsEXT : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagBitsEXT ) + { + return "(void)"; + } + + using MetalSurfaceCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagsEXT ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + 
using ObjectEntryUsageFlagsNVX = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(ObjectEntryUsageFlagBitsNVX::eGraphics) | VkFlags(ObjectEntryUsageFlagBitsNVX::eCompute) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ObjectEntryUsageFlagsNVX operator|( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT + { + return ObjectEntryUsageFlagsNVX( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ObjectEntryUsageFlagsNVX operator&( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT + { + return ObjectEntryUsageFlagsNVX( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ObjectEntryUsageFlagsNVX operator^( ObjectEntryUsageFlagBitsNVX bit0, ObjectEntryUsageFlagBitsNVX bit1 ) VULKAN_HPP_NOEXCEPT + { + return ObjectEntryUsageFlagsNVX( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ObjectEntryUsageFlagsNVX operator~( ObjectEntryUsageFlagBitsNVX bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ObjectEntryUsageFlagsNVX( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ObjectEntryUsageFlagsNVX value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & ObjectEntryUsageFlagBitsNVX::eGraphics ) result += "Graphics | "; + if ( value & ObjectEntryUsageFlagBitsNVX::eCompute ) result += "Compute | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using PeerMemoryFeatureFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(PeerMemoryFeatureFlagBits::eCopySrc) | VkFlags(PeerMemoryFeatureFlagBits::eCopyDst) | VkFlags(PeerMemoryFeatureFlagBits::eGenericSrc) | VkFlags(PeerMemoryFeatureFlagBits::eGenericDst) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator|( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PeerMemoryFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator&( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PeerMemoryFeatureFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator^( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PeerMemoryFeatureFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator~( PeerMemoryFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( PeerMemoryFeatureFlags( bits ) ); + } + + using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags; + + VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & PeerMemoryFeatureFlagBits::eCopySrc ) result += "CopySrc | "; + if ( value & PeerMemoryFeatureFlagBits::eCopyDst ) result += "CopyDst | "; + if ( value & PeerMemoryFeatureFlagBits::eGenericSrc ) result += "GenericSrc | "; + if ( value & PeerMemoryFeatureFlagBits::eGenericDst ) result += "GenericDst | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using PipelineCacheCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags ) + { + return "{}"; + } + + using PipelineColorBlendStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlags ) + { + return "{}"; + } + + using PipelineCompilerControlFlagsAMD = Flags; + + VULKAN_HPP_INLINE 
std::string to_string( PipelineCompilerControlFlagsAMD ) + { + return "{}"; + } + + enum class PipelineCoverageModulationStateCreateFlagBitsNV : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagBitsNV ) + { + return "(void)"; + } + + using PipelineCoverageModulationStateCreateFlagsNV = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagsNV ) + { + return "{}"; + } + + enum class PipelineCoverageReductionStateCreateFlagBitsNV : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagBitsNV ) + { + return "(void)"; + } + + using PipelineCoverageReductionStateCreateFlagsNV = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagsNV ) + { + return "{}"; + } + + enum class PipelineCoverageToColorStateCreateFlagBitsNV : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagBitsNV ) + { + return "(void)"; + } + + using PipelineCoverageToColorStateCreateFlagsNV = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagsNV ) + { + return "{}"; + } + + using PipelineCreateFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) | VkFlags(PipelineCreateFlagBits::eAllowDerivatives) | VkFlags(PipelineCreateFlagBits::eDerivative) | VkFlags(PipelineCreateFlagBits::eViewIndexFromDeviceIndex) | VkFlags(PipelineCreateFlagBits::eDispatchBase) | VkFlags(PipelineCreateFlagBits::eDeferCompileNV) | VkFlags(PipelineCreateFlagBits::eCaptureStatisticsKHR) | VkFlags(PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator&( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator^( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator~( PipelineCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( PipelineCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & PipelineCreateFlagBits::eDisableOptimization ) result += "DisableOptimization | "; + if ( value & PipelineCreateFlagBits::eAllowDerivatives ) result += "AllowDerivatives | "; + if ( value & PipelineCreateFlagBits::eDerivative ) result += "Derivative | "; + if ( value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) result += "ViewIndexFromDeviceIndex | "; + if ( value & PipelineCreateFlagBits::eDispatchBase ) result += "DispatchBase | "; + if ( value & PipelineCreateFlagBits::eDeferCompileNV ) result += "DeferCompileNV | "; + if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR ) result += "CaptureStatisticsKHR | "; + if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) result += "CaptureInternalRepresentationsKHR | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using 
PipelineCreationFeedbackFlagsEXT = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(PipelineCreationFeedbackFlagBitsEXT::eValid) | VkFlags(PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit) | VkFlags(PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator|( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCreationFeedbackFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator&( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCreationFeedbackFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator^( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineCreationFeedbackFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator~( PipelineCreationFeedbackFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( PipelineCreationFeedbackFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagsEXT value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & PipelineCreationFeedbackFlagBitsEXT::eValid ) result += "Valid | "; + if ( value & PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit ) result += "ApplicationPipelineCacheHit | "; + if ( value & PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration ) result += "BasePipelineAcceleration | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using PipelineDepthStencilStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlags ) + { + return "{}"; + } + + enum class PipelineDiscardRectangleStateCreateFlagBitsEXT : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagBitsEXT ) + { + return "(void)"; + } + + using PipelineDiscardRectangleStateCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagsEXT ) + { + return "{}"; + } + + using PipelineDynamicStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlags ) + { + return "{}"; + } + + using PipelineInputAssemblyStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlags ) + { + return "{}"; + } + + using PipelineLayoutCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlags ) + { + return "{}"; + } + + using PipelineMultisampleStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlags ) + { + return "{}"; + } + + enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagBitsEXT ) + { + return "(void)"; + } + + using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagsEXT ) + { + return "{}"; + } + + enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( 
PipelineRasterizationDepthClipStateCreateFlagBitsEXT ) + { + return "(void)"; + } + + using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagsEXT ) + { + return "{}"; + } + + using PipelineRasterizationStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlags ) + { + return "{}"; + } + + enum class PipelineRasterizationStateStreamCreateFlagBitsEXT : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagBitsEXT ) + { + return "(void)"; + } + + using PipelineRasterizationStateStreamCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagsEXT ) + { + return "{}"; + } + + using PipelineShaderStageCreateFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT) | VkFlags(PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator|( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineShaderStageCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator&( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineShaderStageCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator^( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineShaderStageCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator~( PipelineShaderStageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( PipelineShaderStageCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT ) result += "AllowVaryingSubgroupSizeEXT | "; + if ( value & PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT ) result += "RequireFullSubgroupsEXT | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using PipelineStageFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) | VkFlags(PipelineStageFlagBits::eDrawIndirect) | VkFlags(PipelineStageFlagBits::eVertexInput) | VkFlags(PipelineStageFlagBits::eVertexShader) | VkFlags(PipelineStageFlagBits::eTessellationControlShader) | VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) | VkFlags(PipelineStageFlagBits::eGeometryShader) | VkFlags(PipelineStageFlagBits::eFragmentShader) | VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) | VkFlags(PipelineStageFlagBits::eLateFragmentTests) | VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) | VkFlags(PipelineStageFlagBits::eComputeShader) | VkFlags(PipelineStageFlagBits::eTransfer) | VkFlags(PipelineStageFlagBits::eBottomOfPipe) | VkFlags(PipelineStageFlagBits::eHost) | VkFlags(PipelineStageFlagBits::eAllGraphics) | VkFlags(PipelineStageFlagBits::eAllCommands) | VkFlags(PipelineStageFlagBits::eTransformFeedbackEXT) | 
VkFlags(PipelineStageFlagBits::eConditionalRenderingEXT) | VkFlags(PipelineStageFlagBits::eCommandProcessNVX) | VkFlags(PipelineStageFlagBits::eShadingRateImageNV) | VkFlags(PipelineStageFlagBits::eRayTracingShaderNV) | VkFlags(PipelineStageFlagBits::eAccelerationStructureBuildNV) | VkFlags(PipelineStageFlagBits::eTaskShaderNV) | VkFlags(PipelineStageFlagBits::eMeshShaderNV) | VkFlags(PipelineStageFlagBits::eFragmentDensityProcessEXT) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator|( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineStageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator&( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineStageFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator^( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return PipelineStageFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator~( PipelineStageFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( PipelineStageFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & PipelineStageFlagBits::eTopOfPipe ) result += "TopOfPipe | "; + if ( value & PipelineStageFlagBits::eDrawIndirect ) result += "DrawIndirect | "; + if ( value & PipelineStageFlagBits::eVertexInput ) result += "VertexInput | "; + if ( value & PipelineStageFlagBits::eVertexShader ) result += "VertexShader | "; + if ( value & PipelineStageFlagBits::eTessellationControlShader ) result += "TessellationControlShader | "; + if ( value & PipelineStageFlagBits::eTessellationEvaluationShader ) result += "TessellationEvaluationShader | "; + if ( value & PipelineStageFlagBits::eGeometryShader ) result += "GeometryShader | "; + if ( value & PipelineStageFlagBits::eFragmentShader ) result += "FragmentShader | "; + if ( value & PipelineStageFlagBits::eEarlyFragmentTests ) result += "EarlyFragmentTests | "; + if ( value & PipelineStageFlagBits::eLateFragmentTests ) result += "LateFragmentTests | "; + if ( value & PipelineStageFlagBits::eColorAttachmentOutput ) result += "ColorAttachmentOutput | "; + if ( value & PipelineStageFlagBits::eComputeShader ) result += "ComputeShader | "; + if ( value & PipelineStageFlagBits::eTransfer ) result += "Transfer | "; + if ( value & PipelineStageFlagBits::eBottomOfPipe ) result += "BottomOfPipe | "; + if ( value & PipelineStageFlagBits::eHost ) result += "Host | "; + if ( value & PipelineStageFlagBits::eAllGraphics ) result += "AllGraphics | "; + if ( value & PipelineStageFlagBits::eAllCommands ) result += "AllCommands | "; + if ( value & PipelineStageFlagBits::eTransformFeedbackEXT ) result += "TransformFeedbackEXT | "; + if ( value & PipelineStageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | "; + if ( value & PipelineStageFlagBits::eCommandProcessNVX ) result += "CommandProcessNVX | "; + if ( value & PipelineStageFlagBits::eShadingRateImageNV ) result += "ShadingRateImageNV | "; + if ( value & PipelineStageFlagBits::eRayTracingShaderNV ) result += "RayTracingShaderNV | "; + if ( value & PipelineStageFlagBits::eAccelerationStructureBuildNV ) result += "AccelerationStructureBuildNV | "; + if ( value & PipelineStageFlagBits::eTaskShaderNV ) result += "TaskShaderNV | "; + if ( value & 
PipelineStageFlagBits::eMeshShaderNV ) result += "MeshShaderNV | "; + if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT ) result += "FragmentDensityProcessEXT | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using PipelineTessellationStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlags ) + { + return "{}"; + } + + using PipelineVertexInputStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlags ) + { + return "{}"; + } + + using PipelineViewportStateCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlags ) + { + return "{}"; + } + + enum class PipelineViewportSwizzleStateCreateFlagBitsNV : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagBitsNV ) + { + return "(void)"; + } + + using PipelineViewportSwizzleStateCreateFlagsNV = Flags; + + VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagsNV ) + { + return "{}"; + } + + using QueryControlFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(QueryControlFlagBits::ePrecise) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator|( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryControlFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator&( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryControlFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator^( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryControlFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator~( QueryControlFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( QueryControlFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( QueryControlFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & QueryControlFlagBits::ePrecise ) result += "Precise | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using QueryPipelineStatisticFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyVertices) | VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eVertexShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eClippingInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eClippingPrimitives) | VkFlags(QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) | VkFlags(QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) | VkFlags(QueryPipelineStatisticFlagBits::eComputeShaderInvocations) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryPipelineStatisticFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator&( QueryPipelineStatisticFlagBits bit0, 
QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryPipelineStatisticFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator^( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryPipelineStatisticFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( QueryPipelineStatisticFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) result += "InputAssemblyVertices | "; + if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) result += "InputAssemblyPrimitives | "; + if ( value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) result += "VertexShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) result += "GeometryShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) result += "GeometryShaderPrimitives | "; + if ( value & QueryPipelineStatisticFlagBits::eClippingInvocations ) result += "ClippingInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eClippingPrimitives ) result += "ClippingPrimitives | "; + if ( value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) result += "FragmentShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches ) result += "TessellationControlShaderPatches | "; + if ( value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations ) result += "TessellationEvaluationShaderInvocations | "; + if ( value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations ) result += "ComputeShaderInvocations | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using QueryPoolCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlags ) + { + return "{}"; + } + + using QueryResultFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(QueryResultFlagBits::e64) | VkFlags(QueryResultFlagBits::eWait) | VkFlags(QueryResultFlagBits::eWithAvailability) | VkFlags(QueryResultFlagBits::ePartial) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator|( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryResultFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator&( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryResultFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator^( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueryResultFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator~( QueryResultFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( QueryResultFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & QueryResultFlagBits::e64 ) result += "64 | "; + if ( value & QueryResultFlagBits::eWait ) result += "Wait | "; + if ( value & QueryResultFlagBits::eWithAvailability ) result += "WithAvailability | "; 
+ if ( value & QueryResultFlagBits::ePartial ) result += "Partial | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using QueueFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(QueueFlagBits::eGraphics) | VkFlags(QueueFlagBits::eCompute) | VkFlags(QueueFlagBits::eTransfer) | VkFlags(QueueFlagBits::eSparseBinding) | VkFlags(QueueFlagBits::eProtected) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator|( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueueFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator&( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueueFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator^( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return QueueFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator~( QueueFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( QueueFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( QueueFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & QueueFlagBits::eGraphics ) result += "Graphics | "; + if ( value & QueueFlagBits::eCompute ) result += "Compute | "; + if ( value & QueueFlagBits::eTransfer ) result += "Transfer | "; + if ( value & QueueFlagBits::eSparseBinding ) result += "SparseBinding | "; + if ( value & QueueFlagBits::eProtected ) result += "Protected | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using RenderPassCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags ) + { + return "{}"; + } + + using ResolveModeFlagsKHR = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(ResolveModeFlagBitsKHR::eNone) | VkFlags(ResolveModeFlagBitsKHR::eSampleZero) | VkFlags(ResolveModeFlagBitsKHR::eAverage) | VkFlags(ResolveModeFlagBitsKHR::eMin) | VkFlags(ResolveModeFlagBitsKHR::eMax) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlagsKHR operator|( ResolveModeFlagBitsKHR bit0, ResolveModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return ResolveModeFlagsKHR( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlagsKHR operator&( ResolveModeFlagBitsKHR bit0, ResolveModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return ResolveModeFlagsKHR( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlagsKHR operator^( ResolveModeFlagBitsKHR bit0, ResolveModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT + { + return ResolveModeFlagsKHR( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlagsKHR operator~( ResolveModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ResolveModeFlagsKHR( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ResolveModeFlagsKHR value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & ResolveModeFlagBitsKHR::eSampleZero ) result += "SampleZero | "; + if ( value & ResolveModeFlagBitsKHR::eAverage ) result += "Average | "; + if ( value & ResolveModeFlagBitsKHR::eMin ) result += "Min | "; + if ( value & ResolveModeFlagBitsKHR::eMax ) result += "Max | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using SampleCountFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(SampleCountFlagBits::e1) | VkFlags(SampleCountFlagBits::e2) | 
VkFlags(SampleCountFlagBits::e4) | VkFlags(SampleCountFlagBits::e8) | VkFlags(SampleCountFlagBits::e16) | VkFlags(SampleCountFlagBits::e32) | VkFlags(SampleCountFlagBits::e64) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator|( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SampleCountFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator&( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SampleCountFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator^( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SampleCountFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator~( SampleCountFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( SampleCountFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SampleCountFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & SampleCountFlagBits::e1 ) result += "1 | "; + if ( value & SampleCountFlagBits::e2 ) result += "2 | "; + if ( value & SampleCountFlagBits::e4 ) result += "4 | "; + if ( value & SampleCountFlagBits::e8 ) result += "8 | "; + if ( value & SampleCountFlagBits::e16 ) result += "16 | "; + if ( value & SampleCountFlagBits::e32 ) result += "32 | "; + if ( value & SampleCountFlagBits::e64 ) result += "64 | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using SamplerCreateFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(SamplerCreateFlagBits::eSubsampledEXT) | VkFlags(SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator|( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SamplerCreateFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator&( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SamplerCreateFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator^( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SamplerCreateFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator~( SamplerCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( SamplerCreateFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & SamplerCreateFlagBits::eSubsampledEXT ) result += "SubsampledEXT | "; + if ( value & SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) result += "SubsampledCoarseReconstructionEXT | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + enum class SemaphoreCreateFlagBits : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlagBits ) + { + return "(void)"; + } + + using SemaphoreCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlags ) + { + return "{}"; + } + + using SemaphoreImportFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(SemaphoreImportFlagBits::eTemporary) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator|( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) 
VULKAN_HPP_NOEXCEPT + { + return SemaphoreImportFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator&( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SemaphoreImportFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator^( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SemaphoreImportFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator~( SemaphoreImportFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( SemaphoreImportFlags( bits ) ); + } + + using SemaphoreImportFlagsKHR = SemaphoreImportFlags; + + VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & SemaphoreImportFlagBits::eTemporary ) result += "Temporary | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using ShaderCorePropertiesFlagsAMD = Flags; + + VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagsAMD ) + { + return "{}"; + } + + using ShaderModuleCreateFlags = Flags; + + VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlags ) + { + return "{}"; + } + + using ShaderStageFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(ShaderStageFlagBits::eVertex) | VkFlags(ShaderStageFlagBits::eTessellationControl) | VkFlags(ShaderStageFlagBits::eTessellationEvaluation) | VkFlags(ShaderStageFlagBits::eGeometry) | VkFlags(ShaderStageFlagBits::eFragment) | VkFlags(ShaderStageFlagBits::eCompute) | VkFlags(ShaderStageFlagBits::eAllGraphics) | VkFlags(ShaderStageFlagBits::eAll) | VkFlags(ShaderStageFlagBits::eRaygenNV) | VkFlags(ShaderStageFlagBits::eAnyHitNV) | VkFlags(ShaderStageFlagBits::eClosestHitNV) | VkFlags(ShaderStageFlagBits::eMissNV) | VkFlags(ShaderStageFlagBits::eIntersectionNV) | VkFlags(ShaderStageFlagBits::eCallableNV) | VkFlags(ShaderStageFlagBits::eTaskNV) | VkFlags(ShaderStageFlagBits::eMeshNV) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator|( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ShaderStageFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator&( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ShaderStageFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator^( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return ShaderStageFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator~( ShaderStageFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( ShaderStageFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( ShaderStageFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & ShaderStageFlagBits::eVertex ) result += "Vertex | "; + if ( value & ShaderStageFlagBits::eTessellationControl ) result += "TessellationControl | "; + if ( value & ShaderStageFlagBits::eTessellationEvaluation ) result += "TessellationEvaluation | "; + if ( value & ShaderStageFlagBits::eGeometry ) result += "Geometry | "; + if ( value & ShaderStageFlagBits::eFragment ) result += "Fragment | "; + if ( value & ShaderStageFlagBits::eCompute ) result += "Compute | "; + if ( value & ShaderStageFlagBits::eRaygenNV ) result += "RaygenNV | "; + if ( value & 
ShaderStageFlagBits::eAnyHitNV ) result += "AnyHitNV | "; + if ( value & ShaderStageFlagBits::eClosestHitNV ) result += "ClosestHitNV | "; + if ( value & ShaderStageFlagBits::eMissNV ) result += "MissNV | "; + if ( value & ShaderStageFlagBits::eIntersectionNV ) result += "IntersectionNV | "; + if ( value & ShaderStageFlagBits::eCallableNV ) result += "CallableNV | "; + if ( value & ShaderStageFlagBits::eTaskNV ) result += "TaskNV | "; + if ( value & ShaderStageFlagBits::eMeshNV ) result += "MeshNV | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using SparseImageFormatFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(SparseImageFormatFlagBits::eSingleMiptail) | VkFlags(SparseImageFormatFlagBits::eAlignedMipSize) | VkFlags(SparseImageFormatFlagBits::eNonstandardBlockSize) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SparseImageFormatFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator&( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SparseImageFormatFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator^( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SparseImageFormatFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( SparseImageFormatFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & SparseImageFormatFlagBits::eSingleMiptail ) result += "SingleMiptail | "; + if ( value & SparseImageFormatFlagBits::eAlignedMipSize ) result += "AlignedMipSize | "; + if ( value & SparseImageFormatFlagBits::eNonstandardBlockSize ) result += "NonstandardBlockSize | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using SparseMemoryBindFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(SparseMemoryBindFlagBits::eMetadata) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SparseMemoryBindFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator&( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SparseMemoryBindFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator^( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SparseMemoryBindFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( SparseMemoryBindFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & SparseMemoryBindFlagBits::eMetadata ) result += "Metadata | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using StencilFaceFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = 
VkFlags(StencilFaceFlagBits::eFront) | VkFlags(StencilFaceFlagBits::eBack) | VkFlags(StencilFaceFlagBits::eFrontAndBack) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator|( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return StencilFaceFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator&( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return StencilFaceFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator^( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return StencilFaceFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator~( StencilFaceFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( StencilFaceFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( StencilFaceFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & StencilFaceFlagBits::eFront ) result += "Front | "; + if ( value & StencilFaceFlagBits::eBack ) result += "Back | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + +#ifdef VK_USE_PLATFORM_GGP + enum class StreamDescriptorSurfaceCreateFlagBitsGGP : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagBitsGGP ) + { + return "(void)"; + } + + using StreamDescriptorSurfaceCreateFlagsGGP = Flags; + + VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagsGGP ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_GGP*/ + + using SubgroupFeatureFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(SubgroupFeatureFlagBits::eBasic) | VkFlags(SubgroupFeatureFlagBits::eVote) | VkFlags(SubgroupFeatureFlagBits::eArithmetic) | VkFlags(SubgroupFeatureFlagBits::eBallot) | VkFlags(SubgroupFeatureFlagBits::eShuffle) | VkFlags(SubgroupFeatureFlagBits::eShuffleRelative) | VkFlags(SubgroupFeatureFlagBits::eClustered) | VkFlags(SubgroupFeatureFlagBits::eQuad) | VkFlags(SubgroupFeatureFlagBits::ePartitionedNV) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator|( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubgroupFeatureFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator&( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubgroupFeatureFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator^( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubgroupFeatureFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator~( SubgroupFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( SubgroupFeatureFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & SubgroupFeatureFlagBits::eBasic ) result += "Basic | "; + if ( value & SubgroupFeatureFlagBits::eVote ) result += "Vote | "; + if ( value & SubgroupFeatureFlagBits::eArithmetic ) result += "Arithmetic | "; + if ( value & SubgroupFeatureFlagBits::eBallot ) result += "Ballot | "; + if ( value & SubgroupFeatureFlagBits::eShuffle ) result += "Shuffle | "; + if ( value & SubgroupFeatureFlagBits::eShuffleRelative ) result += "ShuffleRelative | "; + if ( 
value & SubgroupFeatureFlagBits::eClustered ) result += "Clustered | "; + if ( value & SubgroupFeatureFlagBits::eQuad ) result += "Quad | "; + if ( value & SubgroupFeatureFlagBits::ePartitionedNV ) result += "PartitionedNV | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using SubpassDescriptionFlags = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(SubpassDescriptionFlagBits::ePerViewAttributesNVX) | VkFlags(SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubpassDescriptionFlags( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator&( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubpassDescriptionFlags( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator^( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT + { + return SubpassDescriptionFlags( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator~( SubpassDescriptionFlagBits bits ) VULKAN_HPP_NOEXCEPT + { + return ~( SubpassDescriptionFlags( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlags value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & SubpassDescriptionFlagBits::ePerViewAttributesNVX ) result += "PerViewAttributesNVX | "; + if ( value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) result += "PerViewPositionXOnlyNVX | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using SurfaceCounterFlagsEXT = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(SurfaceCounterFlagBitsEXT::eVblank) + }; + }; + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return SurfaceCounterFlagsEXT( bit0 ) | bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator&( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return SurfaceCounterFlagsEXT( bit0 ) & bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator^( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT + { + return SurfaceCounterFlagsEXT( bit0 ) ^ bit1; + } + + VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT + { + return ~( SurfaceCounterFlagsEXT( bits ) ); + } + + VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagsEXT value ) + { + if ( !value ) return "{}"; + std::string result; + + if ( value & SurfaceCounterFlagBitsEXT::eVblank ) result += "Vblank | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + using SurfaceTransformFlagsKHR = Flags; + + template <> struct FlagTraits + { + enum : VkFlags + { + allFlags = VkFlags(SurfaceTransformFlagBitsKHR::eIdentity) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirror) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) | 
+
+  using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR>;
+
+  template <> struct FlagTraits<SurfaceTransformFlagBitsKHR>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(SurfaceTransformFlagBitsKHR::eIdentity) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirror) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) | VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) | VkFlags(SurfaceTransformFlagBitsKHR::eInherit)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SurfaceTransformFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator&( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SurfaceTransformFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator^( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SurfaceTransformFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SurfaceTransformFlagsKHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagsKHR value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SurfaceTransformFlagBitsKHR::eIdentity ) result += "Identity | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eRotate90 ) result += "Rotate90 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eRotate180 ) result += "Rotate180 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eRotate270 ) result += "Rotate270 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirror ) result += "HorizontalMirror | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) result += "HorizontalMirrorRotate90 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) result += "HorizontalMirrorRotate180 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) result += "HorizontalMirrorRotate270 | ";
+    if ( value & SurfaceTransformFlagBitsKHR::eInherit ) result += "Inherit | ";
+    return "{ " + result.substr(0, result.size() - 3) + " }";
+  }
+
+  using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR>;
+
+  template <> struct FlagTraits<SwapchainCreateFlagBitsKHR>
+  {
+    enum : VkFlags
+    {
+      allFlags = VkFlags(SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions) | VkFlags(SwapchainCreateFlagBitsKHR::eProtected) | VkFlags(SwapchainCreateFlagBitsKHR::eMutableFormat)
+    };
+  };
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SwapchainCreateFlagsKHR( bit0 ) | bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator&( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SwapchainCreateFlagsKHR( bit0 ) & bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator^( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
+  {
+    return SwapchainCreateFlagsKHR( bit0 ) ^ bit1;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator~( SwapchainCreateFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( SwapchainCreateFlagsKHR( bits ) );
+  }
+
+  VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value )
+  {
+    if ( !value ) return "{}";
+    std::string result;
+
+    if ( value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) result += "SplitInstanceBindRegions | ";
result += "Protected | "; + if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat ) result += "MutableFormat | "; + return "{ " + result.substr(0, result.size() - 3) + " }"; + } + + enum class ValidationCacheCreateFlagBitsEXT : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagBitsEXT ) + { + return "(void)"; + } + + using ValidationCacheCreateFlagsEXT = Flags; + + VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagsEXT ) + { + return "{}"; + } + +#ifdef VK_USE_PLATFORM_VI_NN + enum class ViSurfaceCreateFlagBitsNN : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagBitsNN ) + { + return "(void)"; + } + + using ViSurfaceCreateFlagsNN = Flags; + + VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagsNN ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_VI_NN*/ + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + enum class WaylandSurfaceCreateFlagBitsKHR : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } + + using WaylandSurfaceCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + enum class Win32SurfaceCreateFlagBitsKHR : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } + + using Win32SurfaceCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + enum class XcbSurfaceCreateFlagBitsKHR : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } + + using XcbSurfaceCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + enum class XlibSurfaceCreateFlagBitsKHR : VkFlags + {}; + + VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagBitsKHR ) + { + return "(void)"; + } + + using XlibSurfaceCreateFlagsKHR = Flags; + + VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagsKHR ) + { + return "{}"; + } +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/Vk.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/Vk.hpp new file mode 100644 index 00000000..cca9c0b2 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/Vk.hpp @@ -0,0 +1,185 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/Vk.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/Vk.hpp
new file mode 100644
index 00000000..cca9c0b2
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/Vk.hpp
@@ -0,0 +1,185 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const &d) VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkInstance*>( pInstance ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d )
+  {
+    VULKAN_HPP_NAMESPACE::Instance instance;
+    Result result = static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkInstance*>( &instance ) ) );
+    return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING"::createInstance" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Instance,Dispatch>>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d )
+  {
+    VULKAN_HPP_NAMESPACE::Instance instance;
+    Result result = static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkInstance*>( &instance ) ) );
+
+    ObjectDestroy<NoParent,Dispatch> deleter( allocator, d );
+    return createResultValue<Instance,Dispatch>( result, instance, VULKAN_HPP_NAMESPACE_STRING"::createInstanceUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const &d )
+  {
+    std::vector<ExtensionProperties,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceExtensionProperties" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d )
+  {
+    std::vector<ExtensionProperties,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceExtensionProperties" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d) VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties*>( pProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Dispatch const &d )
+  {
+    std::vector<LayerProperties,Allocator> properties;
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceLayerProperties" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d )
+  {
+    std::vector<LayerProperties,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties*>( properties.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+      properties.resize( propertyCount );
+    }
+    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceLayerProperties" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d) VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion(Dispatch const &d )
+  {
+    uint32_t apiVersion;
+    Result result = static_cast<Result>( d.vkEnumerateInstanceVersion( &apiVersion ) );
+    return createResultValue( result, apiVersion, VULKAN_HPP_NAMESPACE_STRING"::enumerateInstanceVersion" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+} // namespace VULKAN_HPP_NAMESPACE
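These free functions in Vk.hpp are the instance-level entry points generated outside any handle class. The enhanced overloads hide both the out-parameter plumbing and the `eIncomplete` retry loop of the two-call enumeration idiom. A usage sketch, assuming exceptions and smart handles are left enabled, the default `vk` namespace, and the default dispatcher declared elsewhere in these headers (the `ApplicationInfo` values are placeholders):

#include <iostream>
#include <vulkan/vulkan.hpp>

int main()
{
  // the enhanced overload loops on eIncomplete internally and returns the
  // filled vector, throwing vk::SystemError on failure
  auto extensions = vk::enumerateInstanceExtensionProperties( nullptr );
  for ( auto const &ext : extensions )
    std::cout << ext.extensionName << '\n';

  vk::ApplicationInfo appInfo( "demo", 1, nullptr, 0, VK_API_VERSION_1_1 );
  vk::InstanceCreateInfo createInfo( {}, &appInfo );

  // the Unique variant wires up an ObjectDestroy deleter, so vkDestroyInstance
  // runs automatically when `instance` leaves scope
  auto instance = vk::createInstanceUnique( createInfo );
  return 0;
}

The `UniqueHandle` returned by `createInstanceUnique` makes the handle leak-proof on early returns, which is why the generator emits a `*Unique` twin for every creation function when smart handles are enabled.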
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkAccelerationStructureNV.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkAccelerationStructureNV.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkAccelerationStructureNV.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkBuffer.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkBuffer.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkBuffer.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkBufferView.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkBufferView.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkBufferView.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkCommandBuffer.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkCommandBuffer.hpp
new file mode 100644
index 00000000..75394c45
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkCommandBuffer.hpp
@@ -0,0 +1,1393 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
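VkCommandBuffer.hpp, which this hunk opens, wraps every `vkCmd*` entry point as a `CommandBuffer` member function, each emitted twice: a raw pointer-based form and an enhanced form guarded by `VULKAN_HPP_DISABLE_ENHANCED_MODE`. A sketch of recording through the enhanced wrappers, assuming the render pass, framebuffer, and extent come from elsewhere (the `record` helper itself is hypothetical):

#include <array>
#include <vulkan/vulkan.hpp>

// hypothetical helper: records a render pass that only clears the framebuffer
void record( vk::CommandBuffer cmd, vk::RenderPass renderPass, vk::Framebuffer framebuffer, vk::Extent2D extent )
{
  vk::CommandBufferBeginInfo beginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit );
  cmd.begin( beginInfo ); // vkBeginCommandBuffer; throws vk::SystemError on failure

  vk::ClearValue clear( vk::ClearColorValue( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } ) );
  vk::RenderPassBeginInfo rpBegin( renderPass, framebuffer, vk::Rect2D( { 0, 0 }, extent ), 1, &clear );
  cmd.beginRenderPass( rpBegin, vk::SubpassContents::eInline ); // vkCmdBeginRenderPass
  cmd.endRenderPass();                                          // vkCmdEndRenderPass
  cmd.end();                                                    // vkEndCommandBuffer
}

The enhanced `ArrayProxy` overloads that follow also validate that parallel arrays (buffers, offsets, sizes) have matching lengths, asserting when exceptions are disabled and throwing `LogicError` otherwise.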
+ +#pragma once + +#include "../structs.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + + template + VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( pBeginInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( &beginInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::begin" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast( pConditionalRenderingBegin ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast( &conditionalRenderingBegin ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( &labelInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginQuery( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginQuery( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ), index ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + 
d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ), index ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast( pRenderPassBegin ), static_cast( contents ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast( &renderPassBegin ), static_cast( contents ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfoKHR* pSubpassBeginInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast( pRenderPassBegin ), reinterpret_cast( pSubpassBeginInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfoKHR & subpassBeginInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast( &renderPassBegin ), reinterpret_cast( &subpassBeginInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast( pCounterBuffers ), reinterpret_cast( pCounterBufferOffsets ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy counterBuffers, ArrayProxy counterBufferOffsets, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() ); +#else + if ( counterBuffers.size() != counterBufferOffsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size() , reinterpret_cast( counterBuffers.data() ), reinterpret_cast( counterBufferOffsets.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast( pipelineBindPoint ), 
static_cast( layout ), firstSet, descriptorSetCount, reinterpret_cast( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy descriptorSets, ArrayProxy dynamicOffsets, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), firstSet, descriptorSets.size() , reinterpret_cast( descriptorSets.data() ), dynamicOffsets.size() , dynamicOffsets.data() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( indexType ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( indexType ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindPipeline( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindPipeline( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), reinterpret_cast( pOffsets ), reinterpret_cast( pSizes ) ); + } 
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, ArrayProxy sizes, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); +#else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == sizes.size() ); +#else + if ( buffers.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( offsets.size() == sizes.size() ); +#else + if ( offsets.size() != sizes.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindTransformFeedbackBuffersEXT: offsets.size() != sizes.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast( buffers.data() ), reinterpret_cast( offsets.data() ), reinterpret_cast( sizes.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), reinterpret_cast( pOffsets ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); +#else + if ( buffers.size() != offsets.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size() , reinterpret_cast( buffers.data() ), reinterpret_cast( offsets.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBlitImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ), static_cast( filter ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + 
{ + d.vkCmdBlitImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ), static_cast( filter ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( pInfo ), static_cast( instanceData ), static_cast( instanceOffset ), static_cast( update ), static_cast( dst ), static_cast( src ), static_cast( scratch ), static_cast( scratchOffset ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast( &info ), static_cast( instanceData ), static_cast( instanceOffset ), static_cast( update ), static_cast( dst ), static_cast( src ), static_cast( scratch ), static_cast( scratchOffset ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast( pAttachments ), rectCount, reinterpret_cast( pRects ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy attachments, ArrayProxy rects, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdClearAttachments( m_commandBuffer, attachments.size() , reinterpret_cast( attachments.data() ), rects.size() , reinterpret_cast( rects.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdClearColorImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( pColor ), rangeCount, reinterpret_cast( pRanges ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdClearColorImage( 
m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( &color ), ranges.size() , reinterpret_cast( ranges.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( pDepthStencil ), rangeCount, reinterpret_cast( pRanges ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy ranges, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast( image ), static_cast( imageLayout ), reinterpret_cast( &depthStencil ), ranges.size() , reinterpret_cast( ranges.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV mode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast( dst ), static_cast( src ), static_cast( mode ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV mode, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast( dst ), static_cast( src ), static_cast( mode ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyBuffer( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstBuffer ), regionCount, reinterpret_cast( pRegions ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyBuffer( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstBuffer ), regions.size() , reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions 
) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast( srcBuffer ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regionCount, reinterpret_cast( pRegions ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyImage( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), static_cast( dstImageLayout ), regions.size() , reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstBuffer ), regionCount, reinterpret_cast( pRegions ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstBuffer ), regions.size() , reinterpret_cast( regions.data() ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( stride ), static_cast( flags ) ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, 
VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( stride ), static_cast( flags ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( pMarkerInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast( &markerInfo ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } +#else + template + VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast<VkBuffer>( counterBuffer ), static_cast<VkDeviceSize>( counterBufferOffset ), counterOffset, vertexStride );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast<VkBuffer>( counterBuffer ), static_cast<VkDeviceSize>( counterBufferOffset ), counterOffset, vertexStride );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndRenderPass( m_commandBuffer );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndRenderPass( m_commandBuffer );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfoKHR*>( pSubpassEndInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfoKHR*>( &subpassEndInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer*>( pCounterBuffers ), reinterpret_cast<const VkDeviceSize*>( pCounterBufferOffsets ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> counterBufferOffsets, Dispatch const &d ) const
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( counterBuffers.size() == counterBufferOffsets.size() );
+#else
+    if ( counterBuffers.size() != counterBufferOffsets.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkCommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
+    }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size() , reinterpret_cast<const VkBuffer*>( counterBuffers.data() ), reinterpret_cast<const VkDeviceSize*>( counterBufferOffsets.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfoKHR* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfoKHR*>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfoKHR*>( pSubpassEndInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfoKHR & subpassBeginInfo, const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfoKHR*>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfoKHR*>( &subpassEndInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( pProcessCommandsInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdProcessCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( &processCommandsInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename T, typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> values, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( T ) , reinterpret_cast<const void*>( values.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet*>( pDescriptorWrites ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> descriptorWrites, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWrites.size() , reinterpret_cast<const VkWriteDescriptorSet*>( descriptorWrites.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( pReserveSpaceInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdReserveSpaceForCommandsNVX( m_commandBuffer, reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( &reserveSpaceInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve*>( pRegions ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> regions, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size() , reinterpret_cast<const VkImageResolve*>( regions.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrderCount, reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( pCustomSampleOrders ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> customSampleOrders, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrders.size() , reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( customSampleOrders.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D*>( pDiscardRectangles ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> discardRectangles, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size() , reinterpret_cast<const VkRect2D*>( discardRectangles.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D*>( pExclusiveScissors ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> exclusiveScissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size() , reinterpret_cast<const VkRect2D*>( exclusiveScissors.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( pMarkerInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( &markerInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceMarkerINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( pOverrideInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( &overrideInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceOverrideINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( pMarkerInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( &markerInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::setPerformanceStreamMarkerINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT*>( pSampleLocationsInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT*>( &sampleLocationsInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D*>( pScissors ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> scissors, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size() , reinterpret_cast<const VkRect2D*>( scissors.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport*>( pViewports ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> viewports, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size() , reinterpret_cast<const VkViewport*>( viewports.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV*>( pShadingRatePalettes ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> shadingRatePalettes, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, shadingRatePalettes.size() , reinterpret_cast<const VkShadingRatePaletteNV*>( shadingRatePalettes.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV*>( pViewportWScalings ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> viewportWScalings, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportWScalings.size() , reinterpret_cast<const VkViewportWScalingNV*>( viewportWScalings.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdTraceRaysNV( m_commandBuffer, static_cast<VkBuffer>( raygenShaderBindingTableBuffer ), static_cast<VkDeviceSize>( raygenShaderBindingOffset ), static_cast<VkBuffer>( missShaderBindingTableBuffer ), static_cast<VkDeviceSize>( missShaderBindingOffset ), static_cast<VkDeviceSize>( missShaderBindingStride ), static_cast<VkBuffer>( hitShaderBindingTableBuffer ), static_cast<VkDeviceSize>( hitShaderBindingOffset ), static_cast<VkDeviceSize>( hitShaderBindingStride ), static_cast<VkBuffer>( callableShaderBindingTableBuffer ), static_cast<VkDeviceSize>( callableShaderBindingOffset ), static_cast<VkDeviceSize>( callableShaderBindingStride ), width, height, depth );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdTraceRaysNV( m_commandBuffer, static_cast<VkBuffer>( raygenShaderBindingTableBuffer ), static_cast<VkDeviceSize>( raygenShaderBindingOffset ), static_cast<VkBuffer>( missShaderBindingTableBuffer ), static_cast<VkDeviceSize>( missShaderBindingOffset ), static_cast<VkDeviceSize>( missShaderBindingStride ), static_cast<VkBuffer>( hitShaderBindingTableBuffer ), static_cast<VkDeviceSize>( hitShaderBindingOffset ), static_cast<VkDeviceSize>( hitShaderBindingStride ), static_cast<VkBuffer>( callableShaderBindingTableBuffer ), static_cast<VkDeviceSize>( callableShaderBindingOffset ), static_cast<VkDeviceSize>( callableShaderBindingStride ), width, height, depth );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename T, typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> data, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), data.size() * sizeof( T ) , reinterpret_cast<const void*>( data.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent*>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier*>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier*>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier*>( pImageMemoryBarriers ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWaitEvents( m_commandBuffer, events.size() , reinterpret_cast<const VkEvent*>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size() , reinterpret_cast<const VkMemoryBarrier*>( memoryBarriers.data() ), bufferMemoryBarriers.size() , reinterpret_cast<const VkBufferMemoryBarrier*>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size() , reinterpret_cast<const VkImageMemoryBarrier*>( imageMemoryBarriers.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureNV*>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructures.size() , reinterpret_cast<const VkAccelerationStructureNV*>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result CommandBuffer::end(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end(Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::end" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::CommandBuffer::reset" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkCommandPool.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkCommandPool.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkCommandPool.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDebugReportCallbackEXT.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDebugReportCallbackEXT.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDebugReportCallbackEXT.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDebugUtilsMessengerEXT.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDebugUtilsMessengerEXT.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDebugUtilsMessengerEXT.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDescriptorPool.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDescriptorPool.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDescriptorPool.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDescriptorSet.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDescriptorSet.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDescriptorSet.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDescriptorSetLayout.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDescriptorSetLayout.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDescriptorSetLayout.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDescriptorUpdateTemplate.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDescriptorUpdateTemplate.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDescriptorUpdateTemplate.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDevice.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDevice.hpp
new file mode 100644
index 00000000..eb237998
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDevice.hpp
@@ -0,0 +1,3996 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireFullScreenExclusiveModeEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR*>( pAcquireInfo ), pImageIndex ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d ) const
+  {
+    uint32_t imageIndex;
+    Result result = static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR*>( &acquireInfo ), &imageIndex ) );
+    return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireNextImage2KHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t* pImageIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
+  {
+    uint32_t imageIndex;
+    Result result = static_cast<Result>( d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex ) );
+    return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING"::Device::acquireNextImageKHR", { Result::eSuccess, Result::eTimeout, Result::eNotReady, Result::eSuboptimalKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
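An editorial usage sketch, not part of the generated header or this patch: driving the enhanced acquireNextImageKHR overload above. It assumes the default `vk` namespace and dispatcher; `device`, `swapchain`, and `imageAvailable` are illustrative handles created elsewhere.

#include <cstdint>
#include <vulkan/vulkan.hpp>

uint32_t acquireFrame( vk::Device device, vk::SwapchainKHR swapchain, vk::Semaphore imageAvailable )
{
  // The enhanced overload returns ResultValue<uint32_t> rather than throwing on
  // eTimeout / eNotReady / eSuboptimalKHR, since those count as success codes here.
  vk::ResultValue<uint32_t> acquired = device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailable, vk::Fence() );
  if ( acquired.result == vk::Result::eSuboptimalKHR )
  {
    // The image is still usable this frame, but the swapchain should be recreated soon.
  }
  return acquired.value;
}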
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( pAcquireInfo ), reinterpret_cast<VkPerformanceConfigurationINTEL*>( pConfiguration ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
+    Result result = static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( &acquireInfo ), reinterpret_cast<VkPerformanceConfigurationINTEL*>( &configuration ) ) );
+    return createResultValue( result, configuration, VULKAN_HPP_NAMESPACE_STRING"::Device::acquirePerformanceConfigurationINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer*>( pCommandBuffers ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const
+  {
+    std::vector<CommandBuffer,Allocator> commandBuffers( allocateInfo.commandBufferCount );
+    Result result = static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( commandBuffers.data() ) ) );
+    return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateCommandBuffers" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<CommandBuffer,Allocator> commandBuffers( allocateInfo.commandBufferCount, vectorAllocator );
+    Result result = static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( commandBuffers.data() ) ) );
+    return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateCommandBuffers" );
+  }
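An editorial usage sketch rather than patch content: allocating primary command buffers through the enhanced overload above. The `vk` namespace, pool, and count are illustrative; the returned vector is sized from CommandBufferAllocateInfo::commandBufferCount.

#include <vector>
#include <vulkan/vulkan.hpp>

std::vector<vk::CommandBuffer> makeCommandBuffers( vk::Device device, vk::CommandPool pool, uint32_t count )
{
  vk::CommandBufferAllocateInfo info( pool, vk::CommandBufferLevel::ePrimary, count );
  // Throws vk::SystemError on failure unless exceptions are disabled,
  // so the happy path can use the returned vector directly.
  return device.allocateCommandBuffers( info );
}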
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<CommandBuffer,Dispatch>,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d ) const
+  {
+    static_assert( sizeof( CommandBuffer ) <= sizeof( UniqueHandle<CommandBuffer, Dispatch> ), "CommandBuffer is greater than UniqueHandle<CommandBuffer, Dispatch>!" );
+    std::vector<UniqueHandle<CommandBuffer, Dispatch>, Allocator> commandBuffers;
+    commandBuffers.reserve( allocateInfo.commandBufferCount );
+    CommandBuffer* buffer = reinterpret_cast<CommandBuffer*>( reinterpret_cast<char*>( commandBuffers.data() ) + allocateInfo.commandBufferCount * ( sizeof( UniqueHandle<CommandBuffer, Dispatch> ) - sizeof( CommandBuffer ) ) );
+    Result result = static_cast<Result>(d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( buffer ) ) );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      PoolFree<Device,CommandPool,Dispatch> deleter( *this, allocateInfo.commandPool, d );
+      for ( size_t i=0 ; i<allocateInfo.commandBufferCount ; i++ )
+      {
+        commandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<CommandBuffer,Dispatch>,Allocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    static_assert( sizeof( CommandBuffer ) <= sizeof( UniqueHandle<CommandBuffer, Dispatch> ), "CommandBuffer is greater than UniqueHandle<CommandBuffer, Dispatch>!" );
+    std::vector<UniqueHandle<CommandBuffer, Dispatch>, Allocator> commandBuffers( vectorAllocator );
+    commandBuffers.reserve( allocateInfo.commandBufferCount );
+    CommandBuffer* buffer = reinterpret_cast<CommandBuffer*>( reinterpret_cast<char*>( commandBuffers.data() ) + allocateInfo.commandBufferCount * ( sizeof( UniqueHandle<CommandBuffer, Dispatch> ) - sizeof( CommandBuffer ) ) );
+    Result result = static_cast<Result>(d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkCommandBuffer*>( buffer ) ) );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      PoolFree<Device,CommandPool,Dispatch> deleter( *this, allocateInfo.commandPool, d );
+      for ( size_t i=0 ; i<allocateInfo.commandBufferCount ; i++ )
+      {
+        commandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet*>( pDescriptorSets ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const
+  {
+    std::vector<DescriptorSet,Allocator> descriptorSets( allocateInfo.descriptorSetCount );
+    Result result = static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( descriptorSets.data() ) ) );
+    return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateDescriptorSets" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<DescriptorSet,Allocator> descriptorSets( allocateInfo.descriptorSetCount, vectorAllocator );
+    Result result = static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( descriptorSets.data() ) ) );
+    return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateDescriptorSets" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<DescriptorSet,Dispatch>,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d ) const
+  {
+    static_assert( sizeof( DescriptorSet ) <= sizeof( UniqueHandle<DescriptorSet, Dispatch> ), "DescriptorSet is greater than UniqueHandle<DescriptorSet, Dispatch>!" );
+    std::vector<UniqueHandle<DescriptorSet, Dispatch>, Allocator> descriptorSets;
+    descriptorSets.reserve( allocateInfo.descriptorSetCount );
+    DescriptorSet* buffer = reinterpret_cast<DescriptorSet*>( reinterpret_cast<char*>( descriptorSets.data() ) + allocateInfo.descriptorSetCount * ( sizeof( UniqueHandle<DescriptorSet, Dispatch> ) - sizeof( DescriptorSet ) ) );
+    Result result = static_cast<Result>(d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( buffer ) ) );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      PoolFree<Device,DescriptorPool,Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
+      for ( size_t i=0 ; i<allocateInfo.descriptorSetCount ; i++ )
+      {
+        descriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<DescriptorSet,Dispatch>,Allocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    static_assert( sizeof( DescriptorSet ) <= sizeof( UniqueHandle<DescriptorSet, Dispatch> ), "DescriptorSet is greater than UniqueHandle<DescriptorSet, Dispatch>!" );
+    std::vector<UniqueHandle<DescriptorSet, Dispatch>, Allocator> descriptorSets( vectorAllocator );
+    descriptorSets.reserve( allocateInfo.descriptorSetCount );
+    DescriptorSet* buffer = reinterpret_cast<DescriptorSet*>( reinterpret_cast<char*>( descriptorSets.data() ) + allocateInfo.descriptorSetCount * ( sizeof( UniqueHandle<DescriptorSet, Dispatch> ) - sizeof( DescriptorSet ) ) );
+    Result result = static_cast<Result>(d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo*>( &allocateInfo ), reinterpret_cast<VkDescriptorSet*>( buffer ) ) );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      PoolFree<Device,DescriptorPool,Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
+      for ( size_t i=0 ; i<allocateInfo.descriptorSetCount ; i++ )
+      {
+        descriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo* pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory* pMemory, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( pAllocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDeviceMemory*>( pMemory ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+    Result result = static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeviceMemory*>( &memory ) ) );
+    return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateMemory" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DeviceMemory,Dispatch>>::type Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+    Result result = static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo*>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeviceMemory*>( &memory ) ) );
+
+    ObjectFree<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<DeviceMemory,Dispatch>( result, memory, VULKAN_HPP_NAMESPACE_STRING"::Device::allocateMemoryUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
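An editorial sketch of the Unique variant above, assuming a `memoryTypeIndex` chosen elsewhere against the device's memory properties; all names are illustrative.

#include <vulkan/vulkan.hpp>

vk::UniqueDeviceMemory allocateChunk( vk::Device device, vk::DeviceSize size, uint32_t memoryTypeIndex )
{
  vk::MemoryAllocateInfo info( size, memoryTypeIndex );
  // allocateMemoryUnique wires an ObjectFree deleter into the handle, so
  // vkFreeMemory runs automatically when the UniqueDeviceMemory is destroyed.
  return device.allocateMemoryUnique( info );
}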
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV*>( pBindInfos ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindAccelerationStructureMemoryNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> bindInfos, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfos.size() , reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV*>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindAccelerationStructureMemoryNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo*>( pBindInfos ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> bindInfos, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfos.size() , reinterpret_cast<const VkBindBufferMemoryInfo*>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo*>( pBindInfos ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> bindInfos, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfos.size() , reinterpret_cast<const VkBindBufferMemoryInfo*>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindBufferMemory2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
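An editorial sketch of batched binding through the ArrayProxy overload above; the two buffers and the offset are illustrative and assumed to satisfy the usual alignment requirements.

#include <vulkan/vulkan.hpp>

void bindTwoBuffers( vk::Device device, vk::Buffer a, vk::Buffer b, vk::DeviceMemory memory, vk::DeviceSize offsetB )
{
  // ArrayProxy accepts an initializer list, so both binds go through a
  // single vkBindBufferMemory2 call.
  device.bindBufferMemory2( { vk::BindBufferMemoryInfo( a, memory, 0 ),
                              vk::BindBufferMemoryInfo( b, memory, offsetB ) } );
}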
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo*>( pBindInfos ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> bindInfos, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfos.size() , reinterpret_cast<const VkBindImageMemoryInfo*>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo*>( pBindInfos ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> bindInfos, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfos.size() , reinterpret_cast<const VkBindImageMemoryInfo*>( bindInfos.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::bindImageMemory2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::compileDeferredNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructure, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkAccelerationStructureNV*>( pAccelerationStructure ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type Device::createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
+    Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkAccelerationStructureNV*>( &accelerationStructure ) ) );
+    return createResultValue( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureNV" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<AccelerationStructureNV,Dispatch>>::type Device::createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
+    Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkAccelerationStructureNV*>( &accelerationStructure ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<AccelerationStructureNV,Dispatch>( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING"::Device::createAccelerationStructureNVUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkBuffer*>( pBuffer ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type Device::createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    Result result = static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBuffer*>( &buffer ) ) );
+    return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createBuffer" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Buffer,Dispatch>>::type Device::createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    Result result = static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBuffer*>( &buffer ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Buffer,Dispatch>( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
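An editorial sketch of createBufferUnique above; the staging-buffer parameters are illustrative only.

#include <vulkan/vulkan.hpp>

vk::UniqueBuffer makeStagingBuffer( vk::Device device, vk::DeviceSize size )
{
  vk::BufferCreateInfo info( {}, size, vk::BufferUsageFlagBits::eTransferSrc, vk::SharingMode::eExclusive );
  // The Unique variant attaches an ObjectDestroy deleter, so vkDestroyBuffer
  // is called with the same allocation callbacks when the handle goes away.
  return device.createBufferUnique( info );
}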
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkBufferView*>( pView ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type Device::createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::BufferView view;
+    Result result = static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBufferView*>( &view ) ) );
+    return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferView" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<BufferView,Dispatch>>::type Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::BufferView view;
+    Result result = static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBufferView*>( &view ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<BufferView,Dispatch>( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createBufferViewUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkCommandPool*>( pCommandPool ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type Device::createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
+    Result result = static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkCommandPool*>( &commandPool ) ) );
+    return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createCommandPool" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<CommandPool,Dispatch>>::type Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
+    Result result = static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkCommandPool*>( &commandPool ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<CommandPool,Dispatch>( result, commandPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createCommandPoolUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
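An illustrative createCommandPoolUnique call, editorial rather than patch content; the reset flag simply matches the most common render-loop setup.

#include <vulkan/vulkan.hpp>

vk::UniqueCommandPool makeCommandPool( vk::Device device, uint32_t queueFamilyIndex )
{
  vk::CommandPoolCreateInfo info( vk::CommandPoolCreateFlagBits::eResetCommandBuffer, queueFamilyIndex );
  return device.createCommandPoolUnique( info );
}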
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkComputePipelineCreateInfo*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<Pipeline,Allocator> pipelines( createInfos.size(), vectorAllocator );
+    Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelines" );
+  }
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<Pipeline>::type Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkComputePipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+    return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipeline" );
+  }
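An editorial sketch of the single-pipeline convenience overload above; `shader` and `layout` are assumed to be a valid compute shader module and a compatible pipeline layout.

#include <vulkan/vulkan.hpp>

vk::Pipeline makeComputePipeline( vk::Device device, vk::PipelineCache cache, vk::ShaderModule shader, vk::PipelineLayout layout )
{
  vk::PipelineShaderStageCreateInfo stage( {}, vk::ShaderStageFlagBits::eCompute, shader, "main" );
  vk::ComputePipelineCreateInfo info( {}, stage, layout );
  // createComputePipeline forwards createInfoCount = 1 to
  // vkCreateComputePipelines and unwraps the single resulting handle.
  return device.createComputePipeline( cache, info );
}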
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
+    std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines;
+    pipelines.reserve( createInfos.size() );
+    Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
+    Result result = static_cast<Result>(d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0 ; i<createInfos.size() ; i++ )
+      {
+        pipelines.push_back( UniqueHandle<Pipeline, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
+    std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines( vectorAllocator );
+    pipelines.reserve( createInfos.size() );
+    Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
+    Result result = static_cast<Result>(d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkComputePipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0 ; i<createInfos.size() ; i++ )
+      {
+        pipelines.push_back( UniqueHandle<Pipeline, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique" );
+  }
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkComputePipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Pipeline,Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createComputePipelineUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorPool*>( pDescriptorPool ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
+    Result result = static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorPool*>( &descriptorPool ) ) );
+    return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorPool" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorPool,Dispatch>>::type Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
+    Result result = static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorPool*>( &descriptorPool ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<DescriptorPool,Dispatch>( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorPoolUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorSetLayout*>( pSetLayout ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
+    Result result = static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout*>( &setLayout ) ) );
+    return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorSetLayout" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorSetLayout,Dispatch>>::type Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
+    Result result = static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout*>( &setLayout ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<DescriptorSetLayout,Dispatch>( result, setLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorSetLayoutUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
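An illustrative descriptor-set-layout creation, added editorially; a single uniform-buffer binding at slot 0 is assumed purely for the example.

#include <vulkan/vulkan.hpp>

vk::UniqueDescriptorSetLayout makeUboLayout( vk::Device device )
{
  vk::DescriptorSetLayoutBinding binding( 0, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex );
  vk::DescriptorSetLayoutCreateInfo info( {}, 1, &binding );
  return device.createDescriptorSetLayoutUnique( info );
}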
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorUpdateTemplate*>( pDescriptorUpdateTemplate ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+    Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
+    return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplate" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+    Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<DescriptorUpdateTemplate,Dispatch>( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDescriptorUpdateTemplate*>( pDescriptorUpdateTemplate ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+    Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
+    return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+    Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate*>( &descriptorUpdateTemplate ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<DescriptorUpdateTemplate,Dispatch>( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING"::Device::createDescriptorUpdateTemplateKHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkEvent*>( pEvent ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type Device::createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Event event;
+    Result result = static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkEvent*>( &event ) ) );
+    return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING"::Device::createEvent" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Event,Dispatch>>::type Device::createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Event event;
+    Result result = static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkEvent*>( &event ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Event,Dispatch>( result, event, VULKAN_HPP_NAMESPACE_STRING"::Device::createEventUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    Result result = static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
+    return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::createFence" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type Device::createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    Result result = static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Fence,Dispatch>( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::createFenceUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
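An illustrative fence creation, added editorially; starting signaled is the common trick so a render loop's first wait does not block forever.

#include <vulkan/vulkan.hpp>

vk::UniqueFence makeFrameFence( vk::Device device )
{
  return device.createFenceUnique( vk::FenceCreateInfo( vk::FenceCreateFlagBits::eSignaled ) );
}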
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFramebuffer*>( pFramebuffer ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type Device::createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
+    Result result = static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFramebuffer*>( &framebuffer ) ) );
+    return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createFramebuffer" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Framebuffer,Dispatch>>::type Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
+    Result result = static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFramebuffer*>( &framebuffer ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Framebuffer,Dispatch>( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING"::Device::createFramebufferUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<Pipeline,Allocator> pipelines( createInfos.size(), vectorAllocator );
+    Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelines" );
+  }
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<Pipeline>::type Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+    return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipeline" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
+    std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines;
+    pipelines.reserve( createInfos.size() );
+    Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
+    Result result = static_cast<Result>(d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0 ; i<createInfos.size() ; i++ )
+      {
+        pipelines.push_back( UniqueHandle<Pipeline, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
+    std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines( vectorAllocator );
+    pipelines.reserve( createInfos.size() );
+    Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
+    Result result = static_cast<Result>(d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0 ; i<createInfos.size() ; i++ )
+      {
+        pipelines.push_back( UniqueHandle<Pipeline, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique" );
+  }
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Pipeline,Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createGraphicsPipelineUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkImage*>( pImage ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type Device::createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Image image;
+    Result result = static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImage*>( &image ) ) );
+    return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING"::Device::createImage" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Image,Dispatch>>::type Device::createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Image image;
+    Result result = static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImage*>( &image ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Image,Dispatch>( result, image, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkImageView*>( pView ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type Device::createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ImageView view;
+    Result result = static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImageView*>( &view ) ) );
+    return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageView" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ImageView,Dispatch>>::type Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ImageView view;
+    Result result = static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImageView*>( &view ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<ImageView,Dispatch>( result, view, VULKAN_HPP_NAMESPACE_STRING"::Device::createImageViewUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
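An illustrative color image view over an existing `image`, added editorially; the format and subresource range are assumptions for the example.

#include <vulkan/vulkan.hpp>

vk::UniqueImageView makeColorView( vk::Device device, vk::Image image, vk::Format format )
{
  vk::ImageViewCreateInfo info( {}, image, vk::ImageViewType::e2D, format, vk::ComponentMapping(),
                                vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
  return device.createImageViewUnique( info );
}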
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNVX( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( pIndirectCommandsLayout ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX>::type Device::createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout;
+    Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( &indirectCommandsLayout ) ) );
+    return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVX" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<IndirectCommandsLayoutNVX,Dispatch>>::type Device::createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout;
+    Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNVX( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkIndirectCommandsLayoutNVX*>( &indirectCommandsLayout ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<IndirectCommandsLayoutNVX,Dispatch>( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createIndirectCommandsLayoutNVXUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createObjectTableNVX( const VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ObjectTableNVX* pObjectTable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkObjectTableNVX*>( pObjectTable ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ObjectTableNVX>::type Device::createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable;
+    Result result = static_cast<Result>( d.vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkObjectTableNVX*>( &objectTable ) ) );
+    return createResultValue( result, objectTable, VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVX" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ObjectTableNVX,Dispatch>>::type Device::createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable;
+    Result result = static_cast<Result>( d.vkCreateObjectTableNVX( m_device, reinterpret_cast<const VkObjectTableCreateInfoNVX*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkObjectTableNVX*>( &objectTable ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<ObjectTableNVX,Dispatch>( result, objectTable, VULKAN_HPP_NAMESPACE_STRING"::Device::createObjectTableNVXUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineCache*>( pPipelineCache ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
+    Result result = static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineCache*>( &pipelineCache ) ) );
+    return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCache" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<PipelineCache,Dispatch>>::type Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
+    Result result = static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineCache*>( &pipelineCache ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<PipelineCache,Dispatch>( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineCacheUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipelineLayout*>( pPipelineLayout ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
+    Result result = static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineLayout*>( &pipelineLayout ) ) );
+    return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayout" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<PipelineLayout,Dispatch>>::type Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
+    Result result = static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineLayout*>( &pipelineLayout ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<PipelineLayout,Dispatch>( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING"::Device::createPipelineLayoutUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkQueryPool*>( pQueryPool ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type Device::createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
+    Result result = static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkQueryPool*>( &queryPool ) ) );
+    return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createQueryPool" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<QueryPool,Dispatch>>::type Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
+    Result result = static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkQueryPool*>( &queryPool ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<QueryPool,Dispatch>( result, queryPool, VULKAN_HPP_NAMESPACE_STRING"::Device::createQueryPoolUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkPipeline*>( pPipelines ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    std::vector<Pipeline,Allocator> pipelines( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV" );
+  }
+  template <typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Pipeline,Allocator>>::type Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<Pipeline,Allocator> pipelines( createInfos.size(), vectorAllocator );
+    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( pipelines.data() ) ) );
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelinesNV" );
+  }
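+  // The two vector overloads above differ only in that the second forwards vectorAllocator to the
+  // std::vector constructor; the single-createInfo overload calls the same entry point with a count of 1.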
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<Pipeline>::type Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+    return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNV" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
+    std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines;
+    pipelines.reserve( createInfos.size() );
+    Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
+    Result result = static_cast<Result>(d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0 ; i<createInfos.size() ; i++ )
+      {
+        pipelines.push_back( UniqueHandle<Pipeline, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique" );
+  }
+  template <typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    static_assert( sizeof( Pipeline ) <= sizeof( UniqueHandle<Pipeline, Dispatch> ), "Pipeline is greater than UniqueHandle<Pipeline, Dispatch>!" );
+    std::vector<UniqueHandle<Pipeline, Dispatch>, Allocator> pipelines( vectorAllocator );
+    pipelines.reserve( createInfos.size() );
+    Pipeline* buffer = reinterpret_cast<Pipeline*>( reinterpret_cast<char*>( pipelines.data() ) + createInfos.size() * ( sizeof( UniqueHandle<Pipeline, Dispatch> ) - sizeof( Pipeline ) ) );
+    Result result = static_cast<Result>(d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size() , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( buffer ) ) );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0 ; i<createInfos.size() ; i++ )
+      {
+        pipelines.push_back( UniqueHandle<Pipeline, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique" );
+  }
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    Pipeline pipeline;
+    Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1 , reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline*>( &pipeline ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Pipeline,Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING"::Device::createRayTracingPipelineNVUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkRenderPass*>( pRenderPass ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    Result result = static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
+    return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    Result result = static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<RenderPass,Dispatch>( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPassUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2KHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2KHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkRenderPass*>( pRenderPass ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass2KHR( const RenderPassCreateInfo2KHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    Result result = static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2KHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
+    return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2KHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2KHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    Result result = static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2KHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass*>( &renderPass ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<RenderPass,Dispatch>( result, renderPass, VULKAN_HPP_NAMESPACE_STRING"::Device::createRenderPass2KHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSampler*>( pSampler ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type Device::createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Sampler sampler;
+    Result result = static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSampler*>( &sampler ) ) );
+    return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING"::Device::createSampler" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Sampler,Dispatch>>::type Device::createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Sampler sampler;
+    Result result = static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSampler*>( &sampler ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Sampler,Dispatch>( result, sampler, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSamplerYcbcrConversion*>( pYcbcrConversion ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
+    return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversion" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SamplerYcbcrConversion,Dispatch>( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSamplerYcbcrConversion*>( pYcbcrConversion ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
+    return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion*>( &ycbcrConversion ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SamplerYcbcrConversion,Dispatch>( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING"::Device::createSamplerYcbcrConversionKHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
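+
+  // Note that the ...KHR variants above dispatch through the core vkCreateSamplerYcbcrConversion
+  // function pointer rather than a KHR-suffixed one.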
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSemaphore*>( pSemaphore ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type Device::createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
+    Result result = static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSemaphore*>( &semaphore ) ) );
+    return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING"::Device::createSemaphore" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Semaphore,Dispatch>>::type Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
+    Result result = static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSemaphore*>( &semaphore ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<Semaphore,Dispatch>( result, semaphore, VULKAN_HPP_NAMESPACE_STRING"::Device::createSemaphoreUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkShaderModule*>( pShaderModule ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
+    Result result = static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkShaderModule*>( &shaderModule ) ) );
+    return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING"::Device::createShaderModule" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ShaderModule,Dispatch>>::type Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
+    Result result = static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkShaderModule*>( &shaderModule ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<ShaderModule,Dispatch>( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING"::Device::createShaderModuleUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSwapchainKHR*>( pSwapchains ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    std::vector<SwapchainKHR,Allocator> swapchains( createInfos.size() );
+    Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( swapchains.data() ) ) );
+    return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" );
+  }
+  template <typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<SwapchainKHR,Allocator> swapchains( createInfos.size(), vectorAllocator );
+    Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( swapchains.data() ) ) );
+    return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainsKHR" );
+  }
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<SwapchainKHR>::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    SwapchainKHR swapchain;
+    Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
+    return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    static_assert( sizeof( SwapchainKHR ) <= sizeof( UniqueHandle<SwapchainKHR, Dispatch> ), "SwapchainKHR is greater than UniqueHandle<SwapchainKHR, Dispatch>!" );
+    std::vector<UniqueHandle<SwapchainKHR, Dispatch>, Allocator> swapchainKHRs;
+    swapchainKHRs.reserve( createInfos.size() );
+    SwapchainKHR* buffer = reinterpret_cast<SwapchainKHR*>( reinterpret_cast<char*>( swapchainKHRs.data() ) + createInfos.size() * ( sizeof( UniqueHandle<SwapchainKHR, Dispatch> ) - sizeof( SwapchainKHR ) ) );
+    Result result = static_cast<Result>(d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( buffer ) ) );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0 ; i<createInfos.size() ; i++ )
+      {
+        swapchainKHRs.push_back( UniqueHandle<SwapchainKHR, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, swapchainKHRs, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
+  }
+  template <typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    static_assert( sizeof( SwapchainKHR ) <= sizeof( UniqueHandle<SwapchainKHR, Dispatch> ), "SwapchainKHR is greater than UniqueHandle<SwapchainKHR, Dispatch>!" );
+    std::vector<UniqueHandle<SwapchainKHR, Dispatch>, Allocator> swapchainKHRs( vectorAllocator );
+    swapchainKHRs.reserve( createInfos.size() );
+    SwapchainKHR* buffer = reinterpret_cast<SwapchainKHR*>( reinterpret_cast<char*>( swapchainKHRs.data() ) + createInfos.size() * ( sizeof( UniqueHandle<SwapchainKHR, Dispatch> ) - sizeof( SwapchainKHR ) ) );
+    Result result = static_cast<Result>(d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size() , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( buffer ) ) );
+    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+    {
+      ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+      for ( size_t i=0 ; i<createInfos.size() ; i++ )
+      {
+        swapchainKHRs.push_back( UniqueHandle<SwapchainKHR, Dispatch>( buffer[i], deleter ) );
+      }
+    }
+
+    return createResultValue( result, swapchainKHRs, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
+  }
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    SwapchainKHR swapchain;
+    Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 1 , reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SwapchainKHR,Dispatch>( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSharedSwapchainKHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSwapchainKHR*>( pSwapchain ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
+    Result result = static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
+    return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSwapchainKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
+    Result result = static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR*>( &swapchain ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SwapchainKHR,Dispatch>( result, swapchain, VULKAN_HPP_NAMESPACE_STRING"::Device::createSwapchainKHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pValidationCache, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkValidationCacheEXT*>( pValidationCache ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
+    Result result = static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkValidationCacheEXT*>( &validationCache ) ) );
+    return createResultValue( result, validationCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createValidationCacheEXT" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<ValidationCacheEXT,Dispatch>>::type Device::createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
+    Result result = static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkValidationCacheEXT*>( &validationCache ) ) );
+
+    ObjectDestroy<Device,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<ValidationCacheEXT,Dispatch>( result, validationCache, VULKAN_HPP_NAMESPACE_STRING"::Device::createValidationCacheEXTUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( pNameInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( &nameInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectNameEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( pTagInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( &tagInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::debugMarkerSetObjectTagEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
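+
+  // Each destroy command below is emitted twice over: as a named overload (e.g. destroyBuffer) and as a
+  // generic destroy overload taking just the handle; the generic form is the one invoked by the
+  // UniqueHandle deleters (ObjectDestroy) used in the create*Unique functions above.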
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyIndirectCommandsLayoutNVX( m_device, static_cast<VkIndirectCommandsLayoutNVX>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyObjectTableNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( 
VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + 
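+    // Illustrative note (not generated code): Optional<const AllocationCallbacks> decays to a
+    // plain pointer in the call below, so passing nullptr selects the driver's default allocator.
+    // A hedged usage sketch, assuming `device` and `sampler` handles already exist:
+    //   device.destroySampler( sampler );                 // default allocator
+    //   device.destroySampler( sampler, myCallbacks );    // user-supplied vk::AllocationCallbacks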
d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySamplerYcbcrConversion( m_device, static_cast( ycbcrConversion ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void 
Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySwapchainKHR( m_device, static_cast( swapchain ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional 
<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::waitIdle(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle(Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitIdle" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( pDisplayPowerInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT*>( &displayPowerInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::displayPowerControlEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
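+  // Illustrative note (not generated code): the enhanced overload below takes an ArrayProxy,
+  // which adapts a single element, a std::initializer_list, or a std::vector into the
+  // (count, pointer) pair the C API expects. A hedged usage sketch, assuming `device`,
+  // `memory`, `offset` and `size` already exist:
+  //   vk::MappedMemoryRange range{ memory, offset, size };
+  //   device.flushMappedMemoryRanges( range );   // throws vk::SystemError on failure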
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> memoryRanges, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::flushMappedMemoryRanges" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer*>( pCommandBuffers ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size() , reinterpret_cast<const VkCommandBuffer*>( commandBuffers.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::freeDescriptorSets" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet*>( pDescriptorSets ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, Dispatch const &d ) const
+  {
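+    // Illustrative note (not generated code): this `free` overload is an alias for
+    // freeDescriptorSets; createResultValue below turns a non-success VkResult into a
+    // vk::SystemError (or a plain Result when VULKAN_HPP_NO_EXCEPTIONS is defined), e.g.:
+    //   device.free( descriptorPool, { set0, set1 } );   // hypothetical handles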
+    Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size() , reinterpret_cast<const VkDescriptorSet*>( descriptorSets.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::free" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename T, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, ArrayProxy<T> data, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getAccelerationStructureHandleNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( pInfo ), reinterpret_cast<VkMemoryRequirements2KHR*>( pMemoryRequirements ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements;
+    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( &info ), reinterpret_cast<VkMemoryRequirements2KHR*>( &memoryRequirements ) );
+    return memoryRequirements;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE
StructureChain Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR& memoryRequirements = structureChain.template get(); + d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + template + VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties; + Result result = static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast( &properties ) ) ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d ) const + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID& properties = structureChain.template get(); + Result result = static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast( &properties ) ) ); + return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoEXT* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast( pInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const BufferDeviceAddressInfoEXT & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetBufferMemoryRequirements( m_device, static_cast( buffer ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; + d.vkGetBufferMemoryRequirements( m_device, static_cast( buffer ), reinterpret_cast( &memoryRequirements ) 
); + return memoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + template + VULKAN_HPP_INLINE StructureChain Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get(); + d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + template + VULKAN_HPP_INLINE StructureChain Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get(); + d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetCalibratedTimestampsEXT( m_device, timestampCount, reinterpret_cast( pTimestampInfos ), pTimestamps, pMaxDeviation ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getCalibratedTimestampsEXT( ArrayProxy timestampInfos, ArrayProxy timestamps, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( timestampInfos.size() == timestamps.size() ); +#else + if ( timestampInfos.size() != 
timestamps.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::getCalibratedTimestampsEXT: timestampInfos.size() != timestamps.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + uint64_t maxDeviation; + Result result = static_cast( d.vkGetCalibratedTimestampsEXT( m_device, timestampInfos.size() , reinterpret_cast( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); + return createResultValue( result, maxDeviation, VULKAN_HPP_NAMESPACE_STRING"::Device::getCalibratedTimestampsEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pSupport ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; + d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + return support; + } + template + VULKAN_HPP_INLINE StructureChain Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport& support = structureChain.template get(); + d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pSupport ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; + d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + return support; + } + template + VULKAN_HPP_INLINE StructureChain Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport& support = structureChain.template get(); + d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast( &createInfo ), reinterpret_cast( &support ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDeviceGroupPeerMemoryFeatures( m_device, 
heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; + d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( &peerMemoryFeatures ) ); + return peerMemoryFeatures; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; + d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( &peerMemoryFeatures ) ); + return peerMemoryFeatures; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast( pDeviceGroupPresentCapabilities ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getGroupPresentCapabilitiesKHR(Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; + Result result = static_cast( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast( &deviceGroupPresentCapabilities ) ) ); + return createResultValue( result, deviceGroupPresentCapabilities, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupPresentCapabilitiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast( pSurfaceInfo ), reinterpret_cast( pModes ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; + Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast( &surfaceInfo ), reinterpret_cast( &modes ) ) ); + return createResultValue( result, modes, 
VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupSurfacePresentModes2EXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast( surface ), reinterpret_cast( pModes ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; + Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast( surface ), reinterpret_cast( &modes ) ) ); + return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING"::Device::getGroupSurfacePresentModesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize* pCommittedMemoryInBytes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDeviceMemoryCommitment( m_device, static_cast( memory ), reinterpret_cast( pCommittedMemoryInBytes ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes; + d.vkGetDeviceMemoryCommitment( m_device, static_cast( memory ), reinterpret_cast( &committedMemoryInBytes ) ); + return committedMemoryInBytes; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char* pName, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetDeviceProcAddr( m_device, pName ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetDeviceProcAddr( m_device, name.c_str() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast( pQueue ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Queue queue; + d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast( &queue ) ); + return queue; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2* pQueueInfo, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetDeviceQueue2( m_device, reinterpret_cast( pQueueInfo ), reinterpret_cast( pQueue ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const 
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::Queue queue;
+    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2*>( &queueInfo ), reinterpret_cast<VkQueue*>( &queue ) );
+    return queue;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getEventStatus", { Result::eEventSet, Result::eEventReset } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR*>( pGetFdInfo ), pFd ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d ) const
+  {
+    int fd;
+    Result result = static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR*>( &getFdInfo ), &fd ) );
+    return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceFdKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceStatus", { Result::eSuccess, Result::eNotReady } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( pGetWin32HandleInfo ), pHandle ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const
+  {
+    HANDLE handle;
+    Result result = static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( &getWin32HandleInfo ), &handle ) );
+    return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getFenceWin32HandleKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>(
d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast( image ), reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties; + Result result = static_cast( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast( image ), reinterpret_cast( &properties ) ) ); + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getImageDrmFormatModifierPropertiesEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageMemoryRequirements( m_device, static_cast( image ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; + d.vkGetImageMemoryRequirements( m_device, static_cast( image ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + template + VULKAN_HPP_INLINE StructureChain Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get(); + d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::MemoryRequirements2 
memoryRequirements; + d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return memoryRequirements; + } + template + VULKAN_HPP_INLINE StructureChain Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2& memoryRequirements = structureChain.template get(); + d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast( &info ), reinterpret_cast( &memoryRequirements ) ); + return structureChain; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d ) const + { + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); + return sparseMemoryRequirements; + } + template + VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector sparseMemoryRequirements( vectorAllocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements( m_device, static_cast( image ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); + return sparseMemoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, reinterpret_cast( 
sparseMemoryRequirements.data() ) ); + return sparseMemoryRequirements; + } + template + VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector sparseMemoryRequirements( vectorAllocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); + return sparseMemoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d ) const + { + std::vector sparseMemoryRequirements; + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); + return sparseMemoryRequirements; + } + template + VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector sparseMemoryRequirements( vectorAllocator ); + uint32_t sparseMemoryRequirementCount; + d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, nullptr ); + sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); + d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast( &info ), &sparseMemoryRequirementCount, reinterpret_cast( sparseMemoryRequirements.data() ) ); + return sparseMemoryRequirements; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource* pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout* pLayout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetImageSubresourceLayout( m_device, static_cast( image ), reinterpret_cast( pSubresource ), reinterpret_cast( pLayout ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const ImageSubresource & subresource, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::SubresourceLayout layout; + d.vkGetImageSubresourceLayout( m_device, static_cast( image ), reinterpret_cast( &subresource ), reinterpret_cast( 
&layout ) ); + return layout; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast( pInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast( &info ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + template + VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast( pInfo ), pBuffer ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d ) const + { + struct AHardwareBuffer* buffer; + Result result = static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast( &info ), &buffer ) ); + return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryAndroidHardwareBufferANDROID" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + template + VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetMemoryFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d ) const + { + int fd; + Result result = static_cast( d.vkGetMemoryFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast( handleType ), fd, reinterpret_cast( pMemoryFdProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties; + Result result = static_cast( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast( handleType ), fd, reinterpret_cast( &memoryFdProperties ) ) ); + return createResultValue( result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryFdPropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( 
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast( handleType ), pHostPointer, reinterpret_cast( pMemoryHostPointerProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties; + Result result = static_cast( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast( handleType ), pHostPointer, reinterpret_cast( &memoryHostPointerProperties ) ) ); + return createResultValue( result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryHostPointerPropertiesEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const + { + HANDLE handle; + Result result = static_cast( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandleKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetMemoryWin32HandleNV( m_device, static_cast( memory ), static_cast( handleType ), pHandle ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d ) const + { + HANDLE handle; + Result result = static_cast( d.vkGetMemoryWin32HandleNV( m_device, static_cast( memory ), static_cast( handleType ), &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandleNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast( handleType ), handle, reinterpret_cast( 
pMemoryWin32HandleProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
+    Result result = static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( &memoryWin32HandleProperties ) ) );
+    return createResultValue( result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getMemoryWin32HandlePropertiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), pPresentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE*>( pPresentationTimings ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const
+  {
+    std::vector<PastPresentationTimingGOOGLE,Allocator> presentationTimings;
+    uint32_t presentationTimingCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentationTimingCount )
+      {
+        presentationTimings.resize( presentationTimingCount );
+        result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE*>( presentationTimings.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
+      presentationTimings.resize( presentationTimingCount );
+    }
+    return createResultValue( result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PastPresentationTimingGOOGLE,Allocator> presentationTimings( vectorAllocator );
+    uint32_t presentationTimingCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentationTimingCount )
+      {
+        presentationTimings.resize( presentationTimingCount );
+        result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE*>( presentationTimings.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
+      presentationTimings.resize( presentationTimingCount );
+    }
+    return createResultValue( result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
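+  // The enumerate-style wrappers above follow Vulkan's two-call idiom: the
+  // first call queries the element count, the second call fills the vector,
+  // and the loop retries while the driver returns Result::eIncomplete. A
+  // minimal usage sketch (assuming an already constructed `device` and
+  // `swapchain` and the default `vk` namespace; the names are illustrative,
+  // not part of this header):
+  //
+  //   std::vector<vk::PastPresentationTimingGOOGLE> timings =
+  //       device.getPastPresentationTimingGOOGLE( swapchain );
+  //
+  // When VULKAN_HPP_NO_EXCEPTIONS is defined, the same call instead yields a
+  // ResultValue aggregate carrying both the Result code and the vector.
+
+  template<typename Dispatch>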
VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPerformanceParameterINTEL( m_device, static_cast( parameter ), reinterpret_cast( pValue ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value; + Result result = static_cast( d.vkGetPerformanceParameterINTEL( m_device, static_cast( parameter ), reinterpret_cast( &value ) ) ); + return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING"::Device::getPerformanceParameterINTEL" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), pDataSize, pData ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d ) const + { + std::vector data; + size_t dataSize; + Result result; + do + { + result = static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + data.resize( dataSize ); + } + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector data( vectorAllocator ); + size_t dataSize; + Result result; + do + { + result = static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + data.resize( dataSize ); + } + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast( 
pExecutableInfo ), pInternalRepresentationCount, reinterpret_cast( pInternalRepresentations ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d ) const + { + std::vector internalRepresentations; + uint32_t internalRepresentationCount; + Result result; + do + { + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast( &executableInfo ), &internalRepresentationCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && internalRepresentationCount ) + { + internalRepresentations.resize( internalRepresentationCount ); + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast( &executableInfo ), &internalRepresentationCount, reinterpret_cast( internalRepresentations.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); + internalRepresentations.resize( internalRepresentationCount ); + } + return createResultValue( result, internalRepresentations, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableInternalRepresentationsKHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector internalRepresentations( vectorAllocator ); + uint32_t internalRepresentationCount; + Result result; + do + { + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast( &executableInfo ), &internalRepresentationCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && internalRepresentationCount ) + { + internalRepresentations.resize( internalRepresentationCount ); + result = static_cast( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast( &executableInfo ), &internalRepresentationCount, reinterpret_cast( internalRepresentations.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); + internalRepresentations.resize( internalRepresentationCount ); + } + return createResultValue( result, internalRepresentations, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableInternalRepresentationsKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( pPipelineInfo ), pExecutableCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d ) const + { + std::vector properties; + uint32_t executableCount; + Result result; + do + { + result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( &pipelineInfo ), 
&executableCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && executableCount ) + { + properties.resize( executableCount ); + result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( &pipelineInfo ), &executableCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( executableCount <= properties.size() ); + properties.resize( executableCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutablePropertiesKHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t executableCount; + Result result; + do + { + result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( &pipelineInfo ), &executableCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && executableCount ) + { + properties.resize( executableCount ); + result = static_cast( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast( &pipelineInfo ), &executableCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( executableCount <= properties.size() ); + properties.resize( executableCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutablePropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast( pExecutableInfo ), pStatisticCount, reinterpret_cast( pStatistics ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d ) const + { + std::vector statistics; + uint32_t statisticCount; + Result result; + do + { + result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast( &executableInfo ), &statisticCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && statisticCount ) + { + statistics.resize( statisticCount ); + result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast( &executableInfo ), &statisticCount, reinterpret_cast( statistics.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); + statistics.resize( statisticCount ); + } + return createResultValue( result, statistics, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableStatisticsKHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector statistics( vectorAllocator ); + uint32_t statisticCount; + Result result; + do + { + result = 
static_cast( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast( &executableInfo ), &statisticCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && statisticCount ) + { + statistics.resize( statisticCount ); + result = static_cast( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast( &executableInfo ), &statisticCount, reinterpret_cast( statistics.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); + statistics.resize( statisticCount ); + } + return createResultValue( result, statistics, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableStatisticsKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetQueryPoolResults( m_device, static_cast( queryPool ), firstQuery, queryCount, dataSize, pData, static_cast( stride ), static_cast( flags ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetQueryPoolResults( m_device, static_cast( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ), static_cast( stride ), static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, dataSize, pData ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy data, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast( data.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingShaderGroupHandlesNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast( swapchain ), reinterpret_cast( pDisplayTimingProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type 
Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties; + Result result = static_cast( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast( swapchain ), reinterpret_cast( &displayTimingProperties ) ) ); + return createResultValue( result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING"::Device::getRefreshCycleDurationGOOGLE" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D* pGranularity, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetRenderAreaGranularity( m_device, static_cast( renderPass ), reinterpret_cast( pGranularity ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::Extent2D granularity; + d.vkGetRenderAreaGranularity( m_device, static_cast( renderPass ), reinterpret_cast( &granularity ) ); + return granularity; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d ) const + { + int fd; + Result result = static_cast( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); + return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d ) const + { + HANDLE handle; + Result result = static_cast( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); + return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING"::Device::getSemaphoreWin32HandleKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), pInfoSize, pInfo ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename 
ResultValueType>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d ) const + { + std::vector info; + size_t infoSize; + Result result; + do + { + result = static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), &infoSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && infoSize ) + { + info.resize( infoSize ); + result = static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), &infoSize, reinterpret_cast( info.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( infoSize <= info.size() ); + info.resize( infoSize ); + } + return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING"::Device::getShaderInfoAMD" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector info( vectorAllocator ); + size_t infoSize; + Result result; + do + { + result = static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), &infoSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && infoSize ) + { + info.resize( infoSize ); + result = static_cast( d.vkGetShaderInfoAMD( m_device, static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), &infoSize, reinterpret_cast( info.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( infoSize <= info.size() ); + info.resize( infoSize ); + } + return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING"::Device::getShaderInfoAMD" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetSwapchainCounterEXT( m_device, static_cast( swapchain ), static_cast( counter ), pCounterValue ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const &d ) const + { + uint64_t counterValue; + Result result = static_cast( d.vkGetSwapchainCounterEXT( m_device, static_cast( swapchain ), static_cast( counter ), &counterValue ) ); + return createResultValue( result, counterValue, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainCounterEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), pSwapchainImageCount, reinterpret_cast( pSwapchainImages ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
VULKAN_HPP_INLINE typename ResultValueType>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const + { + std::vector swapchainImages; + uint32_t swapchainImageCount; + Result result; + do + { + result = static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && swapchainImageCount ) + { + swapchainImages.resize( swapchainImageCount ); + result = static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, reinterpret_cast( swapchainImages.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + swapchainImages.resize( swapchainImageCount ); + } + return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector swapchainImages( vectorAllocator ); + uint32_t swapchainImageCount; + Result result; + do + { + result = static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && swapchainImageCount ) + { + swapchainImages.resize( swapchainImageCount ); + result = static_cast( d.vkGetSwapchainImagesKHR( m_device, static_cast( swapchain ), &swapchainImageCount, reinterpret_cast( swapchainImages.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); + swapchainImages.resize( swapchainImageCount ); + } + return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainImagesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); + } +#else + template + VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const + { + Result result = static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getSwapchainStatusKHR", { Result::eSuccess, Result::eSuboptimalKHR } ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), pDataSize, pData ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const &d ) const + { + std::vector data; + size_t dataSize; + Result result; + do + { + result = static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, nullptr ) ); + 
if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + data.resize( dataSize ); + } + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getValidationCacheDataEXT" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector data( vectorAllocator ); + size_t dataSize; + Result result; + do + { + result = static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, nullptr ) ); + if ( ( result == Result::eSuccess ) && dataSize ) + { + data.resize( dataSize ); + result = static_cast( d.vkGetValidationCacheDataEXT( m_device, static_cast( validationCache ), &dataSize, reinterpret_cast( data.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( dataSize <= data.size() ); + data.resize( dataSize ); + } + return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getValidationCacheDataEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkImportFenceFdKHR( m_device, reinterpret_cast( pImportFenceFdInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkImportFenceFdKHR( m_device, reinterpret_cast( &importFenceFdInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceFdKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + template + VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( pImportFenceWin32HandleInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( &importFenceWin32HandleInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importFenceWin32HandleKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + template + VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( pImportSemaphoreFdInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename 
ResultValueType<void>::type Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( &importSemaphoreFdInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreFdKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( pImportSemaphoreWin32HandleInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( &importSemaphoreWin32HandleInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::importSemaphoreWin32HandleKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL*>( pInitializeInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL*>( &initializeInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::initializePerformanceApiINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange*>( pMemoryRanges ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::invalidateMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> memoryRanges, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size() , reinterpret_cast<const VkMappedMemoryRange*>( memoryRanges.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::invalidateMappedMemoryRanges" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), ppData ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void*>::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, Dispatch const &d ) const
+  {
+    void* pData;
+    Result result = static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), &pData ) );
+    return createResultValue( result, pData, VULKAN_HPP_NAMESPACE_STRING"::Device::mapMemory" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache*>( pSrcCaches ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> srcCaches, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size() , reinterpret_cast<const VkPipelineCache*>( srcCaches.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergePipelineCaches" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkMergeValidationCachesEXT( m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCacheCount, reinterpret_cast<const VkValidationCacheEXT*>( pSrcCaches ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> srcCaches, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkMergeValidationCachesEXT( m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCaches.size() , reinterpret_cast<const VkValidationCacheEXT*>( srcCaches.data() ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::mergeValidationCachesEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT* pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT*>( pDeviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkFence*>( pFence ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    Result result = static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT*>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence*>( &fence ) ) );
+    return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerEventEXT" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type Device::registerEventEXTUnique( const 
DeviceEventInfoEXT & deviceEventInfo, Optional allocator, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::Fence fence; + Result result = static_cast( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast( &deviceEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerEventEXTUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT* pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkRegisterDisplayEventEXT( m_device, static_cast( display ), reinterpret_cast( pDisplayEventInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFence ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional allocator, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::Fence fence; + Result result = static_cast( d.vkRegisterDisplayEventEXT( m_device, static_cast( display ), reinterpret_cast( &displayEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerDisplayEventEXT" ); + } +#ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional allocator, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::Fence fence; + Result result = static_cast( d.vkRegisterDisplayEventEXT( m_device, static_cast( display ), reinterpret_cast( &displayEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); + + ObjectDestroy deleter( *this, allocator, d ); + return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING"::Device::registerDisplayEventEXTUnique", deleter ); + } +#endif /*VULKAN_HPP_NO_SMART_HANDLE*/ +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkRegisterObjectsNVX( m_device, static_cast( objectTable ), objectCount, reinterpret_cast( ppObjectTableEntries ), pObjectIndices ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy pObjectTableEntries, ArrayProxy objectIndices, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( pObjectTableEntries.size() == objectIndices.size() ); +#else + if ( pObjectTableEntries.size() != objectIndices.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::registerObjectsNVX: pObjectTableEntries.size() != objectIndices.size()" 
); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + Result result = static_cast( d.vkRegisterObjectsNVX( m_device, static_cast( objectTable ), pObjectTableEntries.size() , reinterpret_cast( pObjectTableEntries.data() ), objectIndices.data() ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::registerObjectsNVX" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d ) const + { + Result result = static_cast( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::releaseFullScreenExclusiveModeEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const + { + Result result = static_cast( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast( configuration ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::releasePerformanceConfigurationINTEL" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkResetCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d ) const + { + Result result = static_cast( d.vkResetCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetCommandPool" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkResetDescriptorPool( m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const &d ) const 
+ { + Result result = static_cast( d.vkResetDescriptorPool( m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetDescriptorPool" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); + } +#else + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const + { + Result result = static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetEvent" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkResetFences( m_device, fenceCount, reinterpret_cast( pFences ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::resetFences( ArrayProxy fences, Dispatch const &d ) const + { + Result result = static_cast( d.vkResetFences( m_device, fences.size() , reinterpret_cast( fences.data() ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::resetFences" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + +#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkResetQueryPoolEXT( m_device, static_cast( queryPool ), firstQuery, queryCount ); + } +#else + template + VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkResetQueryPoolEXT( m_device, static_cast( queryPool ), firstQuery, queryCount ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( pNameInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d ) const + { + Result result = static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); + return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectNameEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d ) 
const
+  {
+    Result result = static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( &tagInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setDebugUtilsObjectTagEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::setEvent" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkSetHdrMetadataEXT( m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR*>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT*>( pMetadata ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains, ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> metadata, Dispatch const &d ) const
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
+#else
+    if ( swapchains.size() != metadata.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
+    }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    d.vkSetHdrMetadataEXT( m_device, swapchains.size() , reinterpret_cast<const VkSwapchainKHR*>( swapchains.data() ), reinterpret_cast<const VkHdrMetadataEXT*>( metadata.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkSetLocalDimmingAMD( m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkSetLocalDimmingAMD( m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUninitializePerformanceApiINTEL( m_device );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUninitializePerformanceApiINTEL( m_device );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectCount, reinterpret_cast<const VkObjectEntryTypeNVX*>( pObjectEntryTypes ), pObjectIndices ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy<const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d ) const
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( objectEntryTypes.size() == objectIndices.size() );
+#else
+    if ( objectEntryTypes.size() != objectIndices.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkDevice::unregisterObjectsNVX: objectEntryTypes.size() != objectIndices.size()" );
+    }
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+    Result result = static_cast<Result>( d.vkUnregisterObjectsNVX( m_device, static_cast<VkObjectTableNVX>( objectTable ), objectEntryTypes.size() , reinterpret_cast<const VkObjectEntryTypeNVX*>( objectEntryTypes.data() ), objectIndices.data() ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::unregisterObjectsNVX" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet*>( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast<const VkCopyDescriptorSet*>( pDescriptorCopies ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> descriptorWrites, ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> descriptorCopies, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkUpdateDescriptorSets( m_device, descriptorWrites.size() , reinterpret_cast<const VkWriteDescriptorSet*>( descriptorWrites.data() ), descriptorCopies.size() , reinterpret_cast<const VkCopyDescriptorSet*>( descriptorCopies.data() ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence*>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkWaitForFences( m_device, fences.size() , reinterpret_cast<const VkFence*>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::waitForFences", { Result::eSuccess, Result::eTimeout } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+} // namespace VULKAN_HPP_NAMESPACE
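For reviewers: a minimal sketch of how the enhanced-mode wrappers above are consumed, assuming a constructed vk::Device and two fences created elsewhere (all names here are illustrative, not part of the headers):

    #include <array>
    #include <cstdint>
    #include <limits>
    #include <vulkan/vulkan.hpp>

    vk::Result WaitOnBoth(vk::Device device, vk::Fence first, vk::Fence second) {
        std::array<vk::Fence, 2> fences{first, second};
        // The ArrayProxy overload derives fenceCount/pFences from the container;
        // eTimeout is listed as a success code above, so it is returned rather than thrown
        return device.waitForFences(fences, true, std::numeric_limits<uint64_t>::max());
    }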
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDeviceMemory.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDeviceMemory.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDeviceMemory.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDisplayKHR.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDisplayKHR.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDisplayKHR.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDisplayModeKHR.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDisplayModeKHR.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkDisplayModeKHR.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkEvent.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkEvent.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkEvent.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkFence.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkFence.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkFence.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkFramebuffer.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkFramebuffer.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkFramebuffer.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkImage.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkImage.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkImage.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkImageView.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkImageView.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkImageView.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkIndirectCommandsLayoutNVX.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkIndirectCommandsLayoutNVX.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkIndirectCommandsLayoutNVX.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkInstance.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkInstance.hpp
new file mode 100644
index 00000000..1ef0eceb
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkInstance.hpp
@@ -0,0 +1,746 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createAndroidSurfaceKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createAndroidSurfaceKHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT* pCallback, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDebugReportCallbackEXT*>( pCallback ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
+    Result result = static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugReportCallbackEXT*>( &callback ) ) );
+    return createResultValue( result, callback, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugReportCallbackEXT" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DebugReportCallbackEXT,Dispatch>>::type Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
+    Result result = static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugReportCallbackEXT*>( &callback ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<DebugReportCallbackEXT,Dispatch>( result, callback, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugReportCallbackEXTUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT* pMessenger, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDebugUtilsMessengerEXT*>( pMessenger ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
+    Result result = static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugUtilsMessengerEXT*>( &messenger ) ) );
+    return createResultValue( result, messenger, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugUtilsMessengerEXT" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<DebugUtilsMessengerEXT,Dispatch>>::type Instance::createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
+    Result result = static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugUtilsMessengerEXT*>( &messenger ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<DebugUtilsMessengerEXT,Dispatch>( result, messenger, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDebugUtilsMessengerEXTUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDisplayPlaneSurfaceKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createDisplayPlaneSurfaceKHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createHeadlessSurfaceEXT" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createHeadlessSurfaceEXTUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
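Since this project targets Android, the createAndroidSurfaceKHR wrappers above are the ones exercised in practice; a minimal sketch of surface creation from an ANativeWindow, assuming a vk::Instance with VK_KHR_android_surface enabled and VK_USE_PLATFORM_ANDROID_KHR defined (names illustrative):

    #include <android/native_window.h>
    #include <vulkan/vulkan.hpp>

    vk::UniqueSurfaceKHR CreateSurface(vk::Instance instance, ANativeWindow *window) {
        vk::AndroidSurfaceCreateInfoKHR createInfo({}, window);
        // The *Unique variant wires up the ObjectDestroy deleter seen above, so
        // the surface is destroyed automatically when the handle leaves scope
        return instance.createAndroidSurfaceKHRUnique(createInfo);
    }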
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createIOSSurfaceMVK" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createIOSSurfaceMVKUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createImagePipeSurfaceFUCHSIA" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createImagePipeSurfaceFUCHSIAUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMacOSSurfaceMVK" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMacOSSurfaceMVKUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMetalSurfaceEXT" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createMetalSurfaceEXTUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#ifdef VK_USE_PLATFORM_GGP
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createStreamDescriptorSurfaceGGP" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createStreamDescriptorSurfaceGGPUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_GGP*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createViSurfaceNN" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createViSurfaceNNUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWaylandSurfaceKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWaylandSurfaceKHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWin32SurfaceKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createWin32SurfaceKHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXcbSurfaceKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXcbSurfaceKHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkSurfaceKHR*>( pSurface ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
    return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXlibSurfaceKHR" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
+    Result result = static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR*>( &surface ) ) );
+
+    ObjectDestroy<Instance,Dispatch> deleter( *this, allocator, d );
+    return createResultValue<SurfaceKHR,Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING"::Instance::createXlibSurfaceKHRUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
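A caveat worth flagging on the debugReportMessageEXT wrapper that follows: its enhanced std::string overload inherits the generator's paired-array size check, so it throws LogicError whenever layerPrefix and message differ in length. String literals select the raw const char* overload by exact match and sidestep the check; a sketch assuming VK_EXT_debug_report was enabled on the instance (names illustrative):

    void EmitMarker(vk::Instance instance) {
        // Overload resolution picks the const char* variant for literals,
        // avoiding the layerPrefix.size() == message.size() check below
        instance.debugReportMessageEXT(vk::DebugReportFlagBitsEXT::eInformation,
                                       vk::DebugReportObjectTypeEXT::eUnknown,
                                       0, 0, 0, "APP", "swapchain recreated");
    }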
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d ) const + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( layerPrefix.size() == message.size() ); +#else + if ( layerPrefix.size() != message.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::VkInstance::debugReportMessageEXT: layerPrefix.size() != message.size()" ); + } +#endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + d.vkDebugReportMessageEXT( m_instance, static_cast( flags ), static_cast( objectType ), object, location, messageCode, layerPrefix.c_str(), message.c_str() ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast( callback ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional allocator, Dispatch const &d ) const 
VULKAN_HPP_NOEXCEPT + { + d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast( messenger ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyInstance( m_instance, reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroyInstance( m_instance, reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( pAllocator ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional allocator, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + d.vkDestroySurfaceKHR( m_instance, static_cast( surface ), reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroups(Dispatch const &d ) const + { + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return createResultValue( result, physicalDeviceGroupProperties, 
VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector physicalDeviceGroupProperties( vectorAllocator ); + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroupsKHR(Dispatch const &d ) const + { + std::vector physicalDeviceGroupProperties; + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + } + return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector physicalDeviceGroupProperties( vectorAllocator ); + uint32_t physicalDeviceGroupCount; + Result result; + do + { + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount ) + { + physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); + result = static_cast( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( 
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( pPhysicalDevices ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type Instance::enumeratePhysicalDevices(Dispatch const &d ) const
+  {
+    std::vector<PhysicalDevice,Allocator> physicalDevices;
+    uint32_t physicalDeviceCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && physicalDeviceCount )
+      {
+        physicalDevices.resize( physicalDeviceCount );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( physicalDevices.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+      physicalDevices.resize( physicalDeviceCount );
+    }
+    return createResultValue( result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDevices" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type Instance::enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PhysicalDevice,Allocator> physicalDevices( vectorAllocator );
+    uint32_t physicalDeviceCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && physicalDeviceCount )
+      {
+        physicalDevices.resize( physicalDeviceCount );
+        result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice*>( physicalDevices.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+      physicalDevices.resize( physicalDeviceCount );
+    }
+    return createResultValue( result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDevices" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char* pName, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetInstanceProcAddr( m_instance, pName );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetInstanceProcAddr( m_instance, name.c_str() );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
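+
+  // getProcAddr() above is the primitive from which dispatchers are built:
+  // every wrapper takes a `Dispatch const &d` whose members are function
+  // pointers, so calls can be routed through a dynamically resolved loader.
+  // Hedged sketch, comment only; assumes the extension is actually present:
+  //
+  //   auto pfn = reinterpret_cast<PFN_vkSubmitDebugUtilsMessageEXT>(
+  //       instance.getProcAddr( "vkSubmitDebugUtilsMessageEXT" ) );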
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( pCallbackData ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( &callbackData ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkObjectTableNVX.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkObjectTableNVX.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkObjectTableNVX.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkPerformanceConfigurationINTEL.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkPerformanceConfigurationINTEL.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkPerformanceConfigurationINTEL.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../structs.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkPhysicalDevice.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkPhysicalDevice.hpp new file mode 100644 index 00000000..349f00d0 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkPhysicalDevice.hpp @@ -0,0 +1,1859 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. 
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<Display>::type PhysicalDevice::acquireXlibDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
+  {
+    Display dpy;
+    Result result = static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) );
+    return createResultValue( result, dpy, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::acquireXlibDisplayEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDevice*>( pDevice ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Device device;
+    Result result = static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDevice*>( &device ) ) );
+    return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDevice" );
+  }
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<Device,Dispatch>>::type PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Device device;
+    Result result = static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo*>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks*>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDevice*>( &device ) ) );
+
+    ObjectDestroy<NoParent,Dispatch> deleter( allocator, d );
+    return createResultValue<Device,Dispatch>( result, device, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDeviceUnique", deleter );
+  }
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks*>( pAllocator ), reinterpret_cast<VkDisplayModeKHR*>( pMode ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const &d ) const
+  {
VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; + Result result = static_cast( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast( display ), reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &mode ) ) ); + return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::createDisplayModeKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? 
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceLayerProperties(Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceLayerProperties" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceLayerProperties" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, 
nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModeProperties2KHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModeProperties2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess 
) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast( pDisplayPlaneInfo ), reinterpret_cast( pCapabilities ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities; + Result result = static_cast( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast( &displayPlaneInfo ), reinterpret_cast( &capabilities ) ) ); + return createResultValue( result, capabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast( mode ), planeIndex, reinterpret_cast( pCapabilities ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities; + Result result = static_cast( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast( mode ), planeIndex, reinterpret_cast( &capabilities ) ) ); + return createResultValue( result, capabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast( pDisplays ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d ) const + { + std::vector displays; + uint32_t displayCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); + if ( ( result == Result::eSuccess ) 
&& displayCount ) + { + displays.resize( displayCount ); + result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( displayCount <= displays.size() ); + displays.resize( displayCount ); + } + return createResultValue( result, displays, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector displays( vectorAllocator ); + uint32_t displayCount; + Result result; + do + { + result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && displayCount ) + { + displays.resize( displayCount ); + result = static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( displayCount <= displays.size() ); + displays.resize( displayCount ); + } + return createResultValue( result, displays, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast( pTimeDomains ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Dispatch const &d ) const + { + std::vector timeDomains; + uint32_t timeDomainCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + timeDomains.resize( timeDomainCount ); + } + return createResultValue( result, timeDomains, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector timeDomains( vectorAllocator ); + uint32_t timeDomainCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && timeDomainCount ) + { + timeDomains.resize( timeDomainCount ); + result = static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); + 
} + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); + timeDomains.resize( timeDomainCount ); + } + return createResultValue( result, timeDomains, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCooperativeMatrixPropertiesNV(Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCooperativeMatrixPropertiesNV(Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + 
properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + 
VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayProperties2KHR(Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayProperties2KHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayProperties2KHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPropertiesKHR(Dispatch const &d ) const + { + std::vector properties; + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == 
Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPropertiesKHR" ); + } + template + VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const + { + std::vector properties( vectorAllocator ); + uint32_t propertyCount; + Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == Result::eIncomplete ); + if ( result == Result::eSuccess ) + { + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + properties.resize( propertyCount ); + } + return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPropertiesKHR" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast( pExternalBufferInfo ), reinterpret_cast( pExternalBufferProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; + d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast( &externalBufferInfo ), reinterpret_cast( &externalBufferProperties ) ); + return externalBufferProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast( pExternalBufferInfo ), reinterpret_cast( pExternalBufferProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; + d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast( &externalBufferInfo ), reinterpret_cast( &externalBufferProperties ) ); + return externalBufferProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, 
VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast( pExternalFenceInfo ), reinterpret_cast( pExternalFenceProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; + d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast( &externalFenceInfo ), reinterpret_cast( &externalFenceProperties ) ); + return externalFenceProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast( pExternalFenceInfo ), reinterpret_cast( pExternalFenceProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; + d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast( &externalFenceInfo ), reinterpret_cast( &externalFenceProperties ) ); + return externalFenceProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + return static_cast( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( tiling ), static_cast( usage ), static_cast( flags ), static_cast( externalHandleType ), reinterpret_cast( pExternalImageFormatProperties ) ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d ) const + { + VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties; + Result result = static_cast( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast( format ), static_cast( type ), static_cast( tiling ), static_cast( usage ), static_cast( flags ), static_cast( externalHandleType ), reinterpret_cast( 
&externalImageFormatProperties ) ) ); + return createResultValue( result, externalImageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getExternalImageFormatPropertiesNV" ); + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast( pExternalSemaphoreInfo ), reinterpret_cast( pExternalSemaphoreProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; + d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast( &externalSemaphoreInfo ), reinterpret_cast( &externalSemaphoreProperties ) ); + return externalSemaphoreProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast( pExternalSemaphoreInfo ), reinterpret_cast( pExternalSemaphoreProperties ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; + d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast( &externalSemaphoreInfo ), reinterpret_cast( &externalSemaphoreProperties ) ); + return externalSemaphoreProperties; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast( pFeatures ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures PhysicalDevice::getFeatures(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features; + d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast( &features ) ); + return features; + } +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT + { + d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast( pFeatures ) ); + } +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2(Dispatch const &d ) const 
VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
+    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
+    return features;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2& features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
+    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( pFeatures ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
+    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
+    return features;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2& features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
+    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2*>( &features ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
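+
+  // The StructureChain overloads above let a caller pull extension feature
+  // structs through the pNext chain in one query. Hedged usage sketch, comment
+  // only; assumes the Vulkan 1.1 feature struct types are available:
+  //
+  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
+  //                                            vk::PhysicalDevice16BitStorageFeatures>();
+  //   auto &storage = chain.get<vk::PhysicalDevice16BitStorageFeatures>();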
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties* pFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties*>( pFormatProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
+    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties*>( &formatProperties ) );
+    return formatProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( pFormatProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
+    return formatProperties;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::FormatProperties2& formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( pFormatProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
+    return formatProperties;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::FormatProperties2& formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2*>( &formatProperties ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getGeneratedCommandsPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX* pFeatures, VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( pFeatures ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( pLimits ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX PhysicalDevice::getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX limits;
+    d.vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( m_physicalDevice, reinterpret_cast<VkDeviceGeneratedCommandsFeaturesNVX*>( &features ), reinterpret_cast<VkDeviceGeneratedCommandsLimitsNVX*>( &limits ) );
+    return limits;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( pImageFormatProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties*>( &imageFormatProperties ) ) );
+    return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
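Editor's note: unlike the `void` getters, `getImageFormatProperties` can fail, so its enhanced overload returns `ResultValueType<ImageFormatProperties>::type`. With exceptions enabled (the default), that collapses to a plain `ImageFormatProperties` and failure surfaces as a thrown `vk::SystemError`; under `VULKAN_HPP_NO_EXCEPTIONS` a `ResultValue` pair comes back instead. A hedged sketch of the exception path, with an illustrative helper name:

```cpp
#include <vulkan/vulkan.hpp>

// Sketch: probe support for a sampled RGBA8 optimal-tiling 2D image.
bool supportsSampledRgba8( vk::PhysicalDevice physicalDevice )
{
  try
  {
    vk::ImageFormatProperties props = physicalDevice.getImageFormatProperties(
        vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D, vk::ImageTiling::eOptimal,
        vk::ImageUsageFlagBits::eSampled, vk::ImageCreateFlags() );
    return props.maxExtent.width > 0;
  }
  catch ( vk::SystemError const & )
  {
    return false;  // e.g. VK_ERROR_FORMAT_NOT_SUPPORTED
  }
}
```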
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( pImageFormatProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
+    return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" );
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2& imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
+    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( pImageFormatProperties ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
+    return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" );
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d ) const
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2& imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2*>( &imageFormatProperties ) ) );
+    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( pMemoryProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
+    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( &memoryProperties ) );
+    return memoryProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( pMemoryProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
+    return memoryProperties;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2& memoryProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( pMemoryProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
+    return memoryProperties;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2& memoryProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( &memoryProperties ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT*>( pMultisampleProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
+    d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT*>( &multisampleProperties ) );
+    return multisampleProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D*>( pRects ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D,Allocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+  {
+    std::vector<Rect2D,Allocator> rects;
+    uint32_t rectCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && rectCount )
+      {
+        rects.resize( rectCount );
+        result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D*>( rects.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( rectCount <= rects.size() );
+      rects.resize( rectCount );
+    }
+    return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getPresentRectanglesKHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D,Allocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<Rect2D,Allocator> rects( vectorAllocator );
+    uint32_t rectCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && rectCount )
+      {
+        rects.resize( rectCount );
+        result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D*>( rects.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( rectCount <= rects.size() );
+      rects.resize( rectCount );
+    }
+    return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getPresentRectanglesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
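Editor's note: `getPresentRectanglesKHR` above shows the generator's standard treatment of array queries — call once with `nullptr` to get the count, size the vector, call again to fill it, and retry while the driver reports `eIncomplete` (the count can change between the two calls). Enhanced mode hides that whole dance; a hypothetical caller, assuming a valid surface and illustrative names:

```cpp
#include <vulkan/vulkan.hpp>

// Sketch: the one-liner that replaces the count/fill/eIncomplete retry loop.
std::vector<vk::Rect2D> queryPresentRects( vk::PhysicalDevice physicalDevice,
                                           vk::SurfaceKHR surface )
{
  return physicalDevice.getPresentRectanglesKHR( surface );
}
```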
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( pProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties PhysicalDevice::getProperties(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
+    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties*>( &properties ) );
+    return properties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( pProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
+    return properties;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2& properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( pProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
+    return properties;
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2KHR(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2& properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2*>( &properties ) );
+    return structureChain;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( pQueueFamilyProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties,Allocator> PhysicalDevice::getQueueFamilyProperties(Dispatch const &d ) const
+  {
+    std::vector<QueueFamilyProperties,Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties,Allocator> PhysicalDevice::getQueueFamilyProperties(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<QueueFamilyProperties,Allocator> queueFamilyProperties( vectorAllocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( pQueueFamilyProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2(Dispatch const &d ) const
+  {
+    std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties( vectorAllocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+  template<typename StructureChain, typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<StructureChain,Allocator> PhysicalDevice::getQueueFamilyProperties2(Dispatch const &d ) const
+  {
+    std::vector<StructureChain,Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> localVector( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      localVector[i].pNext = queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( localVector.data() ) );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = localVector[i];
+    }
+    return queueFamilyProperties;
+  }
+  template<typename StructureChain, typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<StructureChain,Allocator> PhysicalDevice::getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<StructureChain,Allocator> queueFamilyProperties( vectorAllocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> localVector( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      localVector[i].pNext = queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( localVector.data() ) );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = localVector[i];
+    }
+    return queueFamilyProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( pQueueFamilyProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Dispatch const &d ) const
+  {
+    std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<QueueFamilyProperties2,Allocator> queueFamilyProperties( vectorAllocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( queueFamilyProperties.data() ) );
+    return queueFamilyProperties;
+  }
+  template<typename StructureChain, typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<StructureChain,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Dispatch const &d ) const
+  {
+    std::vector<StructureChain,Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> localVector( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      localVector[i].pNext = queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( localVector.data() ) );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = localVector[i];
+    }
+    return queueFamilyProperties;
+  }
+  template<typename StructureChain, typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<StructureChain,Allocator> PhysicalDevice::getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<StructureChain,Allocator> queueFamilyProperties( vectorAllocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> localVector( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      localVector[i].pNext = queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2*>( localVector.data() ) );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount ; i++ )
+    {
+      queueFamilyProperties[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = localVector[i];
+    }
+    return queueFamilyProperties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
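Editor's note: the queue-family getters above are the usual entry point for device creation. A hedged sketch of picking a graphics-capable family with the enhanced overload; the helper name and the UINT32_MAX error convention are illustrative only:

```cpp
#include <vulkan/vulkan.hpp>
#include <cstdint>

// Sketch: index of the first queue family that supports graphics work,
// or UINT32_MAX if none does.
uint32_t findGraphicsQueueFamily( vk::PhysicalDevice physicalDevice )
{
  std::vector<vk::QueueFamilyProperties> families =
      physicalDevice.getQueueFamilyProperties();
  for ( uint32_t i = 0; i < static_cast<uint32_t>( families.size() ); ++i )
  {
    if ( families[i].queueFlags & vk::QueueFlagBits::eGraphics )
    {
      return i;
    }
  }
  return UINT32_MAX;
}
```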
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( pProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties,Allocator> PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const &d ) const
+  {
+    std::vector<SparseImageFormatProperties,Allocator> properties;
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( properties.data() ) );
+    return properties;
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties,Allocator> PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<SparseImageFormatProperties,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties*>( properties.data() ) );
+    return properties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( pProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2,Allocator> PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d ) const
+  {
+    std::vector<SparseImageFormatProperties2,Allocator> properties;
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( properties.data() ) );
+    return properties;
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2,Allocator> PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<SparseImageFormatProperties2,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( properties.data() ) );
+    return properties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( pProperties ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2,Allocator> PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d ) const
+  {
+    std::vector<SparseImageFormatProperties2,Allocator> properties;
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( properties.data() ) );
+    return properties;
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2,Allocator> PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<SparseImageFormatProperties2,Allocator> properties( vectorAllocator );
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2*>( properties.data() ) );
+    return properties;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( pCombinations ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d ) const
+  {
+    std::vector<FramebufferMixedSamplesCombinationNV,Allocator> combinations;
+    uint32_t combinationCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && combinationCount )
+      {
+        combinations.resize( combinationCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( combinations.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
+      combinations.resize( combinationCount );
+    }
+    return createResultValue( result, combinations, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<FramebufferMixedSamplesCombinationNV,Allocator> combinations( vectorAllocator );
+    uint32_t combinationCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && combinationCount )
+      {
+        combinations.resize( combinationCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( combinations.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
+      combinations.resize( combinationCount );
+    }
+    return createResultValue( result, combinations, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT*>( pSurfaceCapabilities ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT*>( &surfaceCapabilities ) ) );
+    return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2EXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR*>( pSurfaceCapabilities ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR*>( &surfaceCapabilities ) ) );
+    return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" );
+  }
+  template<typename X, typename Y, typename ...Z, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+  {
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR& surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR*>( &surfaceCapabilities ) ) );
+    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR*>( pSurfaceCapabilities ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR*>( &surfaceCapabilities ) ) );
+    return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilitiesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR*>( pSurfaceFormats ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+  {
+    std::vector<SurfaceFormat2KHR,Allocator> surfaceFormats;
+    uint32_t surfaceFormatCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR*>( surfaceFormats.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<SurfaceFormat2KHR,Allocator> surfaceFormats( vectorAllocator );
+    uint32_t surfaceFormatCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR*>( surfaceFormats.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( pSurfaceFormats ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+  {
+    std::vector<SurfaceFormatKHR,Allocator> surfaceFormats;
+    uint32_t surfaceFormatCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( surfaceFormats.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<SurfaceFormatKHR,Allocator> surfaceFormats( vectorAllocator );
+    uint32_t surfaceFormatCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && surfaceFormatCount )
+      {
+        surfaceFormats.resize( surfaceFormatCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR*>( surfaceFormats.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
+      surfaceFormats.resize( surfaceFormatCount );
+    }
+    return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
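Editor's note: `getSurfaceFormatsKHR` feeds swapchain creation. A common selection policy, sketched under the assumption of a live surface; the helper name and the preference for BGRA8 sRGB are illustrative, not mandated by this patch:

```cpp
#include <vulkan/vulkan.hpp>

// Sketch: prefer BGRA8 sRGB; otherwise settle for the first reported format.
vk::SurfaceFormatKHR chooseSurfaceFormat( vk::PhysicalDevice physicalDevice,
                                          vk::SurfaceKHR surface )
{
  std::vector<vk::SurfaceFormatKHR> formats =
      physicalDevice.getSurfaceFormatsKHR( surface );
  for ( vk::SurfaceFormatKHR const & f : formats )
  {
    if ( f.format == vk::Format::eB8G8R8A8Srgb &&
         f.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear )
    {
      return f;
    }
  }
  return formats.front();  // non-empty for any supported surface
}
```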
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( pSurfaceInfo ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR*>( pPresentModes ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d ) const
+  {
+    std::vector<PresentModeKHR,Allocator> presentModes;
+    uint32_t presentModeCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &presentModeCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &presentModeCount, reinterpret_cast<VkPresentModeKHR*>( presentModes.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+      presentModes.resize( presentModeCount );
+    }
+    return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModes2EXT" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PresentModeKHR,Allocator> presentModes( vectorAllocator );
+    uint32_t presentModeCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &presentModeCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( &surfaceInfo ), &presentModeCount, reinterpret_cast<VkPresentModeKHR*>( presentModes.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+      presentModes.resize( presentModeCount );
+    }
+    return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModes2EXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR*>( pPresentModes ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+  {
+    std::vector<PresentModeKHR,Allocator> presentModes;
+    uint32_t presentModeCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR*>( presentModes.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+      presentModes.resize( presentModeCount );
+    }
+    return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" );
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<PresentModeKHR,Allocator> presentModes( vectorAllocator );
+    uint32_t presentModeCount;
+    Result result;
+    do
+    {
+      result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
+      if ( ( result == Result::eSuccess ) && presentModeCount )
+      {
+        presentModes.resize( presentModeCount );
+        result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR*>( presentModes.data() ) ) );
+      }
+    } while ( result == Result::eIncomplete );
+    if ( result == Result::eSuccess )
+    {
+      VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
+      presentModes.resize( presentModeCount );
+    }
+    return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32*>( pSupported ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::Bool32 supported;
+    Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32*>( &supported ) ) );
+    return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceSupportKHR" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
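Editor's note: `getSurfaceSupportKHR` and `getSurfacePresentModesKHR` round out the surface queries. A sketch combining them, assuming a valid surface and a queue family index such as the one from the earlier sketch; FIFO is the fallback the Vulkan spec guarantees to exist:

```cpp
#include <vulkan/vulkan.hpp>
#include <stdexcept>

// Sketch: confirm presentability, then prefer mailbox over FIFO.
vk::PresentModeKHR choosePresentMode( vk::PhysicalDevice physicalDevice,
                                      vk::SurfaceKHR surface,
                                      uint32_t queueFamilyIndex )
{
  if ( !physicalDevice.getSurfaceSupportKHR( queueFamilyIndex, surface ) )
  {
    throw std::runtime_error( "queue family cannot present to this surface" );
  }
  for ( vk::PresentModeKHR mode : physicalDevice.getSurfacePresentModesKHR( surface ) )
  {
    if ( mode == vk::PresentModeKHR::eMailbox )
    {
      return mode;  // low-latency, no tearing
    }
  }
  return vk::PresentModeKHR::eFifo;  // always available per the spec
}
```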
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Bool32>( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplay, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( pDisplay ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d ) const
+  {
+    VULKAN_HPP_NAMESPACE::DisplayKHR display;
+    Result result = static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR*>( &display ) ) );
+    return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getRandROutputDisplayEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::releaseDisplayEXT" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+} // namespace VULKAN_HPP_NAMESPACE
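Editor's note: every wrapper in this file threads a `Dispatch const &d` parameter through to the raw `vk*` entry point; the declarations (in `handles.hpp`) default it to the library's configured dispatcher. Whether this project uses the static or the dynamic loader is not visible in this hunk, so the following sketch of passing an explicit `vk::DispatchLoaderDynamic` is an assumption about usage, not the project's confirmed setup:

```cpp
#include <vulkan/vulkan.hpp>

// Sketch: route one query through an explicitly constructed dynamic dispatcher
// instead of the default. `getInstanceProcAddr` would come from the Vulkan
// loader (e.g. via dlsym on Android); both parameters are assumed valid.
void queryWithExplicitDispatch( vk::Instance instance,
                                vk::PhysicalDevice physicalDevice,
                                PFN_vkGetInstanceProcAddr getInstanceProcAddr )
{
  vk::DispatchLoaderDynamic dispatch( instance, getInstanceProcAddr );
  vk::PhysicalDeviceProperties properties = physicalDevice.getProperties( dispatch );
  (void)properties;
}
```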
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkPipeline.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkPipeline.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkPipeline.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkPipelineCache.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkPipelineCache.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkPipelineCache.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkPipelineLayout.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkPipelineLayout.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkPipelineLayout.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkQueryPool.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkQueryPool.hpp
new file mode 100644
index 00000000..bb295a07
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkQueryPool.hpp
@@ -0,0 +1,40 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkQueue.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkQueue.hpp
new file mode 100644
index 00000000..0ffd68d5
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkQueue.hpp
@@ -0,0 +1,180 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../structs.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV*>( pCheckpointData ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<CheckpointDataNV,Allocator> Queue::getCheckpointDataNV(Dispatch const &d ) const
+  {
+    std::vector<CheckpointDataNV,Allocator> checkpointData;
+    uint32_t checkpointDataCount;
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV*>( checkpointData.data() ) );
+    return checkpointData;
+  }
+  template<typename Allocator, typename Dispatch>
+  VULKAN_HPP_INLINE std::vector<CheckpointDataNV,Allocator> Queue::getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d ) const
+  {
+    std::vector<CheckpointDataNV,Allocator> checkpointData( vectorAllocator );
+    uint32_t checkpointDataCount;
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
+    checkpointData.resize( checkpointDataCount );
+    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV*>( checkpointData.data() ) );
+    return checkpointData;
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
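+  // Note: the enhanced getCheckpointDataNV overloads above use the standard
+  // Vulkan two-call idiom: the first vkGetQueueCheckpointDataNV call (with a
+  // null data pointer) queries the element count, the vector is resized to
+  // match, and the second call fills it.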
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo*>( pBindInfo ), static_cast<VkFence>( fence ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfo.size() , reinterpret_cast<const VkBindSparseInfo*>( bindInfo.data() ), static_cast<VkFence>( fence ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::bindSparse" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT(Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( pLabelInfo ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d ) const VULKAN_HPP_NOEXCEPT
+  {
+    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT*>( &labelInfo ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( pPresentInfo ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR*>( &presentInfo ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::presentKHR", { Result::eSuccess, Result::eSuboptimalKHR } );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::setPerformanceConfigurationINTEL" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo*>( pSubmits ), static_cast<VkFence>( fence ) ) );
+  }
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueueSubmit( m_queue, submits.size() , reinterpret_cast<const VkSubmitInfo*>( submits.data() ), static_cast<VkFence>( fence ) ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::submit" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE Result Queue::waitIdle(Dispatch const &d) const VULKAN_HPP_NOEXCEPT
+  {
+    return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
+  }
+#else
+  template<typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::waitIdle(Dispatch const &d ) const
+  {
+    Result result = static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
+    return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Queue::waitIdle" );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+} // namespace VULKAN_HPP_NAMESPACE
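A minimal usage sketch for the wrappers above (illustrative only; it assumes VULKAN_HPP_NAMESPACE is the default `vk` and that `queue`, `cmdBuf`, `fence`, `swapchain`, and `imageIndex` were created elsewhere). The enhanced-mode overloads throw vk::SystemError on failure unless VULKAN_HPP_NO_EXCEPTIONS is defined, while presentKHR returns its Result because eSuboptimalKHR is a non-fatal success code:

    vk::SubmitInfo submitInfo;
    submitInfo.commandBufferCount = 1;
    submitInfo.pCommandBuffers = &cmdBuf;
    queue.submit( { submitInfo }, fence ); // ArrayProxy accepts the initializer list

    vk::PresentInfoKHR presentInfo;
    presentInfo.swapchainCount = 1;
    presentInfo.pSwapchains = &swapchain;
    presentInfo.pImageIndices = &imageIndex;
    if ( queue.presentKHR( presentInfo ) == vk::Result::eSuboptimalKHR )
    {
      // the swapchain still works but no longer matches the surface exactly
    }
    queue.waitIdle();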
diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkRenderPass.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkRenderPass.hpp new file mode 100644 index 00000000..bb295a07 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkRenderPass.hpp @@ -0,0 +1,40 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../structs.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkSampler.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkSampler.hpp new file mode 100644 index 00000000..bb295a07 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkSampler.hpp @@ -0,0 +1,40 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc.
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../structs.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkSamplerYcbcrConversion.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkSamplerYcbcrConversion.hpp new file mode 100644 index 00000000..bb295a07 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkSamplerYcbcrConversion.hpp @@ -0,0 +1,40 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. 
+// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../structs.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkSemaphore.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkSemaphore.hpp new file mode 100644 index 00000000..bb295a07 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkSemaphore.hpp @@ -0,0 +1,40 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../structs.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkShaderModule.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkShaderModule.hpp new file mode 100644 index 00000000..bb295a07 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkShaderModule.hpp @@ -0,0 +1,40 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../structs.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkSurfaceKHR.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkSurfaceKHR.hpp new file mode 100644 index 00000000..bb295a07 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkSurfaceKHR.hpp @@ -0,0 +1,40 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
+ +#pragma once + +#include "../structs.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkSwapchainKHR.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkSwapchainKHR.hpp new file mode 100644 index 00000000..bb295a07 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkSwapchainKHR.hpp @@ -0,0 +1,40 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../structs.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/cmds/VkValidationCacheEXT.hpp b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkValidationCacheEXT.hpp new file mode 100644 index 00000000..bb295a07 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/cmds/VkValidationCacheEXT.hpp @@ -0,0 +1,40 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. 
+// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../structs.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/commands.hpp b/app/libraries/VulkanHpp/vulkan/internal/commands.hpp new file mode 100644 index 00000000..2e9ef411 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/commands.hpp @@ -0,0 +1,74 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
+ +#pragma once + +#include "cmds/Vk.hpp" +#include "cmds/VkAccelerationStructureNV.hpp" +#include "cmds/VkDebugUtilsMessengerEXT.hpp" +#include "cmds/VkBuffer.hpp" +#include "cmds/VkSwapchainKHR.hpp" +#include "cmds/VkQueue.hpp" +#include "cmds/VkInstance.hpp" +#include "cmds/VkBufferView.hpp" +#include "cmds/VkCommandPool.hpp" +#include "cmds/VkCommandBuffer.hpp" +#include "cmds/VkShaderModule.hpp" +#include "cmds/VkDebugReportCallbackEXT.hpp" +#include "cmds/VkDescriptorPool.hpp" +#include "cmds/VkDescriptorSet.hpp" +#include "cmds/VkDescriptorSetLayout.hpp" +#include "cmds/VkDescriptorUpdateTemplate.hpp" +#include "cmds/VkSemaphore.hpp" +#include "cmds/VkDevice.hpp" +#include "cmds/VkRenderPass.hpp" +#include "cmds/VkDeviceMemory.hpp" +#include "cmds/VkDisplayKHR.hpp" +#include "cmds/VkPerformanceConfigurationINTEL.hpp" +#include "cmds/VkEvent.hpp" +#include "cmds/VkDisplayModeKHR.hpp" +#include "cmds/VkFence.hpp" +#include "cmds/VkFramebuffer.hpp" +#include "cmds/VkImage.hpp" +#include "cmds/VkImageView.hpp" +#include "cmds/VkIndirectCommandsLayoutNVX.hpp" +#include "cmds/VkObjectTableNVX.hpp" +#include "cmds/VkPhysicalDevice.hpp" +#include "cmds/VkPipeline.hpp" +#include "cmds/VkPipelineCache.hpp" +#include "cmds/VkPipelineLayout.hpp" +#include "cmds/VkQueryPool.hpp" +#include "cmds/VkSampler.hpp" +#include "cmds/VkSamplerYcbcrConversion.hpp" +#include "cmds/VkSurfaceKHR.hpp" +#include "cmds/VkValidationCacheEXT.hpp" diff --git a/app/libraries/VulkanHpp/vulkan/internal/dispatch.hpp b/app/libraries/VulkanHpp/vulkan/internal/dispatch.hpp new file mode 100644 index 00000000..30816f45 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/dispatch.hpp @@ -0,0 +1,1968 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
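+// The macros below select the default dispatcher. When VK_NO_PROTOTYPES is
+// defined, VULKAN_HPP_DEFAULT_DISPATCHER resolves to a DispatchLoaderDynamic
+// instance holding function pointers; otherwise the DispatchLoaderStatic
+// class below forwards each call directly to the prototypes exported by the
+// Vulkan loader.
+//
+// A minimal sketch of the dynamic path (illustration only; it assumes this
+// header bundle also provides vk::DynamicLoader): exactly one translation
+// unit defines the storage and initializes it, e.g.
+//
+//   VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE
+//
+//   vk::DynamicLoader dl;
+//   auto pfnGetInstanceProcAddr = dl.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
+//   VULKAN_HPP_DEFAULT_DISPATCHER.init( pfnGetInstanceProcAddr );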
+ +#pragma once + +#include "base.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + class DispatchLoaderDynamic; +#if !defined(VULKAN_HPP_DISPATCH_LOADER_DYNAMIC) +# if defined(VK_NO_PROTOTYPES) +# define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1 +# else +# define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 0 +# endif +#endif + +#if !defined(VULKAN_HPP_DEFAULT_DISPATCHER) +# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 +# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic +# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE namespace VULKAN_HPP_NAMESPACE { DispatchLoaderDynamic defaultDispatchLoaderDynamic; } + extern DispatchLoaderDynamic defaultDispatchLoaderDynamic; +# else +# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic() +# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE +# endif +#endif + +#if !defined(VULKAN_HPP_DEFAULT_DISPATCHER_TYPE) +# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 + #define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic +# else +# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic +# endif +#endif + +#if !defined(VK_NO_PROTOTYPES) + class DispatchLoaderStatic + { + public: + VkResult vkCreateInstance( const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance ); + } + + VkResult vkEnumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties ); + } + + VkResult vkEnumerateInstanceLayerProperties( uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties ); + } + + VkResult vkEnumerateInstanceVersion( uint32_t* pApiVersion ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceVersion( pApiVersion ); + } + + VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo ); + } + + void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin ); + } + + void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); + } + + void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags ); + } + + void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index ); + } + + void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass( commandBuffer, 
pRenderPassBegin, contents ); + } + + void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfoKHR* pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); + } + + void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); + } + + void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorSets( commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets ); + } + + void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType ); + } + + void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline ); + } + + void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout ); + } + + void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes ); + } + + void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets ); + } + + void vkCmdBlitImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); + } + + void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset ); + } + + void vkCmdClearAttachments( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const 
VkClearRect* pRects ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects ); + } + + void vkCmdClearColorImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges ); + } + + void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges ); + } + + void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeNV mode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode ); + } + + void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions ); + } + + void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); + } + + void vkCmdCopyImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } + + void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); + } + + void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags ); + } + + void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo ); + } + + void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerEndEXT( commandBuffer ); + } + + void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo ); + } + + void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ ); + } + + void vkCmdDispatchBase( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + + void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + + void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset ); + } + + void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); + } + + void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } + + void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride ); + } + + void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectCountKHR( 
commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask ); + } + + void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndConditionalRenderingEXT( commandBuffer ); + } + + void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer ); + } + + void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndQuery( commandBuffer, queryPool, query ); + } + + void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index ); + } + + void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass( commandBuffer ); + } + + void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfoKHR* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo ); + } + + void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); + } + + void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers ); + } + + void vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data ); + } + + void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); + } + + void vkCmdNextSubpass( VkCommandBuffer 
commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdNextSubpass( commandBuffer, contents );
+    }
+
+    void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, const VkSubpassBeginInfoKHR* pSubpassBeginInfo, const VkSubpassEndInfoKHR* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
+    }
+
+    void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPipelineBarrier( commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
+    }
+
+    void vkCmdProcessCommandsNVX( VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX* pProcessCommandsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdProcessCommandsNVX( commandBuffer, pProcessCommandsInfo );
+    }
+
+    void vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues );
+    }
+
+    void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites );
+    }
+
+    void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData );
+    }
+
+    void vkCmdReserveSpaceForCommandsNVX( VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdReserveSpaceForCommandsNVX( commandBuffer, pReserveSpaceInfo );
+    }
+
+    void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResetEvent( commandBuffer, event, stageMask );
+    }
+
+    void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount );
+    }
+
+    void vkCmdResolveImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
+    }
+
+    void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetBlendConstants( commandBuffer, blendConstants );
+    }
+
+    void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void* pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker );
+    }
+
+    void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders );
+    }
+
+    void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+    }
+
+    void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds );
+    }
+
+    void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDeviceMask( commandBuffer, deviceMask );
+    }
+
+    void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask );
+    }
+
+    void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles );
+    }
+
+    void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetEvent( commandBuffer, event, stageMask );
+    }
+
+    void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors );
+    }
+
+    void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern );
+    }
+
+    void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetLineWidth( commandBuffer, lineWidth );
+    }
+
+    VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo );
+    }
+
+    VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo );
+    }
+
+    VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo );
+    }
+
+    void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo );
+    }
+
+    void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors );
+    }
+
+    void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask );
+    }
+
+    void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference );
+    }
+
+    void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask );
+    }
+
+    void vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports );
+    }
+
+    void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes );
+    }
+
+    void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings );
+    }
+
+    void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdTraceRaysNV( commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth );
+    }
+
+    void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData );
+    }
+
+    void vkCmdWaitEvents( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWaitEvents( commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
+    }
+
+    void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteAccelerationStructuresPropertiesNV( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
+    }
+
+    void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker );
+    }
+
+    void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query );
+    }
+
+    VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEndCommandBuffer( commandBuffer );
+    }
+
+    VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetCommandBuffer( commandBuffer, flags );
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex );
+    }
+
+    VkResult vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex );
+    }
+
+    VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration );
+    }
+
+    VkResult vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers );
+    }
+
+    VkResult vkAllocateDescriptorSets( VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets );
+    }
+
+    VkResult vkAllocateMemory( VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory );
+    }
+
+    VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos );
+    }
+
+    VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindBufferMemory( device, buffer, memory, memoryOffset );
+    }
+
+    VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos );
+    }
+
+    VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos );
+    }
+
+    VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindImageMemory( device, image, memory, memoryOffset );
+    }
+
+    VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos );
+    }
+
+    VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos );
+    }
+
+    VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCompileDeferredNV( device, pipeline, shader );
+    }
+
+    VkResult vkCreateAccelerationStructureNV( VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure );
+    }
+
+    VkResult vkCreateBuffer( VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer );
+    }
+
+    VkResult vkCreateBufferView( VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView );
+    }
+
+    VkResult vkCreateCommandPool( VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool );
+    }
+
+    VkResult vkCreateComputePipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+    }
+
+    VkResult vkCreateDescriptorPool( VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool );
+    }
+
+    VkResult vkCreateDescriptorSetLayout( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout );
+    }
+
+    VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
+    }
+
+    VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
+    }
+
+    VkResult vkCreateEvent( VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent );
+    }
+
+    VkResult vkCreateFence( VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence );
+    }
+
+    VkResult vkCreateFramebuffer( VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer );
+    }
+
+    VkResult vkCreateGraphicsPipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+    }
+
+    VkResult vkCreateImage( VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage );
+    }
+
+    VkResult vkCreateImageView( VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView );
+    }
+
+    VkResult vkCreateIndirectCommandsLayoutNVX( VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNVX* pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateIndirectCommandsLayoutNVX( device, pCreateInfo, pAllocator, pIndirectCommandsLayout );
+    }
+
+    VkResult vkCreateObjectTableNVX( VkDevice device, const VkObjectTableCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkObjectTableNVX* pObjectTable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateObjectTableNVX( device, pCreateInfo, pAllocator, pObjectTable );
+    }
+
+    VkResult vkCreatePipelineCache( VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache );
+    }
+
+    VkResult vkCreatePipelineLayout( VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout );
+    }
+
+    VkResult vkCreateQueryPool( VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool );
+    }
+
+    VkResult vkCreateRayTracingPipelinesNV( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+    }
+
+    VkResult vkCreateRenderPass( VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass );
+    }
+
+    VkResult vkCreateRenderPass2KHR( VkDevice device, const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass );
+    }
+
+    VkResult vkCreateSampler( VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler );
+    }
+
+    VkResult vkCreateSamplerYcbcrConversion( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion );
+    }
+
+    VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion );
+    }
+
+    VkResult vkCreateSemaphore( VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore );
+    }
+
+    VkResult vkCreateShaderModule( VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule );
+    }
+
+    VkResult vkCreateSharedSwapchainsKHR( VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains );
+    }
+
+    VkResult vkCreateSwapchainKHR( VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain );
+    }
+
+    VkResult vkCreateValidationCacheEXT( VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache );
+    }
+
+    VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo );
+    }
+
+    VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo );
+    }
+
+    void vkDestroyAccelerationStructureNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator );
+    }
+
+    void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyBuffer( device, buffer, pAllocator );
+    }
+
+    void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyBufferView( device, bufferView, pAllocator );
+    }
+
+    void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyCommandPool( device, commandPool, pAllocator );
+    }
+
+    void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator );
+    }
+
+    void vkDestroyDescriptorSetLayout( VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator );
+    }
+
+    void vkDestroyDescriptorUpdateTemplate( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator );
+    }
+
+    void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator );
+    }
+
+    void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDevice( device, pAllocator );
+    }
+
+    void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyEvent( device, event, pAllocator );
+    }
+
+    void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyFence( device, fence, pAllocator );
+    }
+
+    void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyFramebuffer( device, framebuffer, pAllocator );
+    }
+
+    void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyImage( device, image, pAllocator );
+    }
+
+    void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyImageView( device, imageView, pAllocator );
+    }
+
+    void vkDestroyIndirectCommandsLayoutNVX( VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyIndirectCommandsLayoutNVX( device, indirectCommandsLayout, pAllocator );
+    }
+
+    void vkDestroyObjectTableNVX( VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyObjectTableNVX( device, objectTable, pAllocator );
+    }
+
+    void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyPipeline( device, pipeline, pAllocator );
+    }
+
+    void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator );
+    }
+
+    void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator );
+    }
+
+    void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyQueryPool( device, queryPool, pAllocator );
+    }
+
+    void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyRenderPass( device, renderPass, pAllocator );
+    }
+
+    void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySampler( device, sampler, pAllocator );
+    }
+
+    void vkDestroySamplerYcbcrConversion( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator );
+    }
+
+    void vkDestroySamplerYcbcrConversionKHR( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator );
+    }
+
+    void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySemaphore( device, semaphore, pAllocator );
+    }
+
+    void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyShaderModule( device, shaderModule, pAllocator );
+    }
+
+    void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySwapchainKHR( device, swapchain, pAllocator );
+    }
+
+    void vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator );
+    }
+
+    VkResult vkDeviceWaitIdle( VkDevice device ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDeviceWaitIdle( device );
+    }
+
+    VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo );
+    }
+
+    VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
+    }
+
+    void vkFreeCommandBuffers( VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers );
+    }
+
+    VkResult vkFreeDescriptorSets( VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets );
+    }
+
+    void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkFreeMemory( device, memory, pAllocator );
+    }
+
+    VkResult vkGetAccelerationStructureHandleNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData );
+    }
+
+    void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements );
+    }
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties );
+    }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+    VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfoEXT* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferDeviceAddressEXT( device, pInfo );
+    }
+
+    void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements );
+    }
+
+    void vkGetBufferMemoryRequirements2( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements );
+    }
+
+    void vkGetBufferMemoryRequirements2KHR( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements );
+    }
+
+    VkResult vkGetCalibratedTimestampsEXT( VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation );
+    }
+
+    void vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport );
+    }
+
+    void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport );
+    }
+
+    void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
+    }
+
+    void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
+    }
+
+    VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities );
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes );
+    }
+
+    void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes );
+    }
+
+    PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char* pName ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceProcAddr( device, pName );
+    }
+
+    void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue );
+    }
+
+    void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue );
+    }
+
+    VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetEventStatus( device, event );
+    }
+
+    VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd );
+    }
+
+    VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetFenceStatus( device, fence );
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties );
+    }
+
+    void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements );
+    }
+
+    void vkGetImageMemoryRequirements2( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements );
+    }
+
+    void vkGetImageMemoryRequirements2KHR( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements );
+    }
+
+    void vkGetImageSparseMemoryRequirements( VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }
+
+    void vkGetImageSparseMemoryRequirements2( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }
+
+    void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }
+
+    void vkGetImageSubresourceLayout( VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout );
+    }
+
+    uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageViewHandleNVX( device, pInfo );
+    }
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer );
+    }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+    VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd );
+    }
+
+    VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties );
+    }
+
+    VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties );
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkGetMemoryWin32HandleNV( VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings );
+    }
+
+    VkResult vkGetPerformanceParameterINTEL( VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPerformanceParameterINTEL( device, parameter, pValue );
+    }
+
+    VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData );
+    }
+
+    VkResult vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations );
+    }
+
+    VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties );
+    }
+
+    VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics );
+    }
+
+    VkResult vkGetQueryPoolResults( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags );
+    }
+
+    VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData );
+    }
+
+    VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties );
+    }
+
+    void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity );
+    }
+
+    VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd );
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkGetShaderInfoAMD( VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo );
+    }
+
+    VkResult vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue );
+    }
+
+    VkResult vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages );
+    }
+
+    VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSwapchainStatusKHR( device, swapchain );
+    }
+
+    VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData );
+    }
+
+    VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportFenceFdKHR( device, pImportFenceFdInfo );
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo );
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo );
+    }
+
+    VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
+    }
+
+    VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkMapMemory( device, memory, offset, size, flags, ppData );
+    }
+
+    VkResult vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches );
+    }
+
+    VkResult vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches );
+    }
+
+    VkResult vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence );
+    }
+
+    VkResult vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence );
+    }
+
+    VkResult vkRegisterObjectsNVX( VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkRegisterObjectsNVX( device, objectTable, objectCount, ppObjectTableEntries, pObjectIndices );
+    }
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkReleasePerformanceConfigurationINTEL( device, configuration );
+    }
+
+    VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetCommandPool( device, commandPool, flags );
+    }
+
+    VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetDescriptorPool( device, descriptorPool, flags );
+    }
+
+    VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetEvent( device, event );
+    }
+
+    VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetFences( device, fenceCount, pFences );
+    }
+
+    void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount );
+    }
+
+    VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo );
+    }
+
+    VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo );
+    }
+
+    VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetEvent( device, event );
+    }
+
+    void vkSetHdrMetadataEXT( VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata );
+    }
+
+    void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable );
+    }
+
+    void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkTrimCommandPool( device, commandPool, flags );
+    }
+
+    void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkTrimCommandPoolKHR( device, commandPool, flags );
+    }
+
+    void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUninitializePerformanceApiINTEL( device );
+    }
+
+    void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUnmapMemory( device, memory );
+    }
+
+    VkResult vkUnregisterObjectsNVX( VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUnregisterObjectsNVX( device, objectTable, objectCount, pObjectEntryTypes, pObjectIndices );
+    }
+
+    void vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData );
+    }
+
+    void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData );
+    }
+
+    void vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies );
+    }
+
+    VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout );
+    }
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+    VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback );
+    }
+
+    VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger );
+    }
+
+    VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+    VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    VkResult vkCreateIOSSurfaceMVK( VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    VkResult vkCreateMetalSurfaceEXT( VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#ifdef VK_USE_PLATFORM_GGP
+    VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_GGP*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+    VkResult vkCreateViSurfaceNN( VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    VkResult vkCreateWin32SurfaceKHR( VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    VkResult vkCreateXcbSurfaceKHR( VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    VkResult vkCreateXlibSurfaceKHR( VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+    void vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage );
+    }
+
+    void vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator );
+    }
+
+    void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator );
+    }
+
+    void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyInstance( instance, pAllocator );
+    }
+
+    void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySurfaceKHR( instance, surface, pAllocator );
+    }
+
+    VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
+    }
+
+    VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
+    }
+
+    VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices );
+    }
+
+    PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char* pName ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetInstanceProcAddr( instance, pName );
+    }
+
+    void vkSubmitDebugUtilsMessageEXT( VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData );
+    }
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display );
+    }
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+    VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice );
+    }
+
+    VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode );
+    }
+
+    VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties );
+    }
+
+    VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties );
+    }
+
+    VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties );
+    }
+
+    VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties );
+    }
+
+    VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities );
+    }
+
+    VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities );
+    }
+
+    VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays );
+    }
+
+    VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains );
+    }
+
+    VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties );
+    }
+
+    void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
+    }
+
+    void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
+    }
+
+    void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
+    }
+
+    void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
+    }
+
+    VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties );
+    }
+
+    void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
+    }
+
+    void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
+    }
+
+    void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures );
+    }
+
+    void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures );
+    }
+
+    void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures );
+    }
+
+    void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties );
+    }
+
+    void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties );
+    }
+
+    void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties );
+    }
+
+    void vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX* pFeatures, VkDeviceGeneratedCommandsLimitsNVX* pLimits ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( physicalDevice, pFeatures, pLimits );
+    }
+
+    VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties ) const
VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties ); + } + + VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties ); + } + + VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties ); + } + + void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties ); + } + + void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties ); + } + + void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties ); + } + + void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties ); + } + + VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects ); + } + + void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties ); + } + + void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties ); + } + + void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties ); + } + + void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } + + void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } 
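+
+    // Usage note: the enumeration wrappers above follow Vulkan's two-call
+    // pattern: query a count first, then fill a caller-allocated array. A
+    // minimal sketch, assuming a valid VkPhysicalDevice `gpu` and an
+    // instance `d` of this dispatch class (both names illustrative):
+    //
+    //   uint32_t count = 0;
+    //   d.vkGetPhysicalDeviceQueueFamilyProperties( gpu, &count, nullptr );
+    //   std::vector<VkQueueFamilyProperties> families( count );
+    //   d.vkGetPhysicalDeviceQueueFamilyProperties( gpu, &count, families.data() );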
+ + void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } + + void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties ); + } + + void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); + } + + void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); + } + + VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations ); + } + + VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities ); + } + + VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities ); + } + + VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities ); + } + + VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats ); + } + + VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats ); + } + +#ifdef 
VK_USE_PLATFORM_WIN32_KHR + VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes ); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes ); + } + + VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported ); + } + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display ); + } +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex ); + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_XCB_KHR + VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id ); + } +#endif /*VK_USE_PLATFORM_XCB_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_KHR + VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID ); + } +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT + VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay ); + } +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseDisplayEXT( physicalDevice, display ); + } + + void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData ); + } + + void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo ); + } + + VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, 
VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence ); + } + + void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueEndDebugUtilsLabelEXT( queue ); + } + + void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo ); + } + + VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR* pPresentInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueuePresentKHR( queue, pPresentInfo ); + } + + VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration ); + } + + VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSubmit( queue, submitCount, pSubmits, fence ); + } + + VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueWaitIdle( queue ); + } + }; +#endif +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/dynamicDispatch.hpp b/app/libraries/VulkanHpp/vulkan/internal/dynamicDispatch.hpp new file mode 100644 index 00000000..cfa33586 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/dynamicDispatch.hpp @@ -0,0 +1,1640 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
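+
+// Usage note: this header provides DynamicLoader, a small RAII wrapper over
+// dlopen/LoadLibrary, and DispatchLoaderDynamic, a table of PFN_* function
+// pointers resolved through vkGetInstanceProcAddr/vkGetDeviceProcAddr. A
+// minimal sketch of the typical wiring, assuming the init() overloads
+// declared later in this header (the names `dl`, `dispatcher` and `instance`
+// are illustrative):
+//
+//   vk::DynamicLoader dl;
+//   auto pfnGetInstanceProcAddr =
+//     dl.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
+//   vk::DispatchLoaderDynamic dispatcher( pfnGetInstanceProcAddr );
+//   // ... create a vk::Instance `instance`, then resolve the rest:
+//   dispatcher.init( instance );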
+ +#pragma once + +#include "commands.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ +#ifdef VK_USE_PLATFORM_ANDROID_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#ifdef VK_USE_PLATFORM_ANDROID_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_ANDROID_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> 
struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_USE_PLATFORM_ANDROID_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = 
true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ 
enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct 
isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; +#ifdef VK_USE_PLATFORM_GGP + template <> struct isStructureChainValid{ enum { value = true }; }; +#endif /*VK_USE_PLATFORM_GGP*/ + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> struct isStructureChainValid{ enum { value = true }; }; + template <> 
struct isStructureChainValid{ enum { value = true }; };
+  template <> struct isStructureChainValid{ enum { value = true }; };
+  template <> struct isStructureChainValid{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct isStructureChainValid{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct isStructureChainValid{ enum { value = true }; };
+  template <> struct isStructureChainValid{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct isStructureChainValid{ enum { value = true }; };
+  template <> struct isStructureChainValid{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  template <> struct isStructureChainValid{ enum { value = true }; };
+  template <> struct isStructureChainValid{ enum { value = true }; };
+  template <> struct isStructureChainValid{ enum { value = true }; };
+  template <> struct isStructureChainValid{ enum { value = true }; };
+  template <> struct isStructureChainValid{ enum { value = true }; };
+  template <> struct isStructureChainValid{ enum { value = true }; };
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct isStructureChainValid{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  template <> struct isStructureChainValid{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+  template <> struct isStructureChainValid{ enum { value = true }; };
+  template <> struct isStructureChainValid{ enum { value = true }; };
+
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+  class DynamicLoader
+  {
+  public:
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    DynamicLoader() VULKAN_HPP_NOEXCEPT : m_success( false )
+#else
+    DynamicLoader() : m_success( false )
+#endif
+    {
+#if defined(__linux__)
+      m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL );
+#elif defined(__APPLE__)
+      m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL );
+#elif defined(_WIN32)
+      m_library = LoadLibrary( TEXT( "vulkan-1.dll" ) );
+#else
+      VULKAN_HPP_ASSERT( false && "unsupported platform" );
+#endif
+
+      m_success = m_library != 0;
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+      if ( !m_success )
+      {
+        // NOTE: there should be an InitializationFailedError here, but MSVC insists that the symbol does not exist within the scope of this function.
+        throw std::runtime_error( "Failed to load vulkan library!" );
+      }
+#endif
+    }
+
+    DynamicLoader( DynamicLoader const& ) = delete;
+
+    DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT
+      : m_success(other.m_success)
+      , m_library(other.m_library)
+    {
+      other.m_library = nullptr;
+    }
+
+    DynamicLoader &operator=( DynamicLoader const& ) = delete;
+
+    DynamicLoader &operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT
+    {
+      m_success = other.m_success;
+      std::swap(m_library, other.m_library);
+      return *this;
+    }
+
+    ~DynamicLoader() VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_library )
+      {
+#if defined(__linux__) || defined(__APPLE__)
+        dlclose( m_library );
+#elif defined(_WIN32)
+        FreeLibrary( m_library );
+#endif
+      }
+    }
+
+    template <typename T>
+    T getProcAddress( const char* function ) const VULKAN_HPP_NOEXCEPT
+    {
+#if defined(__linux__) || defined(__APPLE__)
+      return (T)dlsym( m_library, function );
+#elif defined(_WIN32)
+      return (T)GetProcAddress( m_library, function );
+#endif
+    }
+
+    bool success() const VULKAN_HPP_NOEXCEPT { return m_success; }
+
+  private:
+    bool m_success;
+#if defined(__linux__) || defined(__APPLE__)
+    void *m_library;
+#elif defined(_WIN32)
+    HMODULE m_library;
+#else
+#error unsupported platform
+#endif
+  };
+#endif
+
+  class DispatchLoaderDynamic
+  {
+  public:
+    PFN_vkCreateInstance vkCreateInstance = 0;
+    PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0;
+    PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0;
+    PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0;
+    PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0;
+    PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0;
+    PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0;
+    PFN_vkCmdBeginQuery vkCmdBeginQuery = 0;
+    PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0;
+    PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0;
+    PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0;
+    PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0;
+    PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0;
+    PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0;
+    PFN_vkCmdBindPipeline vkCmdBindPipeline = 0;
+    PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0;
+    PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0;
+    PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0;
+    PFN_vkCmdBlitImage vkCmdBlitImage = 0;
+    PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0;
+    PFN_vkCmdClearAttachments vkCmdClearAttachments = 0;
+    PFN_vkCmdClearColorImage vkCmdClearColorImage = 0;
+    PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0;
+    PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0;
+    PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0;
+    PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0;
+    PFN_vkCmdCopyImage vkCmdCopyImage = 0;
+    PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0;
+    PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0;
+    PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0;
+    PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0;
+    PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0;
+    PFN_vkCmdDispatch vkCmdDispatch = 0;
+    PFN_vkCmdDispatchBase vkCmdDispatchBase = 0;
+    PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0;
+    PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0;
+    PFN_vkCmdDraw vkCmdDraw = 0;
+    PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0;
+    PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0;
PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; + PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; + PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; + PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; + PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; + PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; + PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; + PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; + PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; + PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; + PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; + PFN_vkCmdEndQuery vkCmdEndQuery = 0; + PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; + PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; + PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; + PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; + PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; + PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; + PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; + PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; + PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; + PFN_vkCmdProcessCommandsNVX vkCmdProcessCommandsNVX = 0; + PFN_vkCmdPushConstants vkCmdPushConstants = 0; + PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; + PFN_vkCmdReserveSpaceForCommandsNVX vkCmdReserveSpaceForCommandsNVX = 0; + PFN_vkCmdResetEvent vkCmdResetEvent = 0; + PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; + PFN_vkCmdResolveImage vkCmdResolveImage = 0; + PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; + PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; + PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; + PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; + PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; + PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; + PFN_vkCmdSetEvent vkCmdSetEvent = 0; + PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; + PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; + PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; + PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; + PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; + PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; + PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; + PFN_vkCmdSetScissor vkCmdSetScissor = 0; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; + PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; + PFN_vkCmdSetViewport vkCmdSetViewport = 0; + PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; + PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; + PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; + PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; + PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; + PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; + PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; + 
PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; + PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; +#ifdef VK_USE_PLATFORM_WIN32_KHR + PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; + PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; + PFN_vkAllocateMemory vkAllocateMemory = 0; + PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; + PFN_vkBindBufferMemory vkBindBufferMemory = 0; + PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; + PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; + PFN_vkBindImageMemory vkBindImageMemory = 0; + PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; + PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; + PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; + PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; + PFN_vkCreateBuffer vkCreateBuffer = 0; + PFN_vkCreateBufferView vkCreateBufferView = 0; + PFN_vkCreateCommandPool vkCreateCommandPool = 0; + PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; + PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; + PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; + PFN_vkCreateEvent vkCreateEvent = 0; + PFN_vkCreateFence vkCreateFence = 0; + PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; + PFN_vkCreateImage vkCreateImage = 0; + PFN_vkCreateImageView vkCreateImageView = 0; + PFN_vkCreateIndirectCommandsLayoutNVX vkCreateIndirectCommandsLayoutNVX = 0; + PFN_vkCreateObjectTableNVX vkCreateObjectTableNVX = 0; + PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; + PFN_vkCreateQueryPool vkCreateQueryPool = 0; + PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; + PFN_vkCreateRenderPass vkCreateRenderPass = 0; + PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; + PFN_vkCreateSampler vkCreateSampler = 0; + PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; + PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; + PFN_vkCreateSemaphore vkCreateSemaphore = 0; + PFN_vkCreateShaderModule vkCreateShaderModule = 0; + PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; + PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; + PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; + PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; + PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; + PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; + PFN_vkDestroyBuffer vkDestroyBuffer = 0; + PFN_vkDestroyBufferView vkDestroyBufferView = 0; + PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; + PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; + PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; + PFN_vkDestroyDevice vkDestroyDevice = 0; 
+    PFN_vkDestroyEvent vkDestroyEvent = 0;
+    PFN_vkDestroyFence vkDestroyFence = 0;
+    PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0;
+    PFN_vkDestroyImage vkDestroyImage = 0;
+    PFN_vkDestroyImageView vkDestroyImageView = 0;
+    PFN_vkDestroyIndirectCommandsLayoutNVX vkDestroyIndirectCommandsLayoutNVX = 0;
+    PFN_vkDestroyObjectTableNVX vkDestroyObjectTableNVX = 0;
+    PFN_vkDestroyPipeline vkDestroyPipeline = 0;
+    PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0;
+    PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0;
+    PFN_vkDestroyQueryPool vkDestroyQueryPool = 0;
+    PFN_vkDestroyRenderPass vkDestroyRenderPass = 0;
+    PFN_vkDestroySampler vkDestroySampler = 0;
+    PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0;
+    PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0;
+    PFN_vkDestroySemaphore vkDestroySemaphore = 0;
+    PFN_vkDestroyShaderModule vkDestroyShaderModule = 0;
+    PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0;
+    PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0;
+    PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0;
+    PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0;
+    PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0;
+    PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0;
+    PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0;
+    PFN_vkFreeMemory vkFreeMemory = 0;
+    PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0;
+    PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0;
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+    PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0;
+    PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0;
+    PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0;
+    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0;
+    PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0;
+    PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0;
+    PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0;
+    PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0;
+    PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0;
+    PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0;
+    PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0;
+    PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
+    PFN_vkGetDeviceQueue vkGetDeviceQueue = 0;
+    PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0;
+    PFN_vkGetEventStatus vkGetEventStatus = 0;
+    PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0;
+    PFN_vkGetFenceStatus vkGetFenceStatus = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0;
+    PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0;
+    PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0;
+    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0;
+    PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0;
+    PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0;
+    PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0;
+    PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0;
+    PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0;
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+    PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0;
+    PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0;
+    PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0;
+    PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0;
+    PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0;
+    PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0;
+    PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0;
+    PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0;
+    PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0;
+    PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0;
+    PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0;
+    PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0;
+    PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0;
+    PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0;
+    PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0;
+    PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0;
+    PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0;
+    PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0;
+    PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0;
+    PFN_vkMapMemory vkMapMemory = 0;
+    PFN_vkMergePipelineCaches vkMergePipelineCaches = 0;
+    PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0;
+    PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0;
+    PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0;
+    PFN_vkRegisterObjectsNVX vkRegisterObjectsNVX = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0;
+    PFN_vkResetCommandPool vkResetCommandPool = 0;
+    PFN_vkResetDescriptorPool vkResetDescriptorPool = 0;
+    PFN_vkResetEvent vkResetEvent = 0;
+    PFN_vkResetFences vkResetFences = 0;
+    PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0;
+    PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0;
+    PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0;
+    PFN_vkSetEvent vkSetEvent = 0;
+    PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0;
+    PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0;
+    PFN_vkTrimCommandPool vkTrimCommandPool = 0;
+    PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0;
+    PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0;
+    PFN_vkUnmapMemory vkUnmapMemory = 0;
+    PFN_vkUnregisterObjectsNVX vkUnregisterObjectsNVX = 0;
+    PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0;
+    PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0;
+    PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0;
+    PFN_vkWaitForFences vkWaitForFences = 0;
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+    PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0;
+    PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0;
+    PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0;
+    PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0;
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0;
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0;
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+#ifdef VK_USE_PLATFORM_GGP
+    PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0;
+#endif /*VK_USE_PLATFORM_GGP*/
+#ifdef VK_USE_PLATFORM_VI_NN
+    PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0;
+#endif /*VK_USE_PLATFORM_VI_NN*/
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0;
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+    PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0;
+    PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0;
+    PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0;
+    PFN_vkDestroyInstance vkDestroyInstance = 0;
+    PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0;
+    PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0;
+    PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0;
+    PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0;
+    PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
+    PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0;
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0;
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+    PFN_vkCreateDevice vkCreateDevice = 0;
+    PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0;
+    PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0;
+    PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0;
+    PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0;
+    PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0;
+    PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0;
+    PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0;
+    PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0;
+    PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0;
+    PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0;
+    PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0;
+    PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0;
+    PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0;
+    PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0;
+    PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0;
+    PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0;
+    PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0;
+    PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0;
+    PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0;
+    PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0;
+    PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0;
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0;
+    PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0;
+    PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0;
+    PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = 0;
+    PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0;
+    PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0;
+    PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0;
+    PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0;
+    PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0;
+    PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0;
+    PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0;
+    PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0;
+    PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0;
+    PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0;
+    PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0;
+    PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0;
+    PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0;
+    PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0;
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0;
+    PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0;
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0;
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0;
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0;
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0;
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+    PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0;
+    PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0;
+    PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0;
+    PFN_vkQueueBindSparse vkQueueBindSparse = 0;
+    PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0;
+    PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0;
+    PFN_vkQueuePresentKHR vkQueuePresentKHR = 0;
+    PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0;
+    PFN_vkQueueSubmit vkQueueSubmit = 0;
+    PFN_vkQueueWaitIdle vkQueueWaitIdle = 0;
+
+  public:
+    DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default;
+
+#if !defined(VK_NO_PROTOTYPES)
+    // This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
+    template <typename DynamicLoader>
+    void init(VULKAN_HPP_NAMESPACE::Instance const& instance, VULKAN_HPP_NAMESPACE::Device const& device, DynamicLoader const& dl) VULKAN_HPP_NOEXCEPT
+    {
+      PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress<PFN_vkGetInstanceProcAddr>("vkGetInstanceProcAddr");
+      PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.template getProcAddress<PFN_vkGetDeviceProcAddr>("vkGetDeviceProcAddr");
+      init(static_cast<VkInstance>(instance), getInstanceProcAddr, static_cast<VkDevice>(device), device ? getDeviceProcAddr : nullptr);
+    }
+
+    // This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
+    template <typename DynamicLoader = VULKAN_HPP_NAMESPACE::DynamicLoader>
+    void init(VULKAN_HPP_NAMESPACE::Instance const& instance, VULKAN_HPP_NAMESPACE::Device const& device) VULKAN_HPP_NOEXCEPT
+    {
+      static DynamicLoader dl;
+      init(instance, device, dl);
+    }
+#endif // !defined(VK_NO_PROTOTYPES)
+
+    DispatchLoaderDynamic(PFN_vkGetInstanceProcAddr getInstanceProcAddr) VULKAN_HPP_NOEXCEPT
+    {
+      init(getInstanceProcAddr);
+    }
+
+    void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT(getInstanceProcAddr);
+
+      vkGetInstanceProcAddr = getInstanceProcAddr;
+      vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) );
+      vkEnumerateInstanceExtensionProperties = PFN_vkEnumerateInstanceExtensionProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) );
+      vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) );
+      vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) );
+    }
+
+    // This interface does not require a linked vulkan library.
+    DispatchLoaderDynamic( VkInstance instance, PFN_vkGetInstanceProcAddr getInstanceProcAddr, VkDevice device = VK_NULL_HANDLE, PFN_vkGetDeviceProcAddr getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT
+    {
+      init( instance, getInstanceProcAddr, device, getDeviceProcAddr );
+    }
+
+    // This interface does not require a linked vulkan library.
+    void init( VkInstance instance, PFN_vkGetInstanceProcAddr getInstanceProcAddr, VkDevice device = VK_NULL_HANDLE, PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT(instance && getInstanceProcAddr);
+      vkGetInstanceProcAddr = getInstanceProcAddr;
+      init( VULKAN_HPP_NAMESPACE::Instance(instance) );
+      if (device) {
+        init( VULKAN_HPP_NAMESPACE::Device(device) );
+      }
+    }
+
+    void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT
+    {
+      VkInstance instance = static_cast<VkInstance>(instanceCpp);
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+      vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+      vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) );
+      vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) );
+      vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) );
+      vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) );
+#ifdef VK_USE_PLATFORM_IOS_MVK
+      vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) );
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+#ifdef VK_USE_PLATFORM_FUCHSIA
+      vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+      vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) );
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+#ifdef VK_USE_PLATFORM_METAL_EXT
+      vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+#ifdef VK_USE_PLATFORM_GGP
+      vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) );
+#endif /*VK_USE_PLATFORM_GGP*/
+#ifdef VK_USE_PLATFORM_VI_NN
+      vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) );
+#endif /*VK_USE_PLATFORM_VI_NN*/
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+      vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) );
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_XCB_KHR
+      vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) );
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+      vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) );
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+      vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) );
+      vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) );
+      vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) );
+      vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
+      vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) );
+      vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) );
+      vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) );
+      vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
+      vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) );
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+      vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) );
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+      vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
+      vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) );
+      vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) );
+      vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) );
+      vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) );
+      vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) );
+      vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) );
+      vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) );
+      vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) );
+      vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) );
+      vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) );
+      vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) );
+      vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) );
+      vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) );
+      vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) );
+      vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) );
+      vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) );
+      vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) );
+      vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) );
+      vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) );
+      vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) );
+      vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) );
+      vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) );
+      vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) );
+      vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) );
+      vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) );
+      vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) );
+      vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) );
+      vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX = PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX" ) );
+      vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) );
+      vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) );
+      vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) );
+      vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) );
+      vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) );
+      vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) );
+      vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) );
+      vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) );
+      vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
+      vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) );
+      vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) );
+      vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
+      vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) );
+      vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) );
+      vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) );
+      vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) );
+      vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) );
+      vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) );
+      vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) );
+      vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) );
+      vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) );
+      vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) );
+      vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetPhysicalDeviceSurfacePresentModes2EXT = PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) );
+      vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) );
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+      vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) );
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_XCB_KHR
+      vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) );
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+      vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) );
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+      vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) );
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+      vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) );
+      vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) );
+      vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) );
+      vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) );
+      vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) );
+      vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) );
+      vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) );
+      vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) );
+      vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) );
+      vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) );
+      vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) );
+      vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) );
+      vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) );
+      vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) );
+      vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) );
+      vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) );
+      vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) );
+      vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) );
+      vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) );
+      vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) );
+      vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) );
+      vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) );
+      vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) );
+      vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) );
+      vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) );
+      vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) );
+      vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) );
+      vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) );
+      vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) );
+      vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) );
+      vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) );
+      vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) );
+      vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) );
+      vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) );
+      vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) );
+      vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) );
+      vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) );
+      vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) );
+      vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) );
+      vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) );
+      vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) );
+      vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) );
+      vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) );
+      vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) );
+      vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) );
+      vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) );
+      vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) );
+      vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) );
+      vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) );
+      vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) );
+      vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) );
+      vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) );
+      vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) );
+      vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) );
+      vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) );
+      vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) );
+      vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) );
+      vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) );
+      vkCmdProcessCommandsNVX = PFN_vkCmdProcessCommandsNVX( vkGetInstanceProcAddr( instance, "vkCmdProcessCommandsNVX" ) );
+      vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) );
+      vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) );
+      vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
+      vkCmdReserveSpaceForCommandsNVX = PFN_vkCmdReserveSpaceForCommandsNVX( vkGetInstanceProcAddr( instance, "vkCmdReserveSpaceForCommandsNVX" ) );
+      vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) );
+      vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) );
+      vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) );
+      vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) );
+      vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) );
+      vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) );
+      vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) );
+      vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) );
+      vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) );
+      vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) );
+      vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) );
+      vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) );
+      vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) );
+      vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) );
+      vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) );
+      vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) );
+      vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) );
+      vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
+      vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) );
+      vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) );
+      vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) );
+      vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) );
+      vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) );
+      vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) );
+      vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) );
+      vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) );
+      vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) );
+      vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) );
+      vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) );
+      vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+      vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) );
+      vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) );
+      vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) );
+      vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) );
+      vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) );
+      vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) );
+      vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) );
+      vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) );
+      vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) );
+      vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) );
+      vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) );
+      vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) );
+      vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) );
+      vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) );
+      vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) );
+      vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) );
+      vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) );
+      vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) );
+      vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) );
+      vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) );
+      vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) );
+      vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) );
+      vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) );
+      vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) );
+      vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) );
+      vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) );
+      vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) );
+      vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) );
+      vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) );
+      vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) );
+      vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) );
+      vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) );
+      vkCreateIndirectCommandsLayoutNVX = PFN_vkCreateIndirectCommandsLayoutNVX( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNVX" ) );
+      vkCreateObjectTableNVX = PFN_vkCreateObjectTableNVX( vkGetInstanceProcAddr( instance, "vkCreateObjectTableNVX" ) );
+      vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) );
+      vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) );
+      vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) );
+      vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) );
+      vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) );
+      vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) );
+      vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) );
+      vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) );
+      vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) );
+      vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) );
+      vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) );
+      vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) );
+      vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) );
+      vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) );
+      vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) );
+      vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) );
+      vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) );
+      vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) );
+      vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) );
+      vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) );
+      vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) );
+      vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) );
+      vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) );
+      vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) );
+      vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) );
+      vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) );
+      vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) );
+      vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) );
+      vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) );
+      vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) );
+      vkDestroyIndirectCommandsLayoutNVX = PFN_vkDestroyIndirectCommandsLayoutNVX( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNVX" ) );
+      vkDestroyObjectTableNVX = PFN_vkDestroyObjectTableNVX( vkGetInstanceProcAddr( instance, "vkDestroyObjectTableNVX" ) );
+      vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) );
+      vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) );
+      vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) );
+      vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) );
+      vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) );
+      vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) );
+      vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) );
+      vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) );
+      vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) );
+      vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) );
+      vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) );
+      vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) );
+      vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) );
+      vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) );
+      vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) );
+      vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) );
+      vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) );
+      vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) );
+      vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) );
+      vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+      vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+      vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) );
+      vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) );
+      vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) );
+      vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) );
+      vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) );
+      vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) );
+      vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) );
+      vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) );
+      vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
+      vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
+      vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) );
+      vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
+      vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) );
+      vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) );
+      vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) );
+      vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) );
+      vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
+      vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) );
+      vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) );
+      vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) );
+      vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) );
+      vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) );
+      vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) );
+      vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) );
+      vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) );
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+      vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+      vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) );
+      vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) );
+      vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) );
+      vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) );
+      vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) );
+      vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
+      vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) );
+      vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) );
+      vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) );
+      vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) );
+      vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) );
+      vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) );
+      vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) );
+      vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) );
+      vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) );
+      vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) );
+      vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) );
+      vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) );
+      vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) );
+      vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) );
+      vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) );
+      vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) );
+      vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) );
+      vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) );
+      vkRegisterObjectsNVX = PFN_vkRegisterObjectsNVX( vkGetInstanceProcAddr( instance, "vkRegisterObjectsNVX" ) );
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+      vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) );
+      vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) );
+      vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) );
+      vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) );
+      vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) );
+      vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) );
+      vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) );
+      vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) );
+      vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) );
+      vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) );
+      vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) );
+      vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) );
+      vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) );
+      vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) );
+      vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) );
+      vkUnregisterObjectsNVX = PFN_vkUnregisterObjectsNVX( vkGetInstanceProcAddr( instance, "vkUnregisterObjectsNVX" ) );
+      vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) );
+      vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) );
+      vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) );
+      vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) );
+      vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) );
+      vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) );
PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) ); + vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) ); + vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) ); + vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) ); + } + + void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT + { + VkDevice device = static_cast<VkDevice>(deviceCpp); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); + vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); + vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); + vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); + vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); + vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); + vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); + vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); + vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( 
vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); + vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); + vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); + vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); + vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); + vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); + vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); + vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); + vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdExecuteCommands = 
PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); + vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); + vkCmdProcessCommandsNVX = PFN_vkCmdProcessCommandsNVX( vkGetDeviceProcAddr( device, "vkCmdProcessCommandsNVX" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); + vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); + vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + vkCmdReserveSpaceForCommandsNVX = PFN_vkCmdReserveSpaceForCommandsNVX( vkGetDeviceProcAddr( device, "vkCmdReserveSpaceForCommandsNVX" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); + vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); + vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); + vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); + vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); + 
vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); + vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); + vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); + vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); + vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); + vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); + vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); + vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); + vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); + vkCreateAccelerationStructureNV = 
PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); + vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); + vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); + vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); + vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); + vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); + vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); + vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); + vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); + vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); + vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); + vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); + vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); + vkCreateIndirectCommandsLayoutNVX = PFN_vkCreateIndirectCommandsLayoutNVX( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNVX" ) ); + vkCreateObjectTableNVX = PFN_vkCreateObjectTableNVX( vkGetDeviceProcAddr( device, "vkCreateObjectTableNVX" ) ); + vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); + vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); + vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); + vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); + vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); + vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); + vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); + vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); + vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); + vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); + vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); + 
vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); + vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); + vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); + vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); + vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); + vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); + vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); + vkDestroyIndirectCommandsLayoutNVX = PFN_vkDestroyIndirectCommandsLayoutNVX( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNVX" ) ); + vkDestroyObjectTableNVX = PFN_vkDestroyObjectTableNVX( vkGetDeviceProcAddr( device, "vkDestroyObjectTableNVX" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); + vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); + vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); + vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); + vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); + vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); + vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( 
device, "vkDeviceWaitIdle" ) ); + vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); + vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); + vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); +#ifdef VK_USE_PLATFORM_ANDROID_KHR + vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); + vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) ); + vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); + vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); + vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); + vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) ); + vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) ); + vkGetFenceFdKHR = 
PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) ); +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); + vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); + vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); + vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); + vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); +#ifdef VK_USE_PLATFORM_ANDROID_KHR + vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); + vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); + vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); + vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + vkGetPipelineExecutablePropertiesKHR = 
PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); + vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); + vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); + vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); + vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) ); + vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); + vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); + vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); + vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); + vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); + vkRegisterObjectsNVX = PFN_vkRegisterObjectsNVX( vkGetDeviceProcAddr( device, "vkRegisterObjectsNVX" ) ); +#ifdef VK_USE_PLATFORM_WIN32_KHR + vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + vkReleasePerformanceConfigurationINTEL = 
PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); + vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); + vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); + vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); + vkUnregisterObjectsNVX = PFN_vkUnregisterObjectsNVX( vkGetDeviceProcAddr( device, "vkUnregisterObjectsNVX" ) ); + vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); + vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); + vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) ); + vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) ); + vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) ); + vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) ); + } + }; + +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/enums.hpp b/app/libraries/VulkanHpp/vulkan/internal/enums.hpp new file mode 100644 index 00000000..b3da43b5 --- /dev/null +++ 
b/app/libraries/VulkanHpp/vulkan/internal/enums.hpp @@ -0,0 +1,5213 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "objects.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + enum class AccelerationStructureMemoryRequirementsTypeNV + { + eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV, + eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV, + eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV + }; + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMemoryRequirementsTypeNV value ) + { + switch ( value ) + { + case AccelerationStructureMemoryRequirementsTypeNV::eObject : return "Object"; + case AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch : return "BuildScratch"; + case AccelerationStructureMemoryRequirementsTypeNV::eUpdateScratch : return "UpdateScratch"; + default: return "invalid"; + } + } + + enum class AccelerationStructureTypeNV + { + eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV, + eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV + }; + + VULKAN_HPP_INLINE std::string to_string( AccelerationStructureTypeNV value ) + { + switch ( value ) + { + case AccelerationStructureTypeNV::eTopLevel : return "TopLevel"; + case AccelerationStructureTypeNV::eBottomLevel : return "BottomLevel"; + default: return "invalid"; + } + } + + enum class AccessFlagBits : VkAccessFlags + { + eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT, + eIndexRead = VK_ACCESS_INDEX_READ_BIT, + eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT, + eUniformRead = VK_ACCESS_UNIFORM_READ_BIT, + eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, + eShaderRead = VK_ACCESS_SHADER_READ_BIT, + eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT, + eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, + eColorAttachmentWrite = 
VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, + eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT, + eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, + eTransferRead = VK_ACCESS_TRANSFER_READ_BIT, + eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT, + eHostRead = VK_ACCESS_HOST_READ_BIT, + eHostWrite = VK_ACCESS_HOST_WRITE_BIT, + eMemoryRead = VK_ACCESS_MEMORY_READ_BIT, + eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT, + eTransformFeedbackWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT, + eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT, + eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, + eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT, + eCommandProcessReadNVX = VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX, + eCommandProcessWriteNVX = VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX, + eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT, + eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV, + eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV, + eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV, + eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( AccessFlagBits value ) + { + switch ( value ) + { + case AccessFlagBits::eIndirectCommandRead : return "IndirectCommandRead"; + case AccessFlagBits::eIndexRead : return "IndexRead"; + case AccessFlagBits::eVertexAttributeRead : return "VertexAttributeRead"; + case AccessFlagBits::eUniformRead : return "UniformRead"; + case AccessFlagBits::eInputAttachmentRead : return "InputAttachmentRead"; + case AccessFlagBits::eShaderRead : return "ShaderRead"; + case AccessFlagBits::eShaderWrite : return "ShaderWrite"; + case AccessFlagBits::eColorAttachmentRead : return "ColorAttachmentRead"; + case AccessFlagBits::eColorAttachmentWrite : return "ColorAttachmentWrite"; + case AccessFlagBits::eDepthStencilAttachmentRead : return "DepthStencilAttachmentRead"; + case AccessFlagBits::eDepthStencilAttachmentWrite : return "DepthStencilAttachmentWrite"; + case AccessFlagBits::eTransferRead : return "TransferRead"; + case AccessFlagBits::eTransferWrite : return "TransferWrite"; + case AccessFlagBits::eHostRead : return "HostRead"; + case AccessFlagBits::eHostWrite : return "HostWrite"; + case AccessFlagBits::eMemoryRead : return "MemoryRead"; + case AccessFlagBits::eMemoryWrite : return "MemoryWrite"; + case AccessFlagBits::eTransformFeedbackWriteEXT : return "TransformFeedbackWriteEXT"; + case AccessFlagBits::eTransformFeedbackCounterReadEXT : return "TransformFeedbackCounterReadEXT"; + case AccessFlagBits::eTransformFeedbackCounterWriteEXT : return "TransformFeedbackCounterWriteEXT"; + case AccessFlagBits::eConditionalRenderingReadEXT : return "ConditionalRenderingReadEXT"; + case AccessFlagBits::eCommandProcessReadNVX : return "CommandProcessReadNVX"; + case AccessFlagBits::eCommandProcessWriteNVX : return "CommandProcessWriteNVX"; + case AccessFlagBits::eColorAttachmentReadNoncoherentEXT : return "ColorAttachmentReadNoncoherentEXT"; + case AccessFlagBits::eShadingRateImageReadNV : return "ShadingRateImageReadNV"; + case AccessFlagBits::eAccelerationStructureReadNV : return "AccelerationStructureReadNV"; + case AccessFlagBits::eAccelerationStructureWriteNV : return "AccelerationStructureWriteNV"; + case AccessFlagBits::eFragmentDensityMapReadEXT : return 
"FragmentDensityMapReadEXT"; + default: return "invalid"; + } + } + + enum class AttachmentDescriptionFlagBits : VkAttachmentDescriptionFlags + { + eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlagBits value ) + { + switch ( value ) + { + case AttachmentDescriptionFlagBits::eMayAlias : return "MayAlias"; + default: return "invalid"; + } + } + + enum class AttachmentLoadOp + { + eLoad = VK_ATTACHMENT_LOAD_OP_LOAD, + eClear = VK_ATTACHMENT_LOAD_OP_CLEAR, + eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE + }; + + VULKAN_HPP_INLINE std::string to_string( AttachmentLoadOp value ) + { + switch ( value ) + { + case AttachmentLoadOp::eLoad : return "Load"; + case AttachmentLoadOp::eClear : return "Clear"; + case AttachmentLoadOp::eDontCare : return "DontCare"; + default: return "invalid"; + } + } + + enum class AttachmentStoreOp + { + eStore = VK_ATTACHMENT_STORE_OP_STORE, + eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE + }; + + VULKAN_HPP_INLINE std::string to_string( AttachmentStoreOp value ) + { + switch ( value ) + { + case AttachmentStoreOp::eStore : return "Store"; + case AttachmentStoreOp::eDontCare : return "DontCare"; + default: return "invalid"; + } + } + + enum class BlendFactor + { + eZero = VK_BLEND_FACTOR_ZERO, + eOne = VK_BLEND_FACTOR_ONE, + eSrcColor = VK_BLEND_FACTOR_SRC_COLOR, + eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR, + eDstColor = VK_BLEND_FACTOR_DST_COLOR, + eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR, + eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA, + eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA, + eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA, + eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA, + eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR, + eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR, + eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA, + eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA, + eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE, + eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR, + eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR, + eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA, + eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA + }; + + VULKAN_HPP_INLINE std::string to_string( BlendFactor value ) + { + switch ( value ) + { + case BlendFactor::eZero : return "Zero"; + case BlendFactor::eOne : return "One"; + case BlendFactor::eSrcColor : return "SrcColor"; + case BlendFactor::eOneMinusSrcColor : return "OneMinusSrcColor"; + case BlendFactor::eDstColor : return "DstColor"; + case BlendFactor::eOneMinusDstColor : return "OneMinusDstColor"; + case BlendFactor::eSrcAlpha : return "SrcAlpha"; + case BlendFactor::eOneMinusSrcAlpha : return "OneMinusSrcAlpha"; + case BlendFactor::eDstAlpha : return "DstAlpha"; + case BlendFactor::eOneMinusDstAlpha : return "OneMinusDstAlpha"; + case BlendFactor::eConstantColor : return "ConstantColor"; + case BlendFactor::eOneMinusConstantColor : return "OneMinusConstantColor"; + case BlendFactor::eConstantAlpha : return "ConstantAlpha"; + case BlendFactor::eOneMinusConstantAlpha : return "OneMinusConstantAlpha"; + case BlendFactor::eSrcAlphaSaturate : return "SrcAlphaSaturate"; + case BlendFactor::eSrc1Color : return "Src1Color"; + case BlendFactor::eOneMinusSrc1Color : return "OneMinusSrc1Color"; + case BlendFactor::eSrc1Alpha : return "Src1Alpha"; + case BlendFactor::eOneMinusSrc1Alpha : return "OneMinusSrc1Alpha"; + default: return "invalid"; + } + } + + enum class BlendOp + { + 
eAdd = VK_BLEND_OP_ADD, + eSubtract = VK_BLEND_OP_SUBTRACT, + eReverseSubtract = VK_BLEND_OP_REVERSE_SUBTRACT, + eMin = VK_BLEND_OP_MIN, + eMax = VK_BLEND_OP_MAX, + eZeroEXT = VK_BLEND_OP_ZERO_EXT, + eSrcEXT = VK_BLEND_OP_SRC_EXT, + eDstEXT = VK_BLEND_OP_DST_EXT, + eSrcOverEXT = VK_BLEND_OP_SRC_OVER_EXT, + eDstOverEXT = VK_BLEND_OP_DST_OVER_EXT, + eSrcInEXT = VK_BLEND_OP_SRC_IN_EXT, + eDstInEXT = VK_BLEND_OP_DST_IN_EXT, + eSrcOutEXT = VK_BLEND_OP_SRC_OUT_EXT, + eDstOutEXT = VK_BLEND_OP_DST_OUT_EXT, + eSrcAtopEXT = VK_BLEND_OP_SRC_ATOP_EXT, + eDstAtopEXT = VK_BLEND_OP_DST_ATOP_EXT, + eXorEXT = VK_BLEND_OP_XOR_EXT, + eMultiplyEXT = VK_BLEND_OP_MULTIPLY_EXT, + eScreenEXT = VK_BLEND_OP_SCREEN_EXT, + eOverlayEXT = VK_BLEND_OP_OVERLAY_EXT, + eDarkenEXT = VK_BLEND_OP_DARKEN_EXT, + eLightenEXT = VK_BLEND_OP_LIGHTEN_EXT, + eColordodgeEXT = VK_BLEND_OP_COLORDODGE_EXT, + eColorburnEXT = VK_BLEND_OP_COLORBURN_EXT, + eHardlightEXT = VK_BLEND_OP_HARDLIGHT_EXT, + eSoftlightEXT = VK_BLEND_OP_SOFTLIGHT_EXT, + eDifferenceEXT = VK_BLEND_OP_DIFFERENCE_EXT, + eExclusionEXT = VK_BLEND_OP_EXCLUSION_EXT, + eInvertEXT = VK_BLEND_OP_INVERT_EXT, + eInvertRgbEXT = VK_BLEND_OP_INVERT_RGB_EXT, + eLineardodgeEXT = VK_BLEND_OP_LINEARDODGE_EXT, + eLinearburnEXT = VK_BLEND_OP_LINEARBURN_EXT, + eVividlightEXT = VK_BLEND_OP_VIVIDLIGHT_EXT, + eLinearlightEXT = VK_BLEND_OP_LINEARLIGHT_EXT, + ePinlightEXT = VK_BLEND_OP_PINLIGHT_EXT, + eHardmixEXT = VK_BLEND_OP_HARDMIX_EXT, + eHslHueEXT = VK_BLEND_OP_HSL_HUE_EXT, + eHslSaturationEXT = VK_BLEND_OP_HSL_SATURATION_EXT, + eHslColorEXT = VK_BLEND_OP_HSL_COLOR_EXT, + eHslLuminosityEXT = VK_BLEND_OP_HSL_LUMINOSITY_EXT, + ePlusEXT = VK_BLEND_OP_PLUS_EXT, + ePlusClampedEXT = VK_BLEND_OP_PLUS_CLAMPED_EXT, + ePlusClampedAlphaEXT = VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT, + ePlusDarkerEXT = VK_BLEND_OP_PLUS_DARKER_EXT, + eMinusEXT = VK_BLEND_OP_MINUS_EXT, + eMinusClampedEXT = VK_BLEND_OP_MINUS_CLAMPED_EXT, + eContrastEXT = VK_BLEND_OP_CONTRAST_EXT, + eInvertOvgEXT = VK_BLEND_OP_INVERT_OVG_EXT, + eRedEXT = VK_BLEND_OP_RED_EXT, + eGreenEXT = VK_BLEND_OP_GREEN_EXT, + eBlueEXT = VK_BLEND_OP_BLUE_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( BlendOp value ) + { + switch ( value ) + { + case BlendOp::eAdd : return "Add"; + case BlendOp::eSubtract : return "Subtract"; + case BlendOp::eReverseSubtract : return "ReverseSubtract"; + case BlendOp::eMin : return "Min"; + case BlendOp::eMax : return "Max"; + case BlendOp::eZeroEXT : return "ZeroEXT"; + case BlendOp::eSrcEXT : return "SrcEXT"; + case BlendOp::eDstEXT : return "DstEXT"; + case BlendOp::eSrcOverEXT : return "SrcOverEXT"; + case BlendOp::eDstOverEXT : return "DstOverEXT"; + case BlendOp::eSrcInEXT : return "SrcInEXT"; + case BlendOp::eDstInEXT : return "DstInEXT"; + case BlendOp::eSrcOutEXT : return "SrcOutEXT"; + case BlendOp::eDstOutEXT : return "DstOutEXT"; + case BlendOp::eSrcAtopEXT : return "SrcAtopEXT"; + case BlendOp::eDstAtopEXT : return "DstAtopEXT"; + case BlendOp::eXorEXT : return "XorEXT"; + case BlendOp::eMultiplyEXT : return "MultiplyEXT"; + case BlendOp::eScreenEXT : return "ScreenEXT"; + case BlendOp::eOverlayEXT : return "OverlayEXT"; + case BlendOp::eDarkenEXT : return "DarkenEXT"; + case BlendOp::eLightenEXT : return "LightenEXT"; + case BlendOp::eColordodgeEXT : return "ColordodgeEXT"; + case BlendOp::eColorburnEXT : return "ColorburnEXT"; + case BlendOp::eHardlightEXT : return "HardlightEXT"; + case BlendOp::eSoftlightEXT : return "SoftlightEXT"; + case BlendOp::eDifferenceEXT : return "DifferenceEXT"; + case 
BlendOp::eExclusionEXT : return "ExclusionEXT"; + case BlendOp::eInvertEXT : return "InvertEXT"; + case BlendOp::eInvertRgbEXT : return "InvertRgbEXT"; + case BlendOp::eLineardodgeEXT : return "LineardodgeEXT"; + case BlendOp::eLinearburnEXT : return "LinearburnEXT"; + case BlendOp::eVividlightEXT : return "VividlightEXT"; + case BlendOp::eLinearlightEXT : return "LinearlightEXT"; + case BlendOp::ePinlightEXT : return "PinlightEXT"; + case BlendOp::eHardmixEXT : return "HardmixEXT"; + case BlendOp::eHslHueEXT : return "HslHueEXT"; + case BlendOp::eHslSaturationEXT : return "HslSaturationEXT"; + case BlendOp::eHslColorEXT : return "HslColorEXT"; + case BlendOp::eHslLuminosityEXT : return "HslLuminosityEXT"; + case BlendOp::ePlusEXT : return "PlusEXT"; + case BlendOp::ePlusClampedEXT : return "PlusClampedEXT"; + case BlendOp::ePlusClampedAlphaEXT : return "PlusClampedAlphaEXT"; + case BlendOp::ePlusDarkerEXT : return "PlusDarkerEXT"; + case BlendOp::eMinusEXT : return "MinusEXT"; + case BlendOp::eMinusClampedEXT : return "MinusClampedEXT"; + case BlendOp::eContrastEXT : return "ContrastEXT"; + case BlendOp::eInvertOvgEXT : return "InvertOvgEXT"; + case BlendOp::eRedEXT : return "RedEXT"; + case BlendOp::eGreenEXT : return "GreenEXT"; + case BlendOp::eBlueEXT : return "BlueEXT"; + default: return "invalid"; + } + } + + enum class BlendOverlapEXT + { + eUncorrelated = VK_BLEND_OVERLAP_UNCORRELATED_EXT, + eDisjoint = VK_BLEND_OVERLAP_DISJOINT_EXT, + eConjoint = VK_BLEND_OVERLAP_CONJOINT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( BlendOverlapEXT value ) + { + switch ( value ) + { + case BlendOverlapEXT::eUncorrelated : return "Uncorrelated"; + case BlendOverlapEXT::eDisjoint : return "Disjoint"; + case BlendOverlapEXT::eConjoint : return "Conjoint"; + default: return "invalid"; + } + } + + enum class BorderColor + { + eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, + eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK, + eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK, + eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK, + eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE, + eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE + }; + + VULKAN_HPP_INLINE std::string to_string( BorderColor value ) + { + switch ( value ) + { + case BorderColor::eFloatTransparentBlack : return "FloatTransparentBlack"; + case BorderColor::eIntTransparentBlack : return "IntTransparentBlack"; + case BorderColor::eFloatOpaqueBlack : return "FloatOpaqueBlack"; + case BorderColor::eIntOpaqueBlack : return "IntOpaqueBlack"; + case BorderColor::eFloatOpaqueWhite : return "FloatOpaqueWhite"; + case BorderColor::eIntOpaqueWhite : return "IntOpaqueWhite"; + default: return "invalid"; + } + } + + enum class BufferCreateFlagBits : VkBufferCreateFlags + { + eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT, + eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT, + eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT, + eProtected = VK_BUFFER_CREATE_PROTECTED_BIT, + eDeviceAddressCaptureReplayEXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( BufferCreateFlagBits value ) + { + switch ( value ) + { + case BufferCreateFlagBits::eSparseBinding : return "SparseBinding"; + case BufferCreateFlagBits::eSparseResidency : return "SparseResidency"; + case BufferCreateFlagBits::eSparseAliased : return "SparseAliased"; + case BufferCreateFlagBits::eProtected : return "Protected"; + case 
BufferCreateFlagBits::eDeviceAddressCaptureReplayEXT : return "DeviceAddressCaptureReplayEXT"; + default: return "invalid"; + } + } + + enum class BufferUsageFlagBits : VkBufferUsageFlags + { + eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT, + eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT, + eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT, + eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, + eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, + eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT, + eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, + eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, + eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, + eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT, + eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT, + eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV, + eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits value ) + { + switch ( value ) + { + case BufferUsageFlagBits::eTransferSrc : return "TransferSrc"; + case BufferUsageFlagBits::eTransferDst : return "TransferDst"; + case BufferUsageFlagBits::eUniformTexelBuffer : return "UniformTexelBuffer"; + case BufferUsageFlagBits::eStorageTexelBuffer : return "StorageTexelBuffer"; + case BufferUsageFlagBits::eUniformBuffer : return "UniformBuffer"; + case BufferUsageFlagBits::eStorageBuffer : return "StorageBuffer"; + case BufferUsageFlagBits::eIndexBuffer : return "IndexBuffer"; + case BufferUsageFlagBits::eVertexBuffer : return "VertexBuffer"; + case BufferUsageFlagBits::eIndirectBuffer : return "IndirectBuffer"; + case BufferUsageFlagBits::eTransformFeedbackBufferEXT : return "TransformFeedbackBufferEXT"; + case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT : return "TransformFeedbackCounterBufferEXT"; + case BufferUsageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT"; + case BufferUsageFlagBits::eRayTracingNV : return "RayTracingNV"; + case BufferUsageFlagBits::eShaderDeviceAddressEXT : return "ShaderDeviceAddressEXT"; + default: return "invalid"; + } + } + + enum class BufferViewCreateFlagBits + {}; + + VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlagBits ) + { + return "(void)"; + } + + enum class BuildAccelerationStructureFlagBitsNV : VkBuildAccelerationStructureFlagsNV + { + eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV, + eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV, + ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV, + ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV, + eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsNV value ) + { + switch ( value ) + { + case BuildAccelerationStructureFlagBitsNV::eAllowUpdate : return "AllowUpdate"; + case BuildAccelerationStructureFlagBitsNV::eAllowCompaction : return "AllowCompaction"; + case BuildAccelerationStructureFlagBitsNV::ePreferFastTrace : return "PreferFastTrace"; + case BuildAccelerationStructureFlagBitsNV::ePreferFastBuild : return "PreferFastBuild"; + case BuildAccelerationStructureFlagBitsNV::eLowMemory : return "LowMemory"; + default: return "invalid"; + } + } + + enum class ChromaLocation + { + eCositedEven = 
VK_CHROMA_LOCATION_COSITED_EVEN, + eMidpoint = VK_CHROMA_LOCATION_MIDPOINT + }; + using ChromaLocationKHR = ChromaLocation; + + VULKAN_HPP_INLINE std::string to_string( ChromaLocation value ) + { + switch ( value ) + { + case ChromaLocation::eCositedEven : return "CositedEven"; + case ChromaLocation::eMidpoint : return "Midpoint"; + default: return "invalid"; + } + } + + enum class CoarseSampleOrderTypeNV + { + eDefault = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV, + eCustom = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV, + ePixelMajor = VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV, + eSampleMajor = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV + }; + + VULKAN_HPP_INLINE std::string to_string( CoarseSampleOrderTypeNV value ) + { + switch ( value ) + { + case CoarseSampleOrderTypeNV::eDefault : return "Default"; + case CoarseSampleOrderTypeNV::eCustom : return "Custom"; + case CoarseSampleOrderTypeNV::ePixelMajor : return "PixelMajor"; + case CoarseSampleOrderTypeNV::eSampleMajor : return "SampleMajor"; + default: return "invalid"; + } + } + + enum class ColorComponentFlagBits : VkColorComponentFlags + { + eR = VK_COLOR_COMPONENT_R_BIT, + eG = VK_COLOR_COMPONENT_G_BIT, + eB = VK_COLOR_COMPONENT_B_BIT, + eA = VK_COLOR_COMPONENT_A_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( ColorComponentFlagBits value ) + { + switch ( value ) + { + case ColorComponentFlagBits::eR : return "R"; + case ColorComponentFlagBits::eG : return "G"; + case ColorComponentFlagBits::eB : return "B"; + case ColorComponentFlagBits::eA : return "A"; + default: return "invalid"; + } + } + + enum class ColorSpaceKHR + { + eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT, + eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT, + eDisplayP3LinearEXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT, + eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT, + eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT, + eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT, + eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT, + eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT, + eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT, + eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT, + eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT, + eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT, + ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT, + eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT, + eDisplayNativeAMD = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD, + eVkColorspaceSrgbNonlinear = VK_COLORSPACE_SRGB_NONLINEAR_KHR, + eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ColorSpaceKHR value ) + { + switch ( value ) + { + case ColorSpaceKHR::eSrgbNonlinear : return "SrgbNonlinear"; + case ColorSpaceKHR::eDisplayP3NonlinearEXT : return "DisplayP3NonlinearEXT"; + case ColorSpaceKHR::eExtendedSrgbLinearEXT : return "ExtendedSrgbLinearEXT"; + case ColorSpaceKHR::eDisplayP3LinearEXT : return "DisplayP3LinearEXT"; + case ColorSpaceKHR::eDciP3NonlinearEXT : return "DciP3NonlinearEXT"; + case ColorSpaceKHR::eBt709LinearEXT : return "Bt709LinearEXT"; + case ColorSpaceKHR::eBt709NonlinearEXT : return "Bt709NonlinearEXT"; + case ColorSpaceKHR::eBt2020LinearEXT : return "Bt2020LinearEXT"; + case ColorSpaceKHR::eHdr10St2084EXT : return "Hdr10St2084EXT"; + case ColorSpaceKHR::eDolbyvisionEXT : return "DolbyvisionEXT"; + case ColorSpaceKHR::eHdr10HlgEXT : return "Hdr10HlgEXT"; + case 
ColorSpaceKHR::eAdobergbLinearEXT : return "AdobergbLinearEXT"; + case ColorSpaceKHR::eAdobergbNonlinearEXT : return "AdobergbNonlinearEXT"; + case ColorSpaceKHR::ePassThroughEXT : return "PassThroughEXT"; + case ColorSpaceKHR::eExtendedSrgbNonlinearEXT : return "ExtendedSrgbNonlinearEXT"; + case ColorSpaceKHR::eDisplayNativeAMD : return "DisplayNativeAMD"; + default: return "invalid"; + } + } + + enum class CommandBufferLevel + { + ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY, + eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY + }; + + VULKAN_HPP_INLINE std::string to_string( CommandBufferLevel value ) + { + switch ( value ) + { + case CommandBufferLevel::ePrimary : return "Primary"; + case CommandBufferLevel::eSecondary : return "Secondary"; + default: return "invalid"; + } + } + + enum class CommandBufferResetFlagBits : VkCommandBufferResetFlags + { + eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlagBits value ) + { + switch ( value ) + { + case CommandBufferResetFlagBits::eReleaseResources : return "ReleaseResources"; + default: return "invalid"; + } + } + + enum class CommandBufferUsageFlagBits : VkCommandBufferUsageFlags + { + eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT, + eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, + eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlagBits value ) + { + switch ( value ) + { + case CommandBufferUsageFlagBits::eOneTimeSubmit : return "OneTimeSubmit"; + case CommandBufferUsageFlagBits::eRenderPassContinue : return "RenderPassContinue"; + case CommandBufferUsageFlagBits::eSimultaneousUse : return "SimultaneousUse"; + default: return "invalid"; + } + } + + enum class CommandPoolCreateFlagBits : VkCommandPoolCreateFlags + { + eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, + eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, + eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlagBits value ) + { + switch ( value ) + { + case CommandPoolCreateFlagBits::eTransient : return "Transient"; + case CommandPoolCreateFlagBits::eResetCommandBuffer : return "ResetCommandBuffer"; + case CommandPoolCreateFlagBits::eProtected : return "Protected"; + default: return "invalid"; + } + } + + enum class CommandPoolResetFlagBits : VkCommandPoolResetFlags + { + eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlagBits value ) + { + switch ( value ) + { + case CommandPoolResetFlagBits::eReleaseResources : return "ReleaseResources"; + default: return "invalid"; + } + } + + enum class CompareOp + { + eNever = VK_COMPARE_OP_NEVER, + eLess = VK_COMPARE_OP_LESS, + eEqual = VK_COMPARE_OP_EQUAL, + eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL, + eGreater = VK_COMPARE_OP_GREATER, + eNotEqual = VK_COMPARE_OP_NOT_EQUAL, + eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL, + eAlways = VK_COMPARE_OP_ALWAYS + }; + + VULKAN_HPP_INLINE std::string to_string( CompareOp value ) + { + switch ( value ) + { + case CompareOp::eNever : return "Never"; + case CompareOp::eLess : return "Less"; + case CompareOp::eEqual : return "Equal"; + case CompareOp::eLessOrEqual : return "LessOrEqual"; + case CompareOp::eGreater : return "Greater"; + case CompareOp::eNotEqual : return "NotEqual"; + case CompareOp::eGreaterOrEqual : return 
"GreaterOrEqual"; + case CompareOp::eAlways : return "Always"; + default: return "invalid"; + } + } + + enum class ComponentSwizzle + { + eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY, + eZero = VK_COMPONENT_SWIZZLE_ZERO, + eOne = VK_COMPONENT_SWIZZLE_ONE, + eR = VK_COMPONENT_SWIZZLE_R, + eG = VK_COMPONENT_SWIZZLE_G, + eB = VK_COMPONENT_SWIZZLE_B, + eA = VK_COMPONENT_SWIZZLE_A + }; + + VULKAN_HPP_INLINE std::string to_string( ComponentSwizzle value ) + { + switch ( value ) + { + case ComponentSwizzle::eIdentity : return "Identity"; + case ComponentSwizzle::eZero : return "Zero"; + case ComponentSwizzle::eOne : return "One"; + case ComponentSwizzle::eR : return "R"; + case ComponentSwizzle::eG : return "G"; + case ComponentSwizzle::eB : return "B"; + case ComponentSwizzle::eA : return "A"; + default: return "invalid"; + } + } + + enum class ComponentTypeNV + { + eFloat16 = VK_COMPONENT_TYPE_FLOAT16_NV, + eFloat32 = VK_COMPONENT_TYPE_FLOAT32_NV, + eFloat64 = VK_COMPONENT_TYPE_FLOAT64_NV, + eSint8 = VK_COMPONENT_TYPE_SINT8_NV, + eSint16 = VK_COMPONENT_TYPE_SINT16_NV, + eSint32 = VK_COMPONENT_TYPE_SINT32_NV, + eSint64 = VK_COMPONENT_TYPE_SINT64_NV, + eUint8 = VK_COMPONENT_TYPE_UINT8_NV, + eUint16 = VK_COMPONENT_TYPE_UINT16_NV, + eUint32 = VK_COMPONENT_TYPE_UINT32_NV, + eUint64 = VK_COMPONENT_TYPE_UINT64_NV + }; + + VULKAN_HPP_INLINE std::string to_string( ComponentTypeNV value ) + { + switch ( value ) + { + case ComponentTypeNV::eFloat16 : return "Float16"; + case ComponentTypeNV::eFloat32 : return "Float32"; + case ComponentTypeNV::eFloat64 : return "Float64"; + case ComponentTypeNV::eSint8 : return "Sint8"; + case ComponentTypeNV::eSint16 : return "Sint16"; + case ComponentTypeNV::eSint32 : return "Sint32"; + case ComponentTypeNV::eSint64 : return "Sint64"; + case ComponentTypeNV::eUint8 : return "Uint8"; + case ComponentTypeNV::eUint16 : return "Uint16"; + case ComponentTypeNV::eUint32 : return "Uint32"; + case ComponentTypeNV::eUint64 : return "Uint64"; + default: return "invalid"; + } + } + + enum class CompositeAlphaFlagBitsKHR : VkCompositeAlphaFlagsKHR + { + eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR, + ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR, + ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR, + eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagBitsKHR value ) + { + switch ( value ) + { + case CompositeAlphaFlagBitsKHR::eOpaque : return "Opaque"; + case CompositeAlphaFlagBitsKHR::ePreMultiplied : return "PreMultiplied"; + case CompositeAlphaFlagBitsKHR::ePostMultiplied : return "PostMultiplied"; + case CompositeAlphaFlagBitsKHR::eInherit : return "Inherit"; + default: return "invalid"; + } + } + + enum class ConditionalRenderingFlagBitsEXT : VkConditionalRenderingFlagsEXT + { + eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagBitsEXT value ) + { + switch ( value ) + { + case ConditionalRenderingFlagBitsEXT::eInverted : return "Inverted"; + default: return "invalid"; + } + } + + enum class ConservativeRasterizationModeEXT + { + eDisabled = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT, + eOverestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT, + eUnderestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ConservativeRasterizationModeEXT value ) + { + switch ( value ) + { + case ConservativeRasterizationModeEXT::eDisabled : return "Disabled"; + 
case ConservativeRasterizationModeEXT::eOverestimate : return "Overestimate"; + case ConservativeRasterizationModeEXT::eUnderestimate : return "Underestimate"; + default: return "invalid"; + } + } + + enum class CopyAccelerationStructureModeNV + { + eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV, + eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( CopyAccelerationStructureModeNV value ) + { + switch ( value ) + { + case CopyAccelerationStructureModeNV::eClone : return "Clone"; + case CopyAccelerationStructureModeNV::eCompact : return "Compact"; + default: return "invalid"; + } + } + + enum class CoverageModulationModeNV + { + eNone = VK_COVERAGE_MODULATION_MODE_NONE_NV, + eRgb = VK_COVERAGE_MODULATION_MODE_RGB_NV, + eAlpha = VK_COVERAGE_MODULATION_MODE_ALPHA_NV, + eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV + }; + + VULKAN_HPP_INLINE std::string to_string( CoverageModulationModeNV value ) + { + switch ( value ) + { + case CoverageModulationModeNV::eNone : return "None"; + case CoverageModulationModeNV::eRgb : return "Rgb"; + case CoverageModulationModeNV::eAlpha : return "Alpha"; + case CoverageModulationModeNV::eRgba : return "Rgba"; + default: return "invalid"; + } + } + + enum class CoverageReductionModeNV + { + eMerge = VK_COVERAGE_REDUCTION_MODE_MERGE_NV, + eTruncate = VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV + }; + + VULKAN_HPP_INLINE std::string to_string( CoverageReductionModeNV value ) + { + switch ( value ) + { + case CoverageReductionModeNV::eMerge : return "Merge"; + case CoverageReductionModeNV::eTruncate : return "Truncate"; + default: return "invalid"; + } + } + + enum class CullModeFlagBits : VkCullModeFlags + { + eNone = VK_CULL_MODE_NONE, + eFront = VK_CULL_MODE_FRONT_BIT, + eBack = VK_CULL_MODE_BACK_BIT, + eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK + }; + + VULKAN_HPP_INLINE std::string to_string( CullModeFlagBits value ) + { + switch ( value ) + { + case CullModeFlagBits::eNone : return "None"; + case CullModeFlagBits::eFront : return "Front"; + case CullModeFlagBits::eBack : return "Back"; + case CullModeFlagBits::eFrontAndBack : return "FrontAndBack"; + default: return "invalid"; + } + } + + enum class DebugReportFlagBitsEXT : VkDebugReportFlagsEXT + { + eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT, + eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT, + ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT, + eError = VK_DEBUG_REPORT_ERROR_BIT_EXT, + eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DebugReportFlagBitsEXT value ) + { + switch ( value ) + { + case DebugReportFlagBitsEXT::eInformation : return "Information"; + case DebugReportFlagBitsEXT::eWarning : return "Warning"; + case DebugReportFlagBitsEXT::ePerformanceWarning : return "PerformanceWarning"; + case DebugReportFlagBitsEXT::eError : return "Error"; + case DebugReportFlagBitsEXT::eDebug : return "Debug"; + default: return "invalid"; + } + } + + enum class DebugReportObjectTypeEXT + { + eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, + eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT, + ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT, + eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT, + eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT, + eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT, + eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT, + eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT, + eDeviceMemory = 
VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT, + eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT, + eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT, + eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT, + eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT, + eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT, + eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT, + eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT, + ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT, + ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT, + eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT, + ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT, + eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT, + eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT, + eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT, + eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT, + eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT, + eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT, + eSurfaceKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT, + eSwapchainKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT, + eDebugReportCallbackEXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, + eDisplayKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT, + eDisplayModeKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT, + eObjectTableNVX = VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT, + eIndirectCommandsLayoutNVX = VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT, + eValidationCacheEXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, + eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, + eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, + eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT, + eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT, + eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT, + eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT, + eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DebugReportObjectTypeEXT value ) + { + switch ( value ) + { + case DebugReportObjectTypeEXT::eUnknown : return "Unknown"; + case DebugReportObjectTypeEXT::eInstance : return "Instance"; + case DebugReportObjectTypeEXT::ePhysicalDevice : return "PhysicalDevice"; + case DebugReportObjectTypeEXT::eDevice : return "Device"; + case DebugReportObjectTypeEXT::eQueue : return "Queue"; + case DebugReportObjectTypeEXT::eSemaphore : return "Semaphore"; + case DebugReportObjectTypeEXT::eCommandBuffer : return "CommandBuffer"; + case DebugReportObjectTypeEXT::eFence : return "Fence"; + case DebugReportObjectTypeEXT::eDeviceMemory : return "DeviceMemory"; + case DebugReportObjectTypeEXT::eBuffer : return "Buffer"; + case DebugReportObjectTypeEXT::eImage : return "Image"; + case DebugReportObjectTypeEXT::eEvent : return "Event"; + case DebugReportObjectTypeEXT::eQueryPool : return "QueryPool"; + case DebugReportObjectTypeEXT::eBufferView : return "BufferView"; + case DebugReportObjectTypeEXT::eImageView : return "ImageView"; + case DebugReportObjectTypeEXT::eShaderModule : return "ShaderModule"; + case DebugReportObjectTypeEXT::ePipelineCache : return "PipelineCache"; + case DebugReportObjectTypeEXT::ePipelineLayout : return 
"PipelineLayout"; + case DebugReportObjectTypeEXT::eRenderPass : return "RenderPass"; + case DebugReportObjectTypeEXT::ePipeline : return "Pipeline"; + case DebugReportObjectTypeEXT::eDescriptorSetLayout : return "DescriptorSetLayout"; + case DebugReportObjectTypeEXT::eSampler : return "Sampler"; + case DebugReportObjectTypeEXT::eDescriptorPool : return "DescriptorPool"; + case DebugReportObjectTypeEXT::eDescriptorSet : return "DescriptorSet"; + case DebugReportObjectTypeEXT::eFramebuffer : return "Framebuffer"; + case DebugReportObjectTypeEXT::eCommandPool : return "CommandPool"; + case DebugReportObjectTypeEXT::eSurfaceKHR : return "SurfaceKHR"; + case DebugReportObjectTypeEXT::eSwapchainKHR : return "SwapchainKHR"; + case DebugReportObjectTypeEXT::eDebugReportCallbackEXT : return "DebugReportCallbackEXT"; + case DebugReportObjectTypeEXT::eDisplayKHR : return "DisplayKHR"; + case DebugReportObjectTypeEXT::eDisplayModeKHR : return "DisplayModeKHR"; + case DebugReportObjectTypeEXT::eObjectTableNVX : return "ObjectTableNVX"; + case DebugReportObjectTypeEXT::eIndirectCommandsLayoutNVX : return "IndirectCommandsLayoutNVX"; + case DebugReportObjectTypeEXT::eValidationCacheEXT : return "ValidationCacheEXT"; + case DebugReportObjectTypeEXT::eSamplerYcbcrConversion : return "SamplerYcbcrConversion"; + case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate : return "DescriptorUpdateTemplate"; + case DebugReportObjectTypeEXT::eAccelerationStructureNV : return "AccelerationStructureNV"; + default: return "invalid"; + } + } + + enum class DebugUtilsMessageSeverityFlagBitsEXT : VkDebugUtilsMessageSeverityFlagsEXT + { + eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT, + eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT, + eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT, + eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagBitsEXT value ) + { + switch ( value ) + { + case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose : return "Verbose"; + case DebugUtilsMessageSeverityFlagBitsEXT::eInfo : return "Info"; + case DebugUtilsMessageSeverityFlagBitsEXT::eWarning : return "Warning"; + case DebugUtilsMessageSeverityFlagBitsEXT::eError : return "Error"; + default: return "invalid"; + } + } + + enum class DebugUtilsMessageTypeFlagBitsEXT : VkDebugUtilsMessageTypeFlagsEXT + { + eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT, + eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT, + ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagBitsEXT value ) + { + switch ( value ) + { + case DebugUtilsMessageTypeFlagBitsEXT::eGeneral : return "General"; + case DebugUtilsMessageTypeFlagBitsEXT::eValidation : return "Validation"; + case DebugUtilsMessageTypeFlagBitsEXT::ePerformance : return "Performance"; + default: return "invalid"; + } + } + + enum class DependencyFlagBits : VkDependencyFlags + { + eByRegion = VK_DEPENDENCY_BY_REGION_BIT, + eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT, + eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT, + eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR, + eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( DependencyFlagBits value ) + { + switch ( value ) + { + case DependencyFlagBits::eByRegion : return "ByRegion"; + case DependencyFlagBits::eDeviceGroup : return "DeviceGroup"; + case DependencyFlagBits::eViewLocal : return 
"ViewLocal"; + default: return "invalid"; + } + } + + enum class DescriptorBindingFlagBitsEXT : VkDescriptorBindingFlagsEXT + { + eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT, + eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT, + ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT, + eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagBitsEXT value ) + { + switch ( value ) + { + case DescriptorBindingFlagBitsEXT::eUpdateAfterBind : return "UpdateAfterBind"; + case DescriptorBindingFlagBitsEXT::eUpdateUnusedWhilePending : return "UpdateUnusedWhilePending"; + case DescriptorBindingFlagBitsEXT::ePartiallyBound : return "PartiallyBound"; + case DescriptorBindingFlagBitsEXT::eVariableDescriptorCount : return "VariableDescriptorCount"; + default: return "invalid"; + } + } + + enum class DescriptorPoolCreateFlagBits : VkDescriptorPoolCreateFlags + { + eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, + eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlagBits value ) + { + switch ( value ) + { + case DescriptorPoolCreateFlagBits::eFreeDescriptorSet : return "FreeDescriptorSet"; + case DescriptorPoolCreateFlagBits::eUpdateAfterBindEXT : return "UpdateAfterBindEXT"; + default: return "invalid"; + } + } + + enum class DescriptorSetLayoutCreateFlagBits : VkDescriptorSetLayoutCreateFlags + { + ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, + eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlagBits value ) + { + switch ( value ) + { + case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR : return "PushDescriptorKHR"; + case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPoolEXT : return "UpdateAfterBindPoolEXT"; + default: return "invalid"; + } + } + + enum class DescriptorType + { + eSampler = VK_DESCRIPTOR_TYPE_SAMPLER, + eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, + eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, + eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, + eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, + eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, + eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, + eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, + eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, + eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, + eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, + eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, + eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV + }; + + VULKAN_HPP_INLINE std::string to_string( DescriptorType value ) + { + switch ( value ) + { + case DescriptorType::eSampler : return "Sampler"; + case DescriptorType::eCombinedImageSampler : return "CombinedImageSampler"; + case DescriptorType::eSampledImage : return "SampledImage"; + case DescriptorType::eStorageImage : return "StorageImage"; + case DescriptorType::eUniformTexelBuffer : return "UniformTexelBuffer"; + case DescriptorType::eStorageTexelBuffer : return "StorageTexelBuffer"; + case DescriptorType::eUniformBuffer : return "UniformBuffer"; + case DescriptorType::eStorageBuffer : return 
"StorageBuffer"; + case DescriptorType::eUniformBufferDynamic : return "UniformBufferDynamic"; + case DescriptorType::eStorageBufferDynamic : return "StorageBufferDynamic"; + case DescriptorType::eInputAttachment : return "InputAttachment"; + case DescriptorType::eInlineUniformBlockEXT : return "InlineUniformBlockEXT"; + case DescriptorType::eAccelerationStructureNV : return "AccelerationStructureNV"; + default: return "invalid"; + } + } + + enum class DescriptorUpdateTemplateType + { + eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR + }; + using DescriptorUpdateTemplateTypeKHR = DescriptorUpdateTemplateType; + + VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateType value ) + { + switch ( value ) + { + case DescriptorUpdateTemplateType::eDescriptorSet : return "DescriptorSet"; + case DescriptorUpdateTemplateType::ePushDescriptorsKHR : return "PushDescriptorsKHR"; + default: return "invalid"; + } + } + + enum class DeviceCreateFlagBits + {}; + + VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlagBits ) + { + return "(void)"; + } + + enum class DeviceEventTypeEXT + { + eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DeviceEventTypeEXT value ) + { + switch ( value ) + { + case DeviceEventTypeEXT::eDisplayHotplug : return "DisplayHotplug"; + default: return "invalid"; + } + } + + enum class DeviceGroupPresentModeFlagBitsKHR : VkDeviceGroupPresentModeFlagsKHR + { + eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR, + eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR, + eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR, + eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagBitsKHR value ) + { + switch ( value ) + { + case DeviceGroupPresentModeFlagBitsKHR::eLocal : return "Local"; + case DeviceGroupPresentModeFlagBitsKHR::eRemote : return "Remote"; + case DeviceGroupPresentModeFlagBitsKHR::eSum : return "Sum"; + case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice : return "LocalMultiDevice"; + default: return "invalid"; + } + } + + enum class DeviceQueueCreateFlagBits : VkDeviceQueueCreateFlags + { + eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlagBits value ) + { + switch ( value ) + { + case DeviceQueueCreateFlagBits::eProtected : return "Protected"; + default: return "invalid"; + } + } + + enum class DiscardRectangleModeEXT + { + eInclusive = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT, + eExclusive = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DiscardRectangleModeEXT value ) + { + switch ( value ) + { + case DiscardRectangleModeEXT::eInclusive : return "Inclusive"; + case DiscardRectangleModeEXT::eExclusive : return "Exclusive"; + default: return "invalid"; + } + } + + enum class DisplayEventTypeEXT + { + eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DisplayEventTypeEXT value ) + { + switch ( value ) + { + case DisplayEventTypeEXT::eFirstPixelOut : return "FirstPixelOut"; + default: return "invalid"; + } + } + + enum class DisplayPlaneAlphaFlagBitsKHR : VkDisplayPlaneAlphaFlagsKHR + { + eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR, + eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR, + ePerPixel = 
VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR, + ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagBitsKHR value ) + { + switch ( value ) + { + case DisplayPlaneAlphaFlagBitsKHR::eOpaque : return "Opaque"; + case DisplayPlaneAlphaFlagBitsKHR::eGlobal : return "Global"; + case DisplayPlaneAlphaFlagBitsKHR::ePerPixel : return "PerPixel"; + case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied : return "PerPixelPremultiplied"; + default: return "invalid"; + } + } + + enum class DisplayPowerStateEXT + { + eOff = VK_DISPLAY_POWER_STATE_OFF_EXT, + eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT, + eOn = VK_DISPLAY_POWER_STATE_ON_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DisplayPowerStateEXT value ) + { + switch ( value ) + { + case DisplayPowerStateEXT::eOff : return "Off"; + case DisplayPowerStateEXT::eSuspend : return "Suspend"; + case DisplayPowerStateEXT::eOn : return "On"; + default: return "invalid"; + } + } + + enum class DriverIdKHR + { + eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY_KHR, + eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR, + eMesaRadv = VK_DRIVER_ID_MESA_RADV_KHR, + eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR, + eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR, + eIntelOpenSourceMESA = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR, + eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR, + eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR, + eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY_KHR, + eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR, + eGgpProprietary = VK_DRIVER_ID_GGP_PROPRIETARY_KHR, + eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( DriverIdKHR value ) + { + switch ( value ) + { + case DriverIdKHR::eAmdProprietary : return "AmdProprietary"; + case DriverIdKHR::eAmdOpenSource : return "AmdOpenSource"; + case DriverIdKHR::eMesaRadv : return "MesaRadv"; + case DriverIdKHR::eNvidiaProprietary : return "NvidiaProprietary"; + case DriverIdKHR::eIntelProprietaryWindows : return "IntelProprietaryWindows"; + case DriverIdKHR::eIntelOpenSourceMESA : return "IntelOpenSourceMESA"; + case DriverIdKHR::eImaginationProprietary : return "ImaginationProprietary"; + case DriverIdKHR::eQualcommProprietary : return "QualcommProprietary"; + case DriverIdKHR::eArmProprietary : return "ArmProprietary"; + case DriverIdKHR::eGoogleSwiftshader : return "GoogleSwiftshader"; + case DriverIdKHR::eGgpProprietary : return "GgpProprietary"; + case DriverIdKHR::eBroadcomProprietary : return "BroadcomProprietary"; + default: return "invalid"; + } + } + + enum class DynamicState + { + eViewport = VK_DYNAMIC_STATE_VIEWPORT, + eScissor = VK_DYNAMIC_STATE_SCISSOR, + eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH, + eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS, + eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS, + eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS, + eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK, + eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, + eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE, + eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, + eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, + eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT, + eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV, + eViewportCoarseSampleOrderNV = 
VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV, + eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV, + eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( DynamicState value ) + { + switch ( value ) + { + case DynamicState::eViewport : return "Viewport"; + case DynamicState::eScissor : return "Scissor"; + case DynamicState::eLineWidth : return "LineWidth"; + case DynamicState::eDepthBias : return "DepthBias"; + case DynamicState::eBlendConstants : return "BlendConstants"; + case DynamicState::eDepthBounds : return "DepthBounds"; + case DynamicState::eStencilCompareMask : return "StencilCompareMask"; + case DynamicState::eStencilWriteMask : return "StencilWriteMask"; + case DynamicState::eStencilReference : return "StencilReference"; + case DynamicState::eViewportWScalingNV : return "ViewportWScalingNV"; + case DynamicState::eDiscardRectangleEXT : return "DiscardRectangleEXT"; + case DynamicState::eSampleLocationsEXT : return "SampleLocationsEXT"; + case DynamicState::eViewportShadingRatePaletteNV : return "ViewportShadingRatePaletteNV"; + case DynamicState::eViewportCoarseSampleOrderNV : return "ViewportCoarseSampleOrderNV"; + case DynamicState::eExclusiveScissorNV : return "ExclusiveScissorNV"; + case DynamicState::eLineStippleEXT : return "LineStippleEXT"; + default: return "invalid"; + } + } + + enum class ExternalFenceFeatureFlagBits : VkExternalFenceFeatureFlags + { + eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT + }; + using ExternalFenceFeatureFlagBitsKHR = ExternalFenceFeatureFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlagBits value ) + { + switch ( value ) + { + case ExternalFenceFeatureFlagBits::eExportable : return "Exportable"; + case ExternalFenceFeatureFlagBits::eImportable : return "Importable"; + default: return "invalid"; + } + } + + enum class ExternalFenceHandleTypeFlagBits : VkExternalFenceHandleTypeFlags + { + eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT + }; + using ExternalFenceHandleTypeFlagBitsKHR = ExternalFenceHandleTypeFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlagBits value ) + { + switch ( value ) + { + case ExternalFenceHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd"; + case ExternalFenceHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32"; + case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt"; + case ExternalFenceHandleTypeFlagBits::eSyncFd : return "SyncFd"; + default: return "invalid"; + } + } + + enum class ExternalMemoryFeatureFlagBits : VkExternalMemoryFeatureFlags + { + eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, + eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT + }; + using ExternalMemoryFeatureFlagBitsKHR = ExternalMemoryFeatureFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBits value ) + { + switch ( value ) + { + case ExternalMemoryFeatureFlagBits::eDedicatedOnly : return "DedicatedOnly"; + case ExternalMemoryFeatureFlagBits::eExportable : return "Exportable"; + case ExternalMemoryFeatureFlagBits::eImportable : return "Importable"; + default: return "invalid"; + } + } + + enum class 
ExternalMemoryFeatureFlagBitsNV : VkExternalMemoryFeatureFlagsNV + { + eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV, + eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV, + eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBitsNV value ) + { + switch ( value ) + { + case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly : return "DedicatedOnly"; + case ExternalMemoryFeatureFlagBitsNV::eExportable : return "Exportable"; + case ExternalMemoryFeatureFlagBitsNV::eImportable : return "Importable"; + default: return "invalid"; + } + } + + enum class ExternalMemoryHandleTypeFlagBits : VkExternalMemoryHandleTypeFlags + { + eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, + eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, + eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, + eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, + eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT, + eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID, + eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT, + eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT + }; + using ExternalMemoryHandleTypeFlagBitsKHR = ExternalMemoryHandleTypeFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBits value ) + { + switch ( value ) + { + case ExternalMemoryHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd"; + case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32"; + case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt"; + case ExternalMemoryHandleTypeFlagBits::eD3D11Texture : return "D3D11Texture"; + case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt : return "D3D11TextureKmt"; + case ExternalMemoryHandleTypeFlagBits::eD3D12Heap : return "D3D12Heap"; + case ExternalMemoryHandleTypeFlagBits::eD3D12Resource : return "D3D12Resource"; + case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT : return "DmaBufEXT"; + case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID : return "AndroidHardwareBufferANDROID"; + case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT : return "HostAllocationEXT"; + case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT : return "HostMappedForeignMemoryEXT"; + default: return "invalid"; + } + } + + enum class ExternalMemoryHandleTypeFlagBitsNV : VkExternalMemoryHandleTypeFlagsNV + { + eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV, + eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV, + eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV, + eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBitsNV value ) + { + switch ( value ) + { + case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 : return "OpaqueWin32"; + case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt : return "OpaqueWin32Kmt"; + case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image : return "D3D11Image"; + case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt : return "D3D11ImageKmt"; + 
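// [sketch] On Android the handle type this codebase is most likely to exercise is
// ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID (defined above). A
// minimal import sketch, where `device`, `hardwareBuffer`, `size` and `memoryTypeIndex`
// are hypothetical placeholders and the default `vk` namespace is assumed:
//
//   vk::ImportAndroidHardwareBufferInfoANDROID importInfo{ hardwareBuffer };
//   vk::MemoryAllocateInfo allocInfo{ size, memoryTypeIndex };
//   allocInfo.pNext = &importInfo;
//   vk::DeviceMemory memory = device.allocateMemory( allocInfo );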
default: return "invalid"; + } + } + + enum class ExternalSemaphoreFeatureFlagBits : VkExternalSemaphoreFeatureFlags + { + eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT, + eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT + }; + using ExternalSemaphoreFeatureFlagBitsKHR = ExternalSemaphoreFeatureFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlagBits value ) + { + switch ( value ) + { + case ExternalSemaphoreFeatureFlagBits::eExportable : return "Exportable"; + case ExternalSemaphoreFeatureFlagBits::eImportable : return "Importable"; + default: return "invalid"; + } + } + + enum class ExternalSemaphoreHandleTypeFlagBits : VkExternalSemaphoreHandleTypeFlags + { + eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, + eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT + }; + using ExternalSemaphoreHandleTypeFlagBitsKHR = ExternalSemaphoreHandleTypeFlagBits; + + VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlagBits value ) + { + switch ( value ) + { + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd : return "OpaqueFd"; + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 : return "OpaqueWin32"; + case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt : return "OpaqueWin32Kmt"; + case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence : return "D3D12Fence"; + case ExternalSemaphoreHandleTypeFlagBits::eSyncFd : return "SyncFd"; + default: return "invalid"; + } + } + + enum class FenceCreateFlagBits : VkFenceCreateFlags + { + eSignaled = VK_FENCE_CREATE_SIGNALED_BIT + }; + + VULKAN_HPP_INLINE std::string to_string( FenceCreateFlagBits value ) + { + switch ( value ) + { + case FenceCreateFlagBits::eSignaled : return "Signaled"; + default: return "invalid"; + } + } + + enum class FenceImportFlagBits : VkFenceImportFlags + { + eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT + }; + using FenceImportFlagBitsKHR = FenceImportFlagBits; + + VULKAN_HPP_INLINE std::string to_string( FenceImportFlagBits value ) + { + switch ( value ) + { + case FenceImportFlagBits::eTemporary : return "Temporary"; + default: return "invalid"; + } + } + + enum class Filter + { + eNearest = VK_FILTER_NEAREST, + eLinear = VK_FILTER_LINEAR, + eCubicIMG = VK_FILTER_CUBIC_IMG, + eCubicEXT = VK_FILTER_CUBIC_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( Filter value ) + { + switch ( value ) + { + case Filter::eNearest : return "Nearest"; + case Filter::eLinear : return "Linear"; + case Filter::eCubicIMG : return "CubicIMG"; + default: return "invalid"; + } + } + + enum class Format + { + eUndefined = VK_FORMAT_UNDEFINED, + eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8, + eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16, + eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16, + eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16, + eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16, + eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16, + eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16, + eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16, + eR8Unorm = VK_FORMAT_R8_UNORM, + eR8Snorm = VK_FORMAT_R8_SNORM, + eR8Uscaled = VK_FORMAT_R8_USCALED, + eR8Sscaled = VK_FORMAT_R8_SSCALED, + eR8Uint = VK_FORMAT_R8_UINT, + eR8Sint = VK_FORMAT_R8_SINT, + eR8Srgb = VK_FORMAT_R8_SRGB, + eR8G8Unorm = VK_FORMAT_R8G8_UNORM, 
+ eR8G8Snorm = VK_FORMAT_R8G8_SNORM, + eR8G8Uscaled = VK_FORMAT_R8G8_USCALED, + eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED, + eR8G8Uint = VK_FORMAT_R8G8_UINT, + eR8G8Sint = VK_FORMAT_R8G8_SINT, + eR8G8Srgb = VK_FORMAT_R8G8_SRGB, + eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM, + eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM, + eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED, + eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED, + eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT, + eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT, + eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB, + eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM, + eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM, + eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED, + eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED, + eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT, + eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT, + eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB, + eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM, + eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM, + eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED, + eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED, + eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT, + eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT, + eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB, + eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM, + eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM, + eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED, + eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED, + eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT, + eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT, + eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB, + eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32, + eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32, + eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32, + eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32, + eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32, + eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32, + eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32, + eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32, + eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32, + eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32, + eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32, + eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32, + eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32, + eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32, + eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32, + eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32, + eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32, + eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32, + eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32, + eR16Unorm = VK_FORMAT_R16_UNORM, + eR16Snorm = VK_FORMAT_R16_SNORM, + eR16Uscaled = VK_FORMAT_R16_USCALED, + eR16Sscaled = VK_FORMAT_R16_SSCALED, + eR16Uint = VK_FORMAT_R16_UINT, + eR16Sint = VK_FORMAT_R16_SINT, + eR16Sfloat = VK_FORMAT_R16_SFLOAT, + eR16G16Unorm = VK_FORMAT_R16G16_UNORM, + eR16G16Snorm = VK_FORMAT_R16G16_SNORM, + eR16G16Uscaled = VK_FORMAT_R16G16_USCALED, + eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED, + eR16G16Uint = VK_FORMAT_R16G16_UINT, + eR16G16Sint = VK_FORMAT_R16G16_SINT, + eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT, + eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM, + eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM, + eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED, + eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED, + eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT, + eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT, + eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT, + eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM, + 
eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM, + eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED, + eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED, + eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT, + eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT, + eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT, + eR32Uint = VK_FORMAT_R32_UINT, + eR32Sint = VK_FORMAT_R32_SINT, + eR32Sfloat = VK_FORMAT_R32_SFLOAT, + eR32G32Uint = VK_FORMAT_R32G32_UINT, + eR32G32Sint = VK_FORMAT_R32G32_SINT, + eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT, + eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT, + eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT, + eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT, + eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT, + eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT, + eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT, + eR64Uint = VK_FORMAT_R64_UINT, + eR64Sint = VK_FORMAT_R64_SINT, + eR64Sfloat = VK_FORMAT_R64_SFLOAT, + eR64G64Uint = VK_FORMAT_R64G64_UINT, + eR64G64Sint = VK_FORMAT_R64G64_SINT, + eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT, + eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT, + eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT, + eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT, + eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT, + eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT, + eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT, + eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32, + eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, + eD16Unorm = VK_FORMAT_D16_UNORM, + eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32, + eD32Sfloat = VK_FORMAT_D32_SFLOAT, + eS8Uint = VK_FORMAT_S8_UINT, + eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT, + eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT, + eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT, + eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK, + eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK, + eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK, + eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK, + eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK, + eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK, + eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK, + eBc3SrgbBlock = VK_FORMAT_BC3_SRGB_BLOCK, + eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK, + eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK, + eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK, + eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK, + eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK, + eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK, + eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK, + eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK, + eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, + eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK, + eEtc2R8G8B8A1UnormBlock = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK, + eEtc2R8G8B8A1SrgbBlock = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK, + eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, + eEtc2R8G8B8A8SrgbBlock = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, + eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK, + eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK, + eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK, + eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK, + eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK, + eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK, + eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK, + eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK, + eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK, + eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK, + eAstc6x5UnormBlock = VK_FORMAT_ASTC_6x5_UNORM_BLOCK, + eAstc6x5SrgbBlock 
= VK_FORMAT_ASTC_6x5_SRGB_BLOCK, + eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK, + eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK, + eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK, + eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK, + eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK, + eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK, + eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK, + eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK, + eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK, + eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK, + eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK, + eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK, + eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK, + eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK, + eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK, + eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK, + eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK, + eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK, + eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK, + eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK, + eG8B8G8R8422Unorm = VK_FORMAT_G8B8G8R8_422_UNORM, + eB8G8R8G8422Unorm = VK_FORMAT_B8G8R8G8_422_UNORM, + eG8B8R83Plane420Unorm = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, + eG8B8R82Plane420Unorm = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, + eG8B8R83Plane422Unorm = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, + eG8B8R82Plane422Unorm = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, + eG8B8R83Plane444Unorm = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, + eR10X6UnormPack16 = VK_FORMAT_R10X6_UNORM_PACK16, + eR10X6G10X6Unorm2Pack16 = VK_FORMAT_R10X6G10X6_UNORM_2PACK16, + eR10X6G10X6B10X6A10X6Unorm4Pack16 = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, + eG10X6B10X6G10X6R10X6422Unorm4Pack16 = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, + eB10X6G10X6R10X6G10X6422Unorm4Pack16 = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, + eG10X6B10X6R10X63Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, + eG10X6B10X6R10X62Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, + eG10X6B10X6R10X63Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, + eG10X6B10X6R10X62Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, + eG10X6B10X6R10X63Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, + eR12X4UnormPack16 = VK_FORMAT_R12X4_UNORM_PACK16, + eR12X4G12X4Unorm2Pack16 = VK_FORMAT_R12X4G12X4_UNORM_2PACK16, + eR12X4G12X4B12X4A12X4Unorm4Pack16 = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, + eG12X4B12X4G12X4R12X4422Unorm4Pack16 = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, + eB12X4G12X4R12X4G12X4422Unorm4Pack16 = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, + eG12X4B12X4R12X43Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, + eG12X4B12X4R12X42Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, + eG12X4B12X4R12X43Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, + eG12X4B12X4R12X42Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, + eG12X4B12X4R12X43Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, + eG16B16G16R16422Unorm = VK_FORMAT_G16B16G16R16_422_UNORM, + eB16G16R16G16422Unorm = VK_FORMAT_B16G16R16G16_422_UNORM, + eG16B16R163Plane420Unorm = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, + eG16B16R162Plane420Unorm = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, + 
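// Note: these enumerator names are mechanical transforms of the C macros, e.g.
// VK_FORMAT_G16_B16R16_2PLANE_420_UNORM -> eG16B16R162Plane420Unorm. A quick
// format-support sanity check might look like this (hypothetical `physicalDevice`
// handle, default `vk` namespace assumed):
//
//   vk::FormatProperties props = physicalDevice.getFormatProperties( vk::Format::eG16B16R162Plane420Unorm );
//   bool sampleable = static_cast<bool>( props.optimalTilingFeatures & vk::FormatFeatureFlagBits::eSampledImage );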
eG16B16R163Plane422Unorm = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, + eG16B16R162Plane422Unorm = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, + eG16B16R163Plane444Unorm = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, + ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG, + ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG, + ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG, + ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG, + ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG, + ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG, + ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG, + ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG, + eAstc4x4SfloatBlockEXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT, + eAstc5x4SfloatBlockEXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT, + eAstc5x5SfloatBlockEXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT, + eAstc6x5SfloatBlockEXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT, + eAstc6x6SfloatBlockEXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT, + eAstc8x5SfloatBlockEXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT, + eAstc8x6SfloatBlockEXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT, + eAstc8x8SfloatBlockEXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT, + eAstc10x5SfloatBlockEXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT, + eAstc10x6SfloatBlockEXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT, + eAstc10x8SfloatBlockEXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT, + eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT, + eAstc12x10SfloatBlockEXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT, + eAstc12x12SfloatBlockEXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT, + eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR, + eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM_KHR, + eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR, + eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR, + eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR, + eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR, + eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR, + eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR, + eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR, + eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR, + eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR, + eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR, + eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR, + eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR, + eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR, + eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR, + eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR, + eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR, + eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR, + eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR, + eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR, + eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR, + eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = 
VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR, + eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR, + eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR, + eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR, + eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR, + eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM_KHR, + eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR, + eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR, + eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR, + eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR, + eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR, + eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( Format value ) + { + switch ( value ) + { + case Format::eUndefined : return "Undefined"; + case Format::eR4G4UnormPack8 : return "R4G4UnormPack8"; + case Format::eR4G4B4A4UnormPack16 : return "R4G4B4A4UnormPack16"; + case Format::eB4G4R4A4UnormPack16 : return "B4G4R4A4UnormPack16"; + case Format::eR5G6B5UnormPack16 : return "R5G6B5UnormPack16"; + case Format::eB5G6R5UnormPack16 : return "B5G6R5UnormPack16"; + case Format::eR5G5B5A1UnormPack16 : return "R5G5B5A1UnormPack16"; + case Format::eB5G5R5A1UnormPack16 : return "B5G5R5A1UnormPack16"; + case Format::eA1R5G5B5UnormPack16 : return "A1R5G5B5UnormPack16"; + case Format::eR8Unorm : return "R8Unorm"; + case Format::eR8Snorm : return "R8Snorm"; + case Format::eR8Uscaled : return "R8Uscaled"; + case Format::eR8Sscaled : return "R8Sscaled"; + case Format::eR8Uint : return "R8Uint"; + case Format::eR8Sint : return "R8Sint"; + case Format::eR8Srgb : return "R8Srgb"; + case Format::eR8G8Unorm : return "R8G8Unorm"; + case Format::eR8G8Snorm : return "R8G8Snorm"; + case Format::eR8G8Uscaled : return "R8G8Uscaled"; + case Format::eR8G8Sscaled : return "R8G8Sscaled"; + case Format::eR8G8Uint : return "R8G8Uint"; + case Format::eR8G8Sint : return "R8G8Sint"; + case Format::eR8G8Srgb : return "R8G8Srgb"; + case Format::eR8G8B8Unorm : return "R8G8B8Unorm"; + case Format::eR8G8B8Snorm : return "R8G8B8Snorm"; + case Format::eR8G8B8Uscaled : return "R8G8B8Uscaled"; + case Format::eR8G8B8Sscaled : return "R8G8B8Sscaled"; + case Format::eR8G8B8Uint : return "R8G8B8Uint"; + case Format::eR8G8B8Sint : return "R8G8B8Sint"; + case Format::eR8G8B8Srgb : return "R8G8B8Srgb"; + case Format::eB8G8R8Unorm : return "B8G8R8Unorm"; + case Format::eB8G8R8Snorm : return "B8G8R8Snorm"; + case Format::eB8G8R8Uscaled : return "B8G8R8Uscaled"; + case Format::eB8G8R8Sscaled : return "B8G8R8Sscaled"; + case Format::eB8G8R8Uint : return "B8G8R8Uint"; + case Format::eB8G8R8Sint : return "B8G8R8Sint"; + case Format::eB8G8R8Srgb : return "B8G8R8Srgb"; + case Format::eR8G8B8A8Unorm : return "R8G8B8A8Unorm"; + case Format::eR8G8B8A8Snorm : return "R8G8B8A8Snorm"; + case Format::eR8G8B8A8Uscaled : return "R8G8B8A8Uscaled"; + case Format::eR8G8B8A8Sscaled : return "R8G8B8A8Sscaled"; + case Format::eR8G8B8A8Uint : return "R8G8B8A8Uint"; + case Format::eR8G8B8A8Sint : return "R8G8B8A8Sint"; + case Format::eR8G8B8A8Srgb : return "R8G8B8A8Srgb"; + case Format::eB8G8R8A8Unorm : return "B8G8R8A8Unorm"; + case Format::eB8G8R8A8Snorm : return "B8G8R8A8Snorm"; + case 
Format::eB8G8R8A8Uscaled : return "B8G8R8A8Uscaled"; + case Format::eB8G8R8A8Sscaled : return "B8G8R8A8Sscaled"; + case Format::eB8G8R8A8Uint : return "B8G8R8A8Uint"; + case Format::eB8G8R8A8Sint : return "B8G8R8A8Sint"; + case Format::eB8G8R8A8Srgb : return "B8G8R8A8Srgb"; + case Format::eA8B8G8R8UnormPack32 : return "A8B8G8R8UnormPack32"; + case Format::eA8B8G8R8SnormPack32 : return "A8B8G8R8SnormPack32"; + case Format::eA8B8G8R8UscaledPack32 : return "A8B8G8R8UscaledPack32"; + case Format::eA8B8G8R8SscaledPack32 : return "A8B8G8R8SscaledPack32"; + case Format::eA8B8G8R8UintPack32 : return "A8B8G8R8UintPack32"; + case Format::eA8B8G8R8SintPack32 : return "A8B8G8R8SintPack32"; + case Format::eA8B8G8R8SrgbPack32 : return "A8B8G8R8SrgbPack32"; + case Format::eA2R10G10B10UnormPack32 : return "A2R10G10B10UnormPack32"; + case Format::eA2R10G10B10SnormPack32 : return "A2R10G10B10SnormPack32"; + case Format::eA2R10G10B10UscaledPack32 : return "A2R10G10B10UscaledPack32"; + case Format::eA2R10G10B10SscaledPack32 : return "A2R10G10B10SscaledPack32"; + case Format::eA2R10G10B10UintPack32 : return "A2R10G10B10UintPack32"; + case Format::eA2R10G10B10SintPack32 : return "A2R10G10B10SintPack32"; + case Format::eA2B10G10R10UnormPack32 : return "A2B10G10R10UnormPack32"; + case Format::eA2B10G10R10SnormPack32 : return "A2B10G10R10SnormPack32"; + case Format::eA2B10G10R10UscaledPack32 : return "A2B10G10R10UscaledPack32"; + case Format::eA2B10G10R10SscaledPack32 : return "A2B10G10R10SscaledPack32"; + case Format::eA2B10G10R10UintPack32 : return "A2B10G10R10UintPack32"; + case Format::eA2B10G10R10SintPack32 : return "A2B10G10R10SintPack32"; + case Format::eR16Unorm : return "R16Unorm"; + case Format::eR16Snorm : return "R16Snorm"; + case Format::eR16Uscaled : return "R16Uscaled"; + case Format::eR16Sscaled : return "R16Sscaled"; + case Format::eR16Uint : return "R16Uint"; + case Format::eR16Sint : return "R16Sint"; + case Format::eR16Sfloat : return "R16Sfloat"; + case Format::eR16G16Unorm : return "R16G16Unorm"; + case Format::eR16G16Snorm : return "R16G16Snorm"; + case Format::eR16G16Uscaled : return "R16G16Uscaled"; + case Format::eR16G16Sscaled : return "R16G16Sscaled"; + case Format::eR16G16Uint : return "R16G16Uint"; + case Format::eR16G16Sint : return "R16G16Sint"; + case Format::eR16G16Sfloat : return "R16G16Sfloat"; + case Format::eR16G16B16Unorm : return "R16G16B16Unorm"; + case Format::eR16G16B16Snorm : return "R16G16B16Snorm"; + case Format::eR16G16B16Uscaled : return "R16G16B16Uscaled"; + case Format::eR16G16B16Sscaled : return "R16G16B16Sscaled"; + case Format::eR16G16B16Uint : return "R16G16B16Uint"; + case Format::eR16G16B16Sint : return "R16G16B16Sint"; + case Format::eR16G16B16Sfloat : return "R16G16B16Sfloat"; + case Format::eR16G16B16A16Unorm : return "R16G16B16A16Unorm"; + case Format::eR16G16B16A16Snorm : return "R16G16B16A16Snorm"; + case Format::eR16G16B16A16Uscaled : return "R16G16B16A16Uscaled"; + case Format::eR16G16B16A16Sscaled : return "R16G16B16A16Sscaled"; + case Format::eR16G16B16A16Uint : return "R16G16B16A16Uint"; + case Format::eR16G16B16A16Sint : return "R16G16B16A16Sint"; + case Format::eR16G16B16A16Sfloat : return "R16G16B16A16Sfloat"; + case Format::eR32Uint : return "R32Uint"; + case Format::eR32Sint : return "R32Sint"; + case Format::eR32Sfloat : return "R32Sfloat"; + case Format::eR32G32Uint : return "R32G32Uint"; + case Format::eR32G32Sint : return "R32G32Sint"; + case Format::eR32G32Sfloat : return "R32G32Sfloat"; + case Format::eR32G32B32Uint : return 
"R32G32B32Uint"; + case Format::eR32G32B32Sint : return "R32G32B32Sint"; + case Format::eR32G32B32Sfloat : return "R32G32B32Sfloat"; + case Format::eR32G32B32A32Uint : return "R32G32B32A32Uint"; + case Format::eR32G32B32A32Sint : return "R32G32B32A32Sint"; + case Format::eR32G32B32A32Sfloat : return "R32G32B32A32Sfloat"; + case Format::eR64Uint : return "R64Uint"; + case Format::eR64Sint : return "R64Sint"; + case Format::eR64Sfloat : return "R64Sfloat"; + case Format::eR64G64Uint : return "R64G64Uint"; + case Format::eR64G64Sint : return "R64G64Sint"; + case Format::eR64G64Sfloat : return "R64G64Sfloat"; + case Format::eR64G64B64Uint : return "R64G64B64Uint"; + case Format::eR64G64B64Sint : return "R64G64B64Sint"; + case Format::eR64G64B64Sfloat : return "R64G64B64Sfloat"; + case Format::eR64G64B64A64Uint : return "R64G64B64A64Uint"; + case Format::eR64G64B64A64Sint : return "R64G64B64A64Sint"; + case Format::eR64G64B64A64Sfloat : return "R64G64B64A64Sfloat"; + case Format::eB10G11R11UfloatPack32 : return "B10G11R11UfloatPack32"; + case Format::eE5B9G9R9UfloatPack32 : return "E5B9G9R9UfloatPack32"; + case Format::eD16Unorm : return "D16Unorm"; + case Format::eX8D24UnormPack32 : return "X8D24UnormPack32"; + case Format::eD32Sfloat : return "D32Sfloat"; + case Format::eS8Uint : return "S8Uint"; + case Format::eD16UnormS8Uint : return "D16UnormS8Uint"; + case Format::eD24UnormS8Uint : return "D24UnormS8Uint"; + case Format::eD32SfloatS8Uint : return "D32SfloatS8Uint"; + case Format::eBc1RgbUnormBlock : return "Bc1RgbUnormBlock"; + case Format::eBc1RgbSrgbBlock : return "Bc1RgbSrgbBlock"; + case Format::eBc1RgbaUnormBlock : return "Bc1RgbaUnormBlock"; + case Format::eBc1RgbaSrgbBlock : return "Bc1RgbaSrgbBlock"; + case Format::eBc2UnormBlock : return "Bc2UnormBlock"; + case Format::eBc2SrgbBlock : return "Bc2SrgbBlock"; + case Format::eBc3UnormBlock : return "Bc3UnormBlock"; + case Format::eBc3SrgbBlock : return "Bc3SrgbBlock"; + case Format::eBc4UnormBlock : return "Bc4UnormBlock"; + case Format::eBc4SnormBlock : return "Bc4SnormBlock"; + case Format::eBc5UnormBlock : return "Bc5UnormBlock"; + case Format::eBc5SnormBlock : return "Bc5SnormBlock"; + case Format::eBc6HUfloatBlock : return "Bc6HUfloatBlock"; + case Format::eBc6HSfloatBlock : return "Bc6HSfloatBlock"; + case Format::eBc7UnormBlock : return "Bc7UnormBlock"; + case Format::eBc7SrgbBlock : return "Bc7SrgbBlock"; + case Format::eEtc2R8G8B8UnormBlock : return "Etc2R8G8B8UnormBlock"; + case Format::eEtc2R8G8B8SrgbBlock : return "Etc2R8G8B8SrgbBlock"; + case Format::eEtc2R8G8B8A1UnormBlock : return "Etc2R8G8B8A1UnormBlock"; + case Format::eEtc2R8G8B8A1SrgbBlock : return "Etc2R8G8B8A1SrgbBlock"; + case Format::eEtc2R8G8B8A8UnormBlock : return "Etc2R8G8B8A8UnormBlock"; + case Format::eEtc2R8G8B8A8SrgbBlock : return "Etc2R8G8B8A8SrgbBlock"; + case Format::eEacR11UnormBlock : return "EacR11UnormBlock"; + case Format::eEacR11SnormBlock : return "EacR11SnormBlock"; + case Format::eEacR11G11UnormBlock : return "EacR11G11UnormBlock"; + case Format::eEacR11G11SnormBlock : return "EacR11G11SnormBlock"; + case Format::eAstc4x4UnormBlock : return "Astc4x4UnormBlock"; + case Format::eAstc4x4SrgbBlock : return "Astc4x4SrgbBlock"; + case Format::eAstc5x4UnormBlock : return "Astc5x4UnormBlock"; + case Format::eAstc5x4SrgbBlock : return "Astc5x4SrgbBlock"; + case Format::eAstc5x5UnormBlock : return "Astc5x5UnormBlock"; + case Format::eAstc5x5SrgbBlock : return "Astc5x5SrgbBlock"; + case Format::eAstc6x5UnormBlock : return "Astc6x5UnormBlock"; + case 
Format::eAstc6x5SrgbBlock : return "Astc6x5SrgbBlock"; + case Format::eAstc6x6UnormBlock : return "Astc6x6UnormBlock"; + case Format::eAstc6x6SrgbBlock : return "Astc6x6SrgbBlock"; + case Format::eAstc8x5UnormBlock : return "Astc8x5UnormBlock"; + case Format::eAstc8x5SrgbBlock : return "Astc8x5SrgbBlock"; + case Format::eAstc8x6UnormBlock : return "Astc8x6UnormBlock"; + case Format::eAstc8x6SrgbBlock : return "Astc8x6SrgbBlock"; + case Format::eAstc8x8UnormBlock : return "Astc8x8UnormBlock"; + case Format::eAstc8x8SrgbBlock : return "Astc8x8SrgbBlock"; + case Format::eAstc10x5UnormBlock : return "Astc10x5UnormBlock"; + case Format::eAstc10x5SrgbBlock : return "Astc10x5SrgbBlock"; + case Format::eAstc10x6UnormBlock : return "Astc10x6UnormBlock"; + case Format::eAstc10x6SrgbBlock : return "Astc10x6SrgbBlock"; + case Format::eAstc10x8UnormBlock : return "Astc10x8UnormBlock"; + case Format::eAstc10x8SrgbBlock : return "Astc10x8SrgbBlock"; + case Format::eAstc10x10UnormBlock : return "Astc10x10UnormBlock"; + case Format::eAstc10x10SrgbBlock : return "Astc10x10SrgbBlock"; + case Format::eAstc12x10UnormBlock : return "Astc12x10UnormBlock"; + case Format::eAstc12x10SrgbBlock : return "Astc12x10SrgbBlock"; + case Format::eAstc12x12UnormBlock : return "Astc12x12UnormBlock"; + case Format::eAstc12x12SrgbBlock : return "Astc12x12SrgbBlock"; + case Format::eG8B8G8R8422Unorm : return "G8B8G8R8422Unorm"; + case Format::eB8G8R8G8422Unorm : return "B8G8R8G8422Unorm"; + case Format::eG8B8R83Plane420Unorm : return "G8B8R83Plane420Unorm"; + case Format::eG8B8R82Plane420Unorm : return "G8B8R82Plane420Unorm"; + case Format::eG8B8R83Plane422Unorm : return "G8B8R83Plane422Unorm"; + case Format::eG8B8R82Plane422Unorm : return "G8B8R82Plane422Unorm"; + case Format::eG8B8R83Plane444Unorm : return "G8B8R83Plane444Unorm"; + case Format::eR10X6UnormPack16 : return "R10X6UnormPack16"; + case Format::eR10X6G10X6Unorm2Pack16 : return "R10X6G10X6Unorm2Pack16"; + case Format::eR10X6G10X6B10X6A10X6Unorm4Pack16 : return "R10X6G10X6B10X6A10X6Unorm4Pack16"; + case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16 : return "G10X6B10X6G10X6R10X6422Unorm4Pack16"; + case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16 : return "B10X6G10X6R10X6G10X6422Unorm4Pack16"; + case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16 : return "G10X6B10X6R10X63Plane420Unorm3Pack16"; + case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16 : return "G10X6B10X6R10X62Plane420Unorm3Pack16"; + case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16 : return "G10X6B10X6R10X63Plane422Unorm3Pack16"; + case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16 : return "G10X6B10X6R10X62Plane422Unorm3Pack16"; + case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16 : return "G10X6B10X6R10X63Plane444Unorm3Pack16"; + case Format::eR12X4UnormPack16 : return "R12X4UnormPack16"; + case Format::eR12X4G12X4Unorm2Pack16 : return "R12X4G12X4Unorm2Pack16"; + case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16 : return "R12X4G12X4B12X4A12X4Unorm4Pack16"; + case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16 : return "G12X4B12X4G12X4R12X4422Unorm4Pack16"; + case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16 : return "B12X4G12X4R12X4G12X4422Unorm4Pack16"; + case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16 : return "G12X4B12X4R12X43Plane420Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16 : return "G12X4B12X4R12X42Plane420Unorm3Pack16"; + case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16 : return "G12X4B12X4R12X43Plane422Unorm3Pack16"; + case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16 
: return "G12X4B12X4R12X42Plane422Unorm3Pack16"; + case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16 : return "G12X4B12X4R12X43Plane444Unorm3Pack16"; + case Format::eG16B16G16R16422Unorm : return "G16B16G16R16422Unorm"; + case Format::eB16G16R16G16422Unorm : return "B16G16R16G16422Unorm"; + case Format::eG16B16R163Plane420Unorm : return "G16B16R163Plane420Unorm"; + case Format::eG16B16R162Plane420Unorm : return "G16B16R162Plane420Unorm"; + case Format::eG16B16R163Plane422Unorm : return "G16B16R163Plane422Unorm"; + case Format::eG16B16R162Plane422Unorm : return "G16B16R162Plane422Unorm"; + case Format::eG16B16R163Plane444Unorm : return "G16B16R163Plane444Unorm"; + case Format::ePvrtc12BppUnormBlockIMG : return "Pvrtc12BppUnormBlockIMG"; + case Format::ePvrtc14BppUnormBlockIMG : return "Pvrtc14BppUnormBlockIMG"; + case Format::ePvrtc22BppUnormBlockIMG : return "Pvrtc22BppUnormBlockIMG"; + case Format::ePvrtc24BppUnormBlockIMG : return "Pvrtc24BppUnormBlockIMG"; + case Format::ePvrtc12BppSrgbBlockIMG : return "Pvrtc12BppSrgbBlockIMG"; + case Format::ePvrtc14BppSrgbBlockIMG : return "Pvrtc14BppSrgbBlockIMG"; + case Format::ePvrtc22BppSrgbBlockIMG : return "Pvrtc22BppSrgbBlockIMG"; + case Format::ePvrtc24BppSrgbBlockIMG : return "Pvrtc24BppSrgbBlockIMG"; + case Format::eAstc4x4SfloatBlockEXT : return "Astc4x4SfloatBlockEXT"; + case Format::eAstc5x4SfloatBlockEXT : return "Astc5x4SfloatBlockEXT"; + case Format::eAstc5x5SfloatBlockEXT : return "Astc5x5SfloatBlockEXT"; + case Format::eAstc6x5SfloatBlockEXT : return "Astc6x5SfloatBlockEXT"; + case Format::eAstc6x6SfloatBlockEXT : return "Astc6x6SfloatBlockEXT"; + case Format::eAstc8x5SfloatBlockEXT : return "Astc8x5SfloatBlockEXT"; + case Format::eAstc8x6SfloatBlockEXT : return "Astc8x6SfloatBlockEXT"; + case Format::eAstc8x8SfloatBlockEXT : return "Astc8x8SfloatBlockEXT"; + case Format::eAstc10x5SfloatBlockEXT : return "Astc10x5SfloatBlockEXT"; + case Format::eAstc10x6SfloatBlockEXT : return "Astc10x6SfloatBlockEXT"; + case Format::eAstc10x8SfloatBlockEXT : return "Astc10x8SfloatBlockEXT"; + case Format::eAstc10x10SfloatBlockEXT : return "Astc10x10SfloatBlockEXT"; + case Format::eAstc12x10SfloatBlockEXT : return "Astc12x10SfloatBlockEXT"; + case Format::eAstc12x12SfloatBlockEXT : return "Astc12x12SfloatBlockEXT"; + default: return "invalid"; + } + } + + enum class FormatFeatureFlagBits : VkFormatFeatureFlags + { + eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT, + eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT, + eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT, + eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT, + eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT, + eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT, + eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT, + eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT, + eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT, + eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT, + eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT, + eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT, + eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT, + eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, + eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, + eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, + eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, + 
eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, + eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, + eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT, + eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, + eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG, + eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT, + eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT, + eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR, + eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR, + eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR, + eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR, + eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR, + eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR, + eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR, + eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR, + eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR, + eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits value ) + { + switch ( value ) + { + case FormatFeatureFlagBits::eSampledImage : return "SampledImage"; + case FormatFeatureFlagBits::eStorageImage : return "StorageImage"; + case FormatFeatureFlagBits::eStorageImageAtomic : return "StorageImageAtomic"; + case FormatFeatureFlagBits::eUniformTexelBuffer : return "UniformTexelBuffer"; + case FormatFeatureFlagBits::eStorageTexelBuffer : return "StorageTexelBuffer"; + case FormatFeatureFlagBits::eStorageTexelBufferAtomic : return "StorageTexelBufferAtomic"; + case FormatFeatureFlagBits::eVertexBuffer : return "VertexBuffer"; + case FormatFeatureFlagBits::eColorAttachment : return "ColorAttachment"; + case FormatFeatureFlagBits::eColorAttachmentBlend : return "ColorAttachmentBlend"; + case FormatFeatureFlagBits::eDepthStencilAttachment : return "DepthStencilAttachment"; + case FormatFeatureFlagBits::eBlitSrc : return "BlitSrc"; + case FormatFeatureFlagBits::eBlitDst : return "BlitDst"; + case FormatFeatureFlagBits::eSampledImageFilterLinear : return "SampledImageFilterLinear"; + case FormatFeatureFlagBits::eTransferSrc : return "TransferSrc"; + case FormatFeatureFlagBits::eTransferDst : return "TransferDst"; + case FormatFeatureFlagBits::eMidpointChromaSamples : return "MidpointChromaSamples"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter : return "SampledImageYcbcrConversionLinearFilter"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter : return "SampledImageYcbcrConversionSeparateReconstructionFilter"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit : return 
"SampledImageYcbcrConversionChromaReconstructionExplicit"; + case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable : return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable"; + case FormatFeatureFlagBits::eDisjoint : return "Disjoint"; + case FormatFeatureFlagBits::eCositedChromaSamples : return "CositedChromaSamples"; + case FormatFeatureFlagBits::eSampledImageFilterCubicIMG : return "SampledImageFilterCubicIMG"; + case FormatFeatureFlagBits::eSampledImageFilterMinmaxEXT : return "SampledImageFilterMinmaxEXT"; + case FormatFeatureFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT"; + default: return "invalid"; + } + } + + enum class FramebufferCreateFlagBits : VkFramebufferCreateFlags + { + eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlagBits value ) + { + switch ( value ) + { + case FramebufferCreateFlagBits::eImagelessKHR : return "ImagelessKHR"; + default: return "invalid"; + } + } + + enum class FrontFace + { + eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE, + eClockwise = VK_FRONT_FACE_CLOCKWISE + }; + + VULKAN_HPP_INLINE std::string to_string( FrontFace value ) + { + switch ( value ) + { + case FrontFace::eCounterClockwise : return "CounterClockwise"; + case FrontFace::eClockwise : return "Clockwise"; + default: return "invalid"; + } + } + +#ifdef VK_USE_PLATFORM_WIN32_KHR + enum class FullScreenExclusiveEXT + { + eDefault = VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT, + eAllowed = VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT, + eDisallowed = VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT, + eApplicationControlled = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( FullScreenExclusiveEXT value ) + { + switch ( value ) + { + case FullScreenExclusiveEXT::eDefault : return "Default"; + case FullScreenExclusiveEXT::eAllowed : return "Allowed"; + case FullScreenExclusiveEXT::eDisallowed : return "Disallowed"; + case FullScreenExclusiveEXT::eApplicationControlled : return "ApplicationControlled"; + default: return "invalid"; + } + } +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + enum class GeometryFlagBitsNV : VkGeometryFlagsNV + { + eOpaque = VK_GEOMETRY_OPAQUE_BIT_NV, + eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsNV value ) + { + switch ( value ) + { + case GeometryFlagBitsNV::eOpaque : return "Opaque"; + case GeometryFlagBitsNV::eNoDuplicateAnyHitInvocation : return "NoDuplicateAnyHitInvocation"; + default: return "invalid"; + } + } + + enum class GeometryInstanceFlagBitsNV : VkGeometryInstanceFlagsNV + { + eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV, + eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV, + eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV, + eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsNV value ) + { + switch ( value ) + { + case GeometryInstanceFlagBitsNV::eTriangleCullDisable : return "TriangleCullDisable"; + case GeometryInstanceFlagBitsNV::eTriangleFrontCounterclockwise : return "TriangleFrontCounterclockwise"; + case GeometryInstanceFlagBitsNV::eForceOpaque : return "ForceOpaque"; + case GeometryInstanceFlagBitsNV::eForceNoOpaque : return "ForceNoOpaque"; + default: return "invalid"; + } + } + + enum class GeometryTypeNV 
+ { + eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_NV, + eAabbs = VK_GEOMETRY_TYPE_AABBS_NV + }; + + VULKAN_HPP_INLINE std::string to_string( GeometryTypeNV value ) + { + switch ( value ) + { + case GeometryTypeNV::eTriangles : return "Triangles"; + case GeometryTypeNV::eAabbs : return "Aabbs"; + default: return "invalid"; + } + } + + enum class ImageAspectFlagBits : VkImageAspectFlags + { + eColor = VK_IMAGE_ASPECT_COLOR_BIT, + eDepth = VK_IMAGE_ASPECT_DEPTH_BIT, + eStencil = VK_IMAGE_ASPECT_STENCIL_BIT, + eMetadata = VK_IMAGE_ASPECT_METADATA_BIT, + ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT, + ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT, + ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT, + eMemoryPlane0EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT, + eMemoryPlane1EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT, + eMemoryPlane2EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT, + eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT, + ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR, + ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR, + ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( ImageAspectFlagBits value ) + { + switch ( value ) + { + case ImageAspectFlagBits::eColor : return "Color"; + case ImageAspectFlagBits::eDepth : return "Depth"; + case ImageAspectFlagBits::eStencil : return "Stencil"; + case ImageAspectFlagBits::eMetadata : return "Metadata"; + case ImageAspectFlagBits::ePlane0 : return "Plane0"; + case ImageAspectFlagBits::ePlane1 : return "Plane1"; + case ImageAspectFlagBits::ePlane2 : return "Plane2"; + case ImageAspectFlagBits::eMemoryPlane0EXT : return "MemoryPlane0EXT"; + case ImageAspectFlagBits::eMemoryPlane1EXT : return "MemoryPlane1EXT"; + case ImageAspectFlagBits::eMemoryPlane2EXT : return "MemoryPlane2EXT"; + case ImageAspectFlagBits::eMemoryPlane3EXT : return "MemoryPlane3EXT"; + default: return "invalid"; + } + } + + enum class ImageCreateFlagBits : VkImageCreateFlags + { + eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT, + eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT, + eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT, + eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT, + eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, + eAlias = VK_IMAGE_CREATE_ALIAS_BIT, + eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, + e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, + eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, + eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, + eProtected = VK_IMAGE_CREATE_PROTECTED_BIT, + eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT, + eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV, + eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT, + eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT, + eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, + e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR, + eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR, + eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR, + eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR, + eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( ImageCreateFlagBits value ) + { + switch ( value ) + { + case ImageCreateFlagBits::eSparseBinding : return "SparseBinding"; + case ImageCreateFlagBits::eSparseResidency : return "SparseResidency"; + case ImageCreateFlagBits::eSparseAliased : return 
"SparseAliased"; + case ImageCreateFlagBits::eMutableFormat : return "MutableFormat"; + case ImageCreateFlagBits::eCubeCompatible : return "CubeCompatible"; + case ImageCreateFlagBits::eAlias : return "Alias"; + case ImageCreateFlagBits::eSplitInstanceBindRegions : return "SplitInstanceBindRegions"; + case ImageCreateFlagBits::e2DArrayCompatible : return "2DArrayCompatible"; + case ImageCreateFlagBits::eBlockTexelViewCompatible : return "BlockTexelViewCompatible"; + case ImageCreateFlagBits::eExtendedUsage : return "ExtendedUsage"; + case ImageCreateFlagBits::eProtected : return "Protected"; + case ImageCreateFlagBits::eDisjoint : return "Disjoint"; + case ImageCreateFlagBits::eCornerSampledNV : return "CornerSampledNV"; + case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT : return "SampleLocationsCompatibleDepthEXT"; + case ImageCreateFlagBits::eSubsampledEXT : return "SubsampledEXT"; + default: return "invalid"; + } + } + + enum class ImageLayout + { + eUndefined = VK_IMAGE_LAYOUT_UNDEFINED, + eGeneral = VK_IMAGE_LAYOUT_GENERAL, + eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, + eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, + eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL, + eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, + eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, + eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, + ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED, + eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, + eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, + ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, + eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, + eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV, + eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, + eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR, + eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( ImageLayout value ) + { + switch ( value ) + { + case ImageLayout::eUndefined : return "Undefined"; + case ImageLayout::eGeneral : return "General"; + case ImageLayout::eColorAttachmentOptimal : return "ColorAttachmentOptimal"; + case ImageLayout::eDepthStencilAttachmentOptimal : return "DepthStencilAttachmentOptimal"; + case ImageLayout::eDepthStencilReadOnlyOptimal : return "DepthStencilReadOnlyOptimal"; + case ImageLayout::eShaderReadOnlyOptimal : return "ShaderReadOnlyOptimal"; + case ImageLayout::eTransferSrcOptimal : return "TransferSrcOptimal"; + case ImageLayout::eTransferDstOptimal : return "TransferDstOptimal"; + case ImageLayout::ePreinitialized : return "Preinitialized"; + case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal : return "DepthReadOnlyStencilAttachmentOptimal"; + case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal : return "DepthAttachmentStencilReadOnlyOptimal"; + case ImageLayout::ePresentSrcKHR : return "PresentSrcKHR"; + case ImageLayout::eSharedPresentKHR : return "SharedPresentKHR"; + case ImageLayout::eShadingRateOptimalNV : return "ShadingRateOptimalNV"; + case ImageLayout::eFragmentDensityMapOptimalEXT : return "FragmentDensityMapOptimalEXT"; + default: return "invalid"; + } + } + + enum class ImageTiling + { + 
eOptimal = VK_IMAGE_TILING_OPTIMAL, + eLinear = VK_IMAGE_TILING_LINEAR, + eDrmFormatModifierEXT = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ImageTiling value ) + { + switch ( value ) + { + case ImageTiling::eOptimal : return "Optimal"; + case ImageTiling::eLinear : return "Linear"; + case ImageTiling::eDrmFormatModifierEXT : return "DrmFormatModifierEXT"; + default: return "invalid"; + } + } + + enum class ImageType + { + e1D = VK_IMAGE_TYPE_1D, + e2D = VK_IMAGE_TYPE_2D, + e3D = VK_IMAGE_TYPE_3D + }; + + VULKAN_HPP_INLINE std::string to_string( ImageType value ) + { + switch ( value ) + { + case ImageType::e1D : return "1D"; + case ImageType::e2D : return "2D"; + case ImageType::e3D : return "3D"; + default: return "invalid"; + } + } + + enum class ImageUsageFlagBits : VkImageUsageFlags + { + eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT, + eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT, + eSampled = VK_IMAGE_USAGE_SAMPLED_BIT, + eStorage = VK_IMAGE_USAGE_STORAGE_BIT, + eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, + eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, + eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, + eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, + eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, + eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ImageUsageFlagBits value ) + { + switch ( value ) + { + case ImageUsageFlagBits::eTransferSrc : return "TransferSrc"; + case ImageUsageFlagBits::eTransferDst : return "TransferDst"; + case ImageUsageFlagBits::eSampled : return "Sampled"; + case ImageUsageFlagBits::eStorage : return "Storage"; + case ImageUsageFlagBits::eColorAttachment : return "ColorAttachment"; + case ImageUsageFlagBits::eDepthStencilAttachment : return "DepthStencilAttachment"; + case ImageUsageFlagBits::eTransientAttachment : return "TransientAttachment"; + case ImageUsageFlagBits::eInputAttachment : return "InputAttachment"; + case ImageUsageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV"; + case ImageUsageFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT"; + default: return "invalid"; + } + } + + enum class ImageViewCreateFlagBits : VkImageViewCreateFlags + { + eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlagBits value ) + { + switch ( value ) + { + case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT : return "FragmentDensityMapDynamicEXT"; + default: return "invalid"; + } + } + + enum class ImageViewType + { + e1D = VK_IMAGE_VIEW_TYPE_1D, + e2D = VK_IMAGE_VIEW_TYPE_2D, + e3D = VK_IMAGE_VIEW_TYPE_3D, + eCube = VK_IMAGE_VIEW_TYPE_CUBE, + e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY, + e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY, + eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY + }; + + VULKAN_HPP_INLINE std::string to_string( ImageViewType value ) + { + switch ( value ) + { + case ImageViewType::e1D : return "1D"; + case ImageViewType::e2D : return "2D"; + case ImageViewType::e3D : return "3D"; + case ImageViewType::eCube : return "Cube"; + case ImageViewType::e1DArray : return "1DArray"; + case ImageViewType::e2DArray : return "2DArray"; + case ImageViewType::eCubeArray : return "CubeArray"; + default: return "invalid"; + } + } + + enum class IndexType + { + eUint16 = VK_INDEX_TYPE_UINT16, + eUint32 = VK_INDEX_TYPE_UINT32, + eNoneNV = 
VK_INDEX_TYPE_NONE_NV, + eUint8EXT = VK_INDEX_TYPE_UINT8_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( IndexType value ) + { + switch ( value ) + { + case IndexType::eUint16 : return "Uint16"; + case IndexType::eUint32 : return "Uint32"; + case IndexType::eNoneNV : return "NoneNV"; + case IndexType::eUint8EXT : return "Uint8EXT"; + default: return "invalid"; + } + } + + enum class IndirectCommandsLayoutUsageFlagBitsNVX : VkIndirectCommandsLayoutUsageFlagsNVX + { + eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX, + eSparseSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX, + eEmptyExecutions = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX, + eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX + }; + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNVX value ) + { + switch ( value ) + { + case IndirectCommandsLayoutUsageFlagBitsNVX::eUnorderedSequences : return "UnorderedSequences"; + case IndirectCommandsLayoutUsageFlagBitsNVX::eSparseSequences : return "SparseSequences"; + case IndirectCommandsLayoutUsageFlagBitsNVX::eEmptyExecutions : return "EmptyExecutions"; + case IndirectCommandsLayoutUsageFlagBitsNVX::eIndexedSequences : return "IndexedSequences"; + default: return "invalid"; + } + } + + enum class IndirectCommandsTokenTypeNVX + { + ePipeline = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX, + eDescriptorSet = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX, + eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX, + eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX, + ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX, + eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX, + eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX, + eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX + }; + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNVX value ) + { + switch ( value ) + { + case IndirectCommandsTokenTypeNVX::ePipeline : return "Pipeline"; + case IndirectCommandsTokenTypeNVX::eDescriptorSet : return "DescriptorSet"; + case IndirectCommandsTokenTypeNVX::eIndexBuffer : return "IndexBuffer"; + case IndirectCommandsTokenTypeNVX::eVertexBuffer : return "VertexBuffer"; + case IndirectCommandsTokenTypeNVX::ePushConstant : return "PushConstant"; + case IndirectCommandsTokenTypeNVX::eDrawIndexed : return "DrawIndexed"; + case IndirectCommandsTokenTypeNVX::eDraw : return "Draw"; + case IndirectCommandsTokenTypeNVX::eDispatch : return "Dispatch"; + default: return "invalid"; + } + } + + enum class InstanceCreateFlagBits + {}; + + VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlagBits ) + { + return "(void)"; + } + + enum class InternalAllocationType + { + eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE + }; + + VULKAN_HPP_INLINE std::string to_string( InternalAllocationType value ) + { + switch ( value ) + { + case InternalAllocationType::eExecutable : return "Executable"; + default: return "invalid"; + } + } + + enum class LineRasterizationModeEXT + { + eDefault = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT, + eRectangular = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT, + eBresenham = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT, + eRectangularSmooth = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( LineRasterizationModeEXT value ) + { + switch ( value ) + { + case LineRasterizationModeEXT::eDefault : return "Default"; + case 
LineRasterizationModeEXT::eRectangular : return "Rectangular"; + case LineRasterizationModeEXT::eBresenham : return "Bresenham"; + case LineRasterizationModeEXT::eRectangularSmooth : return "RectangularSmooth"; + default: return "invalid"; + } + } + + enum class LogicOp + { + eClear = VK_LOGIC_OP_CLEAR, + eAnd = VK_LOGIC_OP_AND, + eAndReverse = VK_LOGIC_OP_AND_REVERSE, + eCopy = VK_LOGIC_OP_COPY, + eAndInverted = VK_LOGIC_OP_AND_INVERTED, + eNoOp = VK_LOGIC_OP_NO_OP, + eXor = VK_LOGIC_OP_XOR, + eOr = VK_LOGIC_OP_OR, + eNor = VK_LOGIC_OP_NOR, + eEquivalent = VK_LOGIC_OP_EQUIVALENT, + eInvert = VK_LOGIC_OP_INVERT, + eOrReverse = VK_LOGIC_OP_OR_REVERSE, + eCopyInverted = VK_LOGIC_OP_COPY_INVERTED, + eOrInverted = VK_LOGIC_OP_OR_INVERTED, + eNand = VK_LOGIC_OP_NAND, + eSet = VK_LOGIC_OP_SET + }; + + VULKAN_HPP_INLINE std::string to_string( LogicOp value ) + { + switch ( value ) + { + case LogicOp::eClear : return "Clear"; + case LogicOp::eAnd : return "And"; + case LogicOp::eAndReverse : return "AndReverse"; + case LogicOp::eCopy : return "Copy"; + case LogicOp::eAndInverted : return "AndInverted"; + case LogicOp::eNoOp : return "NoOp"; + case LogicOp::eXor : return "Xor"; + case LogicOp::eOr : return "Or"; + case LogicOp::eNor : return "Nor"; + case LogicOp::eEquivalent : return "Equivalent"; + case LogicOp::eInvert : return "Invert"; + case LogicOp::eOrReverse : return "OrReverse"; + case LogicOp::eCopyInverted : return "CopyInverted"; + case LogicOp::eOrInverted : return "OrInverted"; + case LogicOp::eNand : return "Nand"; + case LogicOp::eSet : return "Set"; + default: return "invalid"; + } + } + + enum class MemoryAllocateFlagBits : VkMemoryAllocateFlags + { + eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT + }; + using MemoryAllocateFlagBitsKHR = MemoryAllocateFlagBits; + + VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlagBits value ) + { + switch ( value ) + { + case MemoryAllocateFlagBits::eDeviceMask : return "DeviceMask"; + default: return "invalid"; + } + } + + enum class MemoryHeapFlagBits : VkMemoryHeapFlags + { + eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT, + eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, + eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlagBits value ) + { + switch ( value ) + { + case MemoryHeapFlagBits::eDeviceLocal : return "DeviceLocal"; + case MemoryHeapFlagBits::eMultiInstance : return "MultiInstance"; + default: return "invalid"; + } + } + + enum class MemoryOverallocationBehaviorAMD + { + eDefault = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD, + eAllowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD, + eDisallowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD + }; + + VULKAN_HPP_INLINE std::string to_string( MemoryOverallocationBehaviorAMD value ) + { + switch ( value ) + { + case MemoryOverallocationBehaviorAMD::eDefault : return "Default"; + case MemoryOverallocationBehaviorAMD::eAllowed : return "Allowed"; + case MemoryOverallocationBehaviorAMD::eDisallowed : return "Disallowed"; + default: return "invalid"; + } + } + + enum class MemoryPropertyFlagBits : VkMemoryPropertyFlags + { + eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, + eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, + eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, + eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT, + eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT, + eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT, + eDeviceCoherentAMD = 
VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD, + eDeviceUncachedAMD = VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD + }; + + VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlagBits value ) + { + switch ( value ) + { + case MemoryPropertyFlagBits::eDeviceLocal : return "DeviceLocal"; + case MemoryPropertyFlagBits::eHostVisible : return "HostVisible"; + case MemoryPropertyFlagBits::eHostCoherent : return "HostCoherent"; + case MemoryPropertyFlagBits::eHostCached : return "HostCached"; + case MemoryPropertyFlagBits::eLazilyAllocated : return "LazilyAllocated"; + case MemoryPropertyFlagBits::eProtected : return "Protected"; + case MemoryPropertyFlagBits::eDeviceCoherentAMD : return "DeviceCoherentAMD"; + case MemoryPropertyFlagBits::eDeviceUncachedAMD : return "DeviceUncachedAMD"; + default: return "invalid"; + } + } + + enum class ObjectEntryTypeNVX + { + eDescriptorSet = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX, + ePipeline = VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX, + eIndexBuffer = VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX, + eVertexBuffer = VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX, + ePushConstant = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX + }; + + VULKAN_HPP_INLINE std::string to_string( ObjectEntryTypeNVX value ) + { + switch ( value ) + { + case ObjectEntryTypeNVX::eDescriptorSet : return "DescriptorSet"; + case ObjectEntryTypeNVX::ePipeline : return "Pipeline"; + case ObjectEntryTypeNVX::eIndexBuffer : return "IndexBuffer"; + case ObjectEntryTypeNVX::eVertexBuffer : return "VertexBuffer"; + case ObjectEntryTypeNVX::ePushConstant : return "PushConstant"; + default: return "invalid"; + } + } + + enum class ObjectEntryUsageFlagBitsNVX : VkObjectEntryUsageFlagsNVX + { + eGraphics = VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX, + eCompute = VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX + }; + + VULKAN_HPP_INLINE std::string to_string( ObjectEntryUsageFlagBitsNVX value ) + { + switch ( value ) + { + case ObjectEntryUsageFlagBitsNVX::eGraphics : return "Graphics"; + case ObjectEntryUsageFlagBitsNVX::eCompute : return "Compute"; + default: return "invalid"; + } + } + + enum class ObjectType + { + eUnknown = VK_OBJECT_TYPE_UNKNOWN, + eInstance = VK_OBJECT_TYPE_INSTANCE, + ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE, + eDevice = VK_OBJECT_TYPE_DEVICE, + eQueue = VK_OBJECT_TYPE_QUEUE, + eSemaphore = VK_OBJECT_TYPE_SEMAPHORE, + eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER, + eFence = VK_OBJECT_TYPE_FENCE, + eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY, + eBuffer = VK_OBJECT_TYPE_BUFFER, + eImage = VK_OBJECT_TYPE_IMAGE, + eEvent = VK_OBJECT_TYPE_EVENT, + eQueryPool = VK_OBJECT_TYPE_QUERY_POOL, + eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW, + eImageView = VK_OBJECT_TYPE_IMAGE_VIEW, + eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE, + ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE, + ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT, + eRenderPass = VK_OBJECT_TYPE_RENDER_PASS, + ePipeline = VK_OBJECT_TYPE_PIPELINE, + eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT, + eSampler = VK_OBJECT_TYPE_SAMPLER, + eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL, + eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET, + eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER, + eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL, + eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, + eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, + eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR, + eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR, + eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR, + eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR, 
+ eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT, + eObjectTableNVX = VK_OBJECT_TYPE_OBJECT_TABLE_NVX, + eIndirectCommandsLayoutNVX = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX, + eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT, + eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT, + eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV, + ePerformanceConfigurationINTEL = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL, + eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR, + eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( ObjectType value ) + { + switch ( value ) + { + case ObjectType::eUnknown : return "Unknown"; + case ObjectType::eInstance : return "Instance"; + case ObjectType::ePhysicalDevice : return "PhysicalDevice"; + case ObjectType::eDevice : return "Device"; + case ObjectType::eQueue : return "Queue"; + case ObjectType::eSemaphore : return "Semaphore"; + case ObjectType::eCommandBuffer : return "CommandBuffer"; + case ObjectType::eFence : return "Fence"; + case ObjectType::eDeviceMemory : return "DeviceMemory"; + case ObjectType::eBuffer : return "Buffer"; + case ObjectType::eImage : return "Image"; + case ObjectType::eEvent : return "Event"; + case ObjectType::eQueryPool : return "QueryPool"; + case ObjectType::eBufferView : return "BufferView"; + case ObjectType::eImageView : return "ImageView"; + case ObjectType::eShaderModule : return "ShaderModule"; + case ObjectType::ePipelineCache : return "PipelineCache"; + case ObjectType::ePipelineLayout : return "PipelineLayout"; + case ObjectType::eRenderPass : return "RenderPass"; + case ObjectType::ePipeline : return "Pipeline"; + case ObjectType::eDescriptorSetLayout : return "DescriptorSetLayout"; + case ObjectType::eSampler : return "Sampler"; + case ObjectType::eDescriptorPool : return "DescriptorPool"; + case ObjectType::eDescriptorSet : return "DescriptorSet"; + case ObjectType::eFramebuffer : return "Framebuffer"; + case ObjectType::eCommandPool : return "CommandPool"; + case ObjectType::eSamplerYcbcrConversion : return "SamplerYcbcrConversion"; + case ObjectType::eDescriptorUpdateTemplate : return "DescriptorUpdateTemplate"; + case ObjectType::eSurfaceKHR : return "SurfaceKHR"; + case ObjectType::eSwapchainKHR : return "SwapchainKHR"; + case ObjectType::eDisplayKHR : return "DisplayKHR"; + case ObjectType::eDisplayModeKHR : return "DisplayModeKHR"; + case ObjectType::eDebugReportCallbackEXT : return "DebugReportCallbackEXT"; + case ObjectType::eObjectTableNVX : return "ObjectTableNVX"; + case ObjectType::eIndirectCommandsLayoutNVX : return "IndirectCommandsLayoutNVX"; + case ObjectType::eDebugUtilsMessengerEXT : return "DebugUtilsMessengerEXT"; + case ObjectType::eValidationCacheEXT : return "ValidationCacheEXT"; + case ObjectType::eAccelerationStructureNV : return "AccelerationStructureNV"; + case ObjectType::ePerformanceConfigurationINTEL : return "PerformanceConfigurationINTEL"; + default: return "invalid"; + } + } + + enum class PeerMemoryFeatureFlagBits : VkPeerMemoryFeatureFlags + { + eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, + eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, + eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT, + eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT + }; + using PeerMemoryFeatureFlagBitsKHR = PeerMemoryFeatureFlagBits; + + VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlagBits value ) + { + switch ( value ) 
+ { + case PeerMemoryFeatureFlagBits::eCopySrc : return "CopySrc"; + case PeerMemoryFeatureFlagBits::eCopyDst : return "CopyDst"; + case PeerMemoryFeatureFlagBits::eGenericSrc : return "GenericSrc"; + case PeerMemoryFeatureFlagBits::eGenericDst : return "GenericDst"; + default: return "invalid"; + } + } + + enum class PerformanceConfigurationTypeINTEL + { + eCommandQueueMetricsDiscoveryActivated = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL + }; + + VULKAN_HPP_INLINE std::string to_string( PerformanceConfigurationTypeINTEL value ) + { + switch ( value ) + { + case PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated : return "CommandQueueMetricsDiscoveryActivated"; + default: return "invalid"; + } + } + + enum class PerformanceOverrideTypeINTEL + { + eNullHardware = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL, + eFlushGpuCaches = VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL + }; + + VULKAN_HPP_INLINE std::string to_string( PerformanceOverrideTypeINTEL value ) + { + switch ( value ) + { + case PerformanceOverrideTypeINTEL::eNullHardware : return "NullHardware"; + case PerformanceOverrideTypeINTEL::eFlushGpuCaches : return "FlushGpuCaches"; + default: return "invalid"; + } + } + + enum class PerformanceParameterTypeINTEL + { + eHwCountersSupported = VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL, + eStreamMarkerValidBits = VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL + }; + + VULKAN_HPP_INLINE std::string to_string( PerformanceParameterTypeINTEL value ) + { + switch ( value ) + { + case PerformanceParameterTypeINTEL::eHwCountersSupported : return "HwCountersSupported"; + case PerformanceParameterTypeINTEL::eStreamMarkerValidBits : return "StreamMarkerValidBits"; + default: return "invalid"; + } + } + + enum class PerformanceValueTypeINTEL + { + eUint32 = VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL, + eUint64 = VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL, + eFloat = VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL, + eBool = VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL, + eString = VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL + }; + + VULKAN_HPP_INLINE std::string to_string( PerformanceValueTypeINTEL value ) + { + switch ( value ) + { + case PerformanceValueTypeINTEL::eUint32 : return "Uint32"; + case PerformanceValueTypeINTEL::eUint64 : return "Uint64"; + case PerformanceValueTypeINTEL::eFloat : return "Float"; + case PerformanceValueTypeINTEL::eBool : return "Bool"; + case PerformanceValueTypeINTEL::eString : return "String"; + default: return "invalid"; + } + } + + enum class PhysicalDeviceType + { + eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER, + eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU, + eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU, + eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU, + eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU + }; + + VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceType value ) + { + switch ( value ) + { + case PhysicalDeviceType::eOther : return "Other"; + case PhysicalDeviceType::eIntegratedGpu : return "IntegratedGpu"; + case PhysicalDeviceType::eDiscreteGpu : return "DiscreteGpu"; + case PhysicalDeviceType::eVirtualGpu : return "VirtualGpu"; + case PhysicalDeviceType::eCpu : return "Cpu"; + default: return "invalid"; + } + } + + enum class PipelineBindPoint + { + eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS, + eCompute = VK_PIPELINE_BIND_POINT_COMPUTE, + eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineBindPoint value ) + { + 
switch ( value ) + { + case PipelineBindPoint::eGraphics : return "Graphics"; + case PipelineBindPoint::eCompute : return "Compute"; + case PipelineBindPoint::eRayTracingNV : return "RayTracingNV"; + default: return "invalid"; + } + } + + enum class PipelineCacheCreateFlagBits + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits ) + { + return "(void)"; + } + + enum class PipelineCacheHeaderVersion + { + eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCacheHeaderVersion value ) + { + switch ( value ) + { + case PipelineCacheHeaderVersion::eOne : return "One"; + default: return "invalid"; + } + } + + enum class PipelineColorBlendStateCreateFlagBits + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits ) + { + return "(void)"; + } + + enum class PipelineCompilerControlFlagBitsAMD : VkPipelineCompilerControlFlagsAMD + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagBitsAMD ) + { + return "(void)"; + } + + enum class PipelineCreateFlagBits : VkPipelineCreateFlags + { + eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT, + eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT, + eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT, + eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE, + eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV, + eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR, + eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, + eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR, + eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits value ) + { + switch ( value ) + { + case PipelineCreateFlagBits::eDisableOptimization : return "DisableOptimization"; + case PipelineCreateFlagBits::eAllowDerivatives : return "AllowDerivatives"; + case PipelineCreateFlagBits::eDerivative : return "Derivative"; + case PipelineCreateFlagBits::eViewIndexFromDeviceIndex : return "ViewIndexFromDeviceIndex"; + case PipelineCreateFlagBits::eDispatchBase : return "DispatchBase"; + case PipelineCreateFlagBits::eDeferCompileNV : return "DeferCompileNV"; + case PipelineCreateFlagBits::eCaptureStatisticsKHR : return "CaptureStatisticsKHR"; + case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR : return "CaptureInternalRepresentationsKHR"; + default: return "invalid"; + } + } + + enum class PipelineCreationFeedbackFlagBitsEXT : VkPipelineCreationFeedbackFlagsEXT + { + eValid = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT, + eApplicationPipelineCacheHit = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT, + eBasePipelineAcceleration = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagBitsEXT value ) + { + switch ( value ) + { + case PipelineCreationFeedbackFlagBitsEXT::eValid : return "Valid"; + case PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit : return "ApplicationPipelineCacheHit"; + case PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration : return "BasePipelineAcceleration"; + default: return "invalid"; + } + } + + enum class PipelineDepthStencilStateCreateFlagBits + {}; + + VULKAN_HPP_INLINE std::string to_string( 
PipelineDepthStencilStateCreateFlagBits ) + { + return "(void)"; + } + + enum class PipelineDynamicStateCreateFlagBits + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits ) + { + return "(void)"; + } + + enum class PipelineExecutableStatisticFormatKHR + { + eBool32 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR, + eInt64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR, + eUint64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR, + eFloat64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineExecutableStatisticFormatKHR value ) + { + switch ( value ) + { + case PipelineExecutableStatisticFormatKHR::eBool32 : return "Bool32"; + case PipelineExecutableStatisticFormatKHR::eInt64 : return "Int64"; + case PipelineExecutableStatisticFormatKHR::eUint64 : return "Uint64"; + case PipelineExecutableStatisticFormatKHR::eFloat64 : return "Float64"; + default: return "invalid"; + } + } + + enum class PipelineInputAssemblyStateCreateFlagBits + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlagBits ) + { + return "(void)"; + } + + enum class PipelineLayoutCreateFlagBits + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits ) + { + return "(void)"; + } + + enum class PipelineMultisampleStateCreateFlagBits + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits ) + { + return "(void)"; + } + + enum class PipelineRasterizationStateCreateFlagBits + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits ) + { + return "(void)"; + } + + enum class PipelineShaderStageCreateFlagBits : VkPipelineShaderStageCreateFlags + { + eAllowVaryingSubgroupSizeEXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT, + eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlagBits value ) + { + switch ( value ) + { + case PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT : return "AllowVaryingSubgroupSizeEXT"; + case PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT : return "RequireFullSubgroupsEXT"; + default: return "invalid"; + } + } + + enum class PipelineStageFlagBits : VkPipelineStageFlags + { + eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, + eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT, + eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, + eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, + eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT, + eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT, + eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT, + eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, + eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT, + eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, + eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, + eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, + eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT, + eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, + eHost = VK_PIPELINE_STAGE_HOST_BIT, + eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, + eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, + eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT, + eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT, + 
eCommandProcessNVX = VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX, + eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV, + eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV, + eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV, + eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, + eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV, + eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value ) + { + switch ( value ) + { + case PipelineStageFlagBits::eTopOfPipe : return "TopOfPipe"; + case PipelineStageFlagBits::eDrawIndirect : return "DrawIndirect"; + case PipelineStageFlagBits::eVertexInput : return "VertexInput"; + case PipelineStageFlagBits::eVertexShader : return "VertexShader"; + case PipelineStageFlagBits::eTessellationControlShader : return "TessellationControlShader"; + case PipelineStageFlagBits::eTessellationEvaluationShader : return "TessellationEvaluationShader"; + case PipelineStageFlagBits::eGeometryShader : return "GeometryShader"; + case PipelineStageFlagBits::eFragmentShader : return "FragmentShader"; + case PipelineStageFlagBits::eEarlyFragmentTests : return "EarlyFragmentTests"; + case PipelineStageFlagBits::eLateFragmentTests : return "LateFragmentTests"; + case PipelineStageFlagBits::eColorAttachmentOutput : return "ColorAttachmentOutput"; + case PipelineStageFlagBits::eComputeShader : return "ComputeShader"; + case PipelineStageFlagBits::eTransfer : return "Transfer"; + case PipelineStageFlagBits::eBottomOfPipe : return "BottomOfPipe"; + case PipelineStageFlagBits::eHost : return "Host"; + case PipelineStageFlagBits::eAllGraphics : return "AllGraphics"; + case PipelineStageFlagBits::eAllCommands : return "AllCommands"; + case PipelineStageFlagBits::eTransformFeedbackEXT : return "TransformFeedbackEXT"; + case PipelineStageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT"; + case PipelineStageFlagBits::eCommandProcessNVX : return "CommandProcessNVX"; + case PipelineStageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV"; + case PipelineStageFlagBits::eRayTracingShaderNV : return "RayTracingShaderNV"; + case PipelineStageFlagBits::eAccelerationStructureBuildNV : return "AccelerationStructureBuildNV"; + case PipelineStageFlagBits::eTaskShaderNV : return "TaskShaderNV"; + case PipelineStageFlagBits::eMeshShaderNV : return "MeshShaderNV"; + case PipelineStageFlagBits::eFragmentDensityProcessEXT : return "FragmentDensityProcessEXT"; + default: return "invalid"; + } + } + + enum class PipelineTessellationStateCreateFlagBits + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits ) + { + return "(void)"; + } + + enum class PipelineVertexInputStateCreateFlagBits + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits ) + { + return "(void)"; + } + + enum class PipelineViewportStateCreateFlagBits + {}; + + VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits ) + { + return "(void)"; + } + + enum class PointClippingBehavior + { + eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY + }; + using PointClippingBehaviorKHR = PointClippingBehavior; + + VULKAN_HPP_INLINE std::string to_string( PointClippingBehavior value ) + { + switch ( value ) + { + case PointClippingBehavior::eAllClipPlanes : return "AllClipPlanes"; + 
+
+  enum class PipelineTessellationStateCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  enum class PipelineVertexInputStateCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  enum class PipelineViewportStateCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  enum class PointClippingBehavior
+  {
+    eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
+    eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY
+  };
+  using PointClippingBehaviorKHR = PointClippingBehavior;
+
+  VULKAN_HPP_INLINE std::string to_string( PointClippingBehavior value )
+  {
+    switch ( value )
+    {
+      case PointClippingBehavior::eAllClipPlanes : return "AllClipPlanes";
+      case PointClippingBehavior::eUserClipPlanesOnly : return "UserClipPlanesOnly";
+      default: return "invalid";
+    }
+  }
+
+  enum class PolygonMode
+  {
+    eFill = VK_POLYGON_MODE_FILL,
+    eLine = VK_POLYGON_MODE_LINE,
+    ePoint = VK_POLYGON_MODE_POINT,
+    eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PolygonMode value )
+  {
+    switch ( value )
+    {
+      case PolygonMode::eFill : return "Fill";
+      case PolygonMode::eLine : return "Line";
+      case PolygonMode::ePoint : return "Point";
+      case PolygonMode::eFillRectangleNV : return "FillRectangleNV";
+      default: return "invalid";
+    }
+  }
+
+  enum class PresentModeKHR
+  {
+    eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
+    eMailbox = VK_PRESENT_MODE_MAILBOX_KHR,
+    eFifo = VK_PRESENT_MODE_FIFO_KHR,
+    eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR,
+    eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR,
+    eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PresentModeKHR value )
+  {
+    switch ( value )
+    {
+      case PresentModeKHR::eImmediate : return "Immediate";
+      case PresentModeKHR::eMailbox : return "Mailbox";
+      case PresentModeKHR::eFifo : return "Fifo";
+      case PresentModeKHR::eFifoRelaxed : return "FifoRelaxed";
+      case PresentModeKHR::eSharedDemandRefresh : return "SharedDemandRefresh";
+      case PresentModeKHR::eSharedContinuousRefresh : return "SharedContinuousRefresh";
+      default: return "invalid";
+    }
+  }
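PresentModeKHR is what swapchain creation keys off; the specification only guarantees eFifo, so anything else has to be queried first. A minimal sketch (the helper name is illustrative):

#include <vulkan/vulkan.hpp>

// Prefer low-latency mailbox presentation, falling back to FIFO, the one
// present mode the specification guarantees to be supported
vk::PresentModeKHR choosePresentMode(vk::PhysicalDevice gpu, vk::SurfaceKHR surface) {
    for (const auto &mode : gpu.getSurfacePresentModesKHR(surface))
        if (mode == vk::PresentModeKHR::eMailbox)
            return mode;
    return vk::PresentModeKHR::eFifo;
}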
+
+  enum class PrimitiveTopology
+  {
+    ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
+    eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
+    eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
+    eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
+    eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
+    eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN,
+    eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,
+    eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,
+    eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,
+    eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY,
+    ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( PrimitiveTopology value )
+  {
+    switch ( value )
+    {
+      case PrimitiveTopology::ePointList : return "PointList";
+      case PrimitiveTopology::eLineList : return "LineList";
+      case PrimitiveTopology::eLineStrip : return "LineStrip";
+      case PrimitiveTopology::eTriangleList : return "TriangleList";
+      case PrimitiveTopology::eTriangleStrip : return "TriangleStrip";
+      case PrimitiveTopology::eTriangleFan : return "TriangleFan";
+      case PrimitiveTopology::eLineListWithAdjacency : return "LineListWithAdjacency";
+      case PrimitiveTopology::eLineStripWithAdjacency : return "LineStripWithAdjacency";
+      case PrimitiveTopology::eTriangleListWithAdjacency : return "TriangleListWithAdjacency";
+      case PrimitiveTopology::eTriangleStripWithAdjacency : return "TriangleStripWithAdjacency";
+      case PrimitiveTopology::ePatchList : return "PatchList";
+      default: return "invalid";
+    }
+  }
+
+  enum class QueryControlFlagBits : VkQueryControlFlags
+  {
+    ePrecise = VK_QUERY_CONTROL_PRECISE_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueryControlFlagBits value )
+  {
+    switch ( value )
+    {
+      case QueryControlFlagBits::ePrecise : return "Precise";
+      default: return "invalid";
+    }
+  }
+
+  enum class QueryPipelineStatisticFlagBits : VkQueryPipelineStatisticFlags
+  {
+    eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
+    eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT,
+    eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT,
+    eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT,
+    eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT,
+    eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT,
+    eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT,
+    eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT,
+    eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT,
+    eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT,
+    eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlagBits value )
+  {
+    switch ( value )
+    {
+      case QueryPipelineStatisticFlagBits::eInputAssemblyVertices : return "InputAssemblyVertices";
+      case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives : return "InputAssemblyPrimitives";
+      case QueryPipelineStatisticFlagBits::eVertexShaderInvocations : return "VertexShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations : return "GeometryShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives : return "GeometryShaderPrimitives";
+      case QueryPipelineStatisticFlagBits::eClippingInvocations : return "ClippingInvocations";
+      case QueryPipelineStatisticFlagBits::eClippingPrimitives : return "ClippingPrimitives";
+      case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations : return "FragmentShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches : return "TessellationControlShaderPatches";
+      case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations : return "TessellationEvaluationShaderInvocations";
+      case QueryPipelineStatisticFlagBits::eComputeShaderInvocations : return "ComputeShaderInvocations";
+      default: return "invalid";
+    }
+  }
+
+  enum class QueryPoolCreateFlagBits
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  enum class QueryPoolSamplingModeINTEL
+  {
+    eManual = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueryPoolSamplingModeINTEL value )
+  {
+    switch ( value )
+    {
+      case QueryPoolSamplingModeINTEL::eManual : return "Manual";
+      default: return "invalid";
+    }
+  }
+
+  enum class QueryResultFlagBits : VkQueryResultFlags
+  {
+    e64 = VK_QUERY_RESULT_64_BIT,
+    eWait = VK_QUERY_RESULT_WAIT_BIT,
+    eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
+    ePartial = VK_QUERY_RESULT_PARTIAL_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueryResultFlagBits value )
+  {
+    switch ( value )
+    {
+      case QueryResultFlagBits::e64 : return "64";
+      case QueryResultFlagBits::eWait : return "Wait";
+      case QueryResultFlagBits::eWithAvailability : return "WithAvailability";
+      case QueryResultFlagBits::ePartial : return "Partial";
+      default: return "invalid";
+    }
+  }
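QueryResultFlagBits values combine into the flags parameter of getQueryPoolResults(): e64 selects 64-bit result slots and eWait blocks until the value is available. A minimal sketch, assuming 'device' and 'queryPool' (with a timestamp query at index 0) were created elsewhere:

#include <vulkan/vulkan.hpp>
#include <array>
#include <cstdint>

uint64_t readTimestamp(vk::Device device, vk::QueryPool queryPool) {
    std::array<uint64_t, 1> timestamp{};
    vk::Result status = device.getQueryPoolResults<uint64_t>(
        queryPool, 0 /* firstQuery */, 1 /* queryCount */, timestamp,
        sizeof(uint64_t) /* stride */,
        vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait);
    return status == vk::Result::eSuccess ? timestamp[0] : 0;
}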
+
+  enum class QueryType
+  {
+    eOcclusion = VK_QUERY_TYPE_OCCLUSION,
+    ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS,
+    eTimestamp = VK_QUERY_TYPE_TIMESTAMP,
+    eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT,
+    eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV,
+    ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueryType value )
+  {
+    switch ( value )
+    {
+      case QueryType::eOcclusion : return "Occlusion";
+      case QueryType::ePipelineStatistics : return "PipelineStatistics";
+      case QueryType::eTimestamp : return "Timestamp";
+      case QueryType::eTransformFeedbackStreamEXT : return "TransformFeedbackStreamEXT";
+      case QueryType::eAccelerationStructureCompactedSizeNV : return "AccelerationStructureCompactedSizeNV";
+      case QueryType::ePerformanceQueryINTEL : return "PerformanceQueryINTEL";
+      default: return "invalid";
+    }
+  }
+
+  enum class QueueFlagBits : VkQueueFlags
+  {
+    eGraphics = VK_QUEUE_GRAPHICS_BIT,
+    eCompute = VK_QUEUE_COMPUTE_BIT,
+    eTransfer = VK_QUEUE_TRANSFER_BIT,
+    eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT,
+    eProtected = VK_QUEUE_PROTECTED_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueueFlagBits value )
+  {
+    switch ( value )
+    {
+      case QueueFlagBits::eGraphics : return "Graphics";
+      case QueueFlagBits::eCompute : return "Compute";
+      case QueueFlagBits::eTransfer : return "Transfer";
+      case QueueFlagBits::eSparseBinding : return "SparseBinding";
+      case QueueFlagBits::eProtected : return "Protected";
+      default: return "invalid";
+    }
+  }
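QueueFlagBits is typically tested against QueueFamilyProperties::queueFlags when picking queue families at device creation. A minimal sketch (the helper name is illustrative):

#include <vulkan/vulkan.hpp>
#include <cstdint>
#include <stdexcept>

// Returns the index of the first queue family that can run graphics work
uint32_t findGraphicsQueueFamily(vk::PhysicalDevice gpu) {
    auto families = gpu.getQueueFamilyProperties();
    for (uint32_t i = 0; i < families.size(); ++i)
        if (families[i].queueFlags & vk::QueueFlagBits::eGraphics)
            return i;
    throw std::runtime_error("No graphics queue family found");
}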
+
+  enum class QueueGlobalPriorityEXT
+  {
+    eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT,
+    eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT,
+    eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT,
+    eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( QueueGlobalPriorityEXT value )
+  {
+    switch ( value )
+    {
+      case QueueGlobalPriorityEXT::eLow : return "Low";
+      case QueueGlobalPriorityEXT::eMedium : return "Medium";
+      case QueueGlobalPriorityEXT::eHigh : return "High";
+      case QueueGlobalPriorityEXT::eRealtime : return "Realtime";
+      default: return "invalid";
+    }
+  }
+
+  enum class RasterizationOrderAMD
+  {
+    eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
+    eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( RasterizationOrderAMD value )
+  {
+    switch ( value )
+    {
+      case RasterizationOrderAMD::eStrict : return "Strict";
+      case RasterizationOrderAMD::eRelaxed : return "Relaxed";
+      default: return "invalid";
+    }
+  }
+
+  enum class RayTracingShaderGroupTypeNV
+  {
+    eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV,
+    eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV,
+    eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeNV value )
+  {
+    switch ( value )
+    {
+      case RayTracingShaderGroupTypeNV::eGeneral : return "General";
+      case RayTracingShaderGroupTypeNV::eTrianglesHitGroup : return "TrianglesHitGroup";
+      case RayTracingShaderGroupTypeNV::eProceduralHitGroup : return "ProceduralHitGroup";
+      default: return "invalid";
+    }
+  }
+
+  enum class RenderPassCreateFlagBits : VkRenderPassCreateFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  enum class ResolveModeFlagBitsKHR : VkResolveModeFlagsKHR
+  {
+    eNone = VK_RESOLVE_MODE_NONE_KHR,
+    eSampleZero = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR,
+    eAverage = VK_RESOLVE_MODE_AVERAGE_BIT_KHR,
+    eMin = VK_RESOLVE_MODE_MIN_BIT_KHR,
+    eMax = VK_RESOLVE_MODE_MAX_BIT_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ResolveModeFlagBitsKHR value )
+  {
+    switch ( value )
+    {
+      case ResolveModeFlagBitsKHR::eNone : return "None";
+      case ResolveModeFlagBitsKHR::eSampleZero : return "SampleZero";
+      case ResolveModeFlagBitsKHR::eAverage : return "Average";
+      case ResolveModeFlagBitsKHR::eMin : return "Min";
+      case ResolveModeFlagBitsKHR::eMax : return "Max";
+      default: return "invalid";
+    }
+  }
+
+  enum class Result
+  {
+    eSuccess = VK_SUCCESS,
+    eNotReady = VK_NOT_READY,
+    eTimeout = VK_TIMEOUT,
+    eEventSet = VK_EVENT_SET,
+    eEventReset = VK_EVENT_RESET,
+    eIncomplete = VK_INCOMPLETE,
+    eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY,
+    eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY,
+    eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED,
+    eErrorDeviceLost = VK_ERROR_DEVICE_LOST,
+    eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED,
+    eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT,
+    eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT,
+    eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT,
+    eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER,
+    eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS,
+    eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED,
+    eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL,
+    eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY,
+    eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE,
+    eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR,
+    eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR,
+    eSuboptimalKHR = VK_SUBOPTIMAL_KHR,
+    eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR,
+    eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR,
+    eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT,
+    eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV,
+    eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT,
+    eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT,
+    eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT,
+    eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT,
+    eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT,
+    eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR,
+    eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( Result value )
+  {
+    switch ( value )
+    {
+      case Result::eSuccess : return "Success";
+      case Result::eNotReady : return "NotReady";
+      case Result::eTimeout : return "Timeout";
+      case Result::eEventSet : return "EventSet";
+      case Result::eEventReset : return "EventReset";
+      case Result::eIncomplete : return "Incomplete";
+      case Result::eErrorOutOfHostMemory : return "ErrorOutOfHostMemory";
+      case Result::eErrorOutOfDeviceMemory : return "ErrorOutOfDeviceMemory";
+      case Result::eErrorInitializationFailed : return "ErrorInitializationFailed";
+      case Result::eErrorDeviceLost : return "ErrorDeviceLost";
+      case Result::eErrorMemoryMapFailed : return "ErrorMemoryMapFailed";
+      case Result::eErrorLayerNotPresent : return "ErrorLayerNotPresent";
+      case Result::eErrorExtensionNotPresent : return "ErrorExtensionNotPresent";
+      case Result::eErrorFeatureNotPresent : return "ErrorFeatureNotPresent";
+      case Result::eErrorIncompatibleDriver : return "ErrorIncompatibleDriver";
+      case Result::eErrorTooManyObjects : return "ErrorTooManyObjects";
+      case Result::eErrorFormatNotSupported : return "ErrorFormatNotSupported";
+      case Result::eErrorFragmentedPool : return "ErrorFragmentedPool";
+      case Result::eErrorOutOfPoolMemory : return "ErrorOutOfPoolMemory";
+      case Result::eErrorInvalidExternalHandle : return "ErrorInvalidExternalHandle";
+      case Result::eErrorSurfaceLostKHR : return "ErrorSurfaceLostKHR";
+      case Result::eErrorNativeWindowInUseKHR : return "ErrorNativeWindowInUseKHR";
+      case Result::eSuboptimalKHR : return "SuboptimalKHR";
+      case Result::eErrorOutOfDateKHR : return "ErrorOutOfDateKHR";
+      case Result::eErrorIncompatibleDisplayKHR : return "ErrorIncompatibleDisplayKHR";
+      case Result::eErrorValidationFailedEXT : return "ErrorValidationFailedEXT";
+      case Result::eErrorInvalidShaderNV : return "ErrorInvalidShaderNV";
+      case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT : return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT";
+      case Result::eErrorFragmentationEXT : return "ErrorFragmentationEXT";
+      case Result::eErrorNotPermittedEXT : return "ErrorNotPermittedEXT";
+      case Result::eErrorInvalidDeviceAddressEXT : return "ErrorInvalidDeviceAddressEXT";
+      case Result::eErrorFullScreenExclusiveModeLostEXT : return "ErrorFullScreenExclusiveModeLostEXT";
+      default: return "invalid";
+    }
+  }
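The to_string() overloads above exist exactly for this kind of diagnostics; vk::Result also maps one-to-one onto VkResult, so results from the plain C API can be cast and printed the same way. A minimal sketch (the function name is illustrative):

#include <vulkan/vulkan.hpp>
#include <iostream>

void logOnFailure(VkResult raw) {
    auto result = static_cast<vk::Result>(raw);
    if (result != vk::Result::eSuccess)
        std::cerr << "Vulkan call failed: " << vk::to_string(result) << '\n';
}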
+
+  enum class SampleCountFlagBits : VkSampleCountFlags
+  {
+    e1 = VK_SAMPLE_COUNT_1_BIT,
+    e2 = VK_SAMPLE_COUNT_2_BIT,
+    e4 = VK_SAMPLE_COUNT_4_BIT,
+    e8 = VK_SAMPLE_COUNT_8_BIT,
+    e16 = VK_SAMPLE_COUNT_16_BIT,
+    e32 = VK_SAMPLE_COUNT_32_BIT,
+    e64 = VK_SAMPLE_COUNT_64_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SampleCountFlagBits value )
+  {
+    switch ( value )
+    {
+      case SampleCountFlagBits::e1 : return "1";
+      case SampleCountFlagBits::e2 : return "2";
+      case SampleCountFlagBits::e4 : return "4";
+      case SampleCountFlagBits::e8 : return "8";
+      case SampleCountFlagBits::e16 : return "16";
+      case SampleCountFlagBits::e32 : return "32";
+      case SampleCountFlagBits::e64 : return "64";
+      default: return "invalid";
+    }
+  }
+
+  enum class SamplerAddressMode
+  {
+    eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT,
+    eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
+    eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
+    eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
+    eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,
+    eMirrorClampToEdgeKHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerAddressMode value )
+  {
+    switch ( value )
+    {
+      case SamplerAddressMode::eRepeat : return "Repeat";
+      case SamplerAddressMode::eMirroredRepeat : return "MirroredRepeat";
+      case SamplerAddressMode::eClampToEdge : return "ClampToEdge";
+      case SamplerAddressMode::eClampToBorder : return "ClampToBorder";
+      case SamplerAddressMode::eMirrorClampToEdge : return "MirrorClampToEdge";
+      default: return "invalid";
+    }
+  }
+
+  enum class SamplerCreateFlagBits : VkSamplerCreateFlags
+  {
+    eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT,
+    eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlagBits value )
+  {
+    switch ( value )
+    {
+      case SamplerCreateFlagBits::eSubsampledEXT : return "SubsampledEXT";
+      case SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT : return "SubsampledCoarseReconstructionEXT";
+      default: return "invalid";
+    }
+  }
+
+  enum class SamplerMipmapMode
+  {
+    eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST,
+    eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerMipmapMode value )
+  {
+    switch ( value )
+    {
+      case SamplerMipmapMode::eNearest : return "Nearest";
+      case SamplerMipmapMode::eLinear : return "Linear";
+      default: return "invalid";
+    }
+  }
+
+  enum class SamplerReductionModeEXT
+  {
+    eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT,
+    eMin = VK_SAMPLER_REDUCTION_MODE_MIN_EXT,
+    eMax = VK_SAMPLER_REDUCTION_MODE_MAX_EXT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerReductionModeEXT value )
+  {
+    switch ( value )
+    {
+      case SamplerReductionModeEXT::eWeightedAverage : return "WeightedAverage";
+      case SamplerReductionModeEXT::eMin : return "Min";
+      case SamplerReductionModeEXT::eMax : return "Max";
+      default: return "invalid";
+    }
+  }
+
+  enum class SamplerYcbcrModelConversion
+  {
+    eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
+    eYcbcrIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY,
+    eYcbcr709 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
+    eYcbcr601 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601,
+    eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020
+  };
+  using SamplerYcbcrModelConversionKHR = SamplerYcbcrModelConversion;
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrModelConversion value )
+  {
+    switch ( value )
+    {
+      case SamplerYcbcrModelConversion::eRgbIdentity : return "RgbIdentity";
+      case SamplerYcbcrModelConversion::eYcbcrIdentity : return "YcbcrIdentity";
+      case SamplerYcbcrModelConversion::eYcbcr709 : return "Ycbcr709";
+      case SamplerYcbcrModelConversion::eYcbcr601 : return "Ycbcr601";
+      case SamplerYcbcrModelConversion::eYcbcr2020 : return "Ycbcr2020";
+      default: return "invalid";
+    }
+  }
+
+  enum class SamplerYcbcrRange
+  {
+    eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
+    eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW
+  };
+  using SamplerYcbcrRangeKHR = SamplerYcbcrRange;
+
+  VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrRange value )
+  {
+    switch ( value )
+    {
+      case SamplerYcbcrRange::eItuFull : return "ItuFull";
+      case SamplerYcbcrRange::eItuNarrow : return "ItuNarrow";
+      default: return "invalid";
+    }
+  }
+
+  enum class ScopeNV
+  {
+    eDevice = VK_SCOPE_DEVICE_NV,
+    eWorkgroup = VK_SCOPE_WORKGROUP_NV,
+    eSubgroup = VK_SCOPE_SUBGROUP_NV,
+    eQueueFamily = VK_SCOPE_QUEUE_FAMILY_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ScopeNV value )
+  {
+    switch ( value )
+    {
+      case ScopeNV::eDevice : return "Device";
+      case ScopeNV::eWorkgroup : return "Workgroup";
+      case ScopeNV::eSubgroup : return "Subgroup";
+      case ScopeNV::eQueueFamily : return "QueueFamily";
+      default: return "invalid";
+    }
+  }
+
+  enum class SemaphoreImportFlagBits : VkSemaphoreImportFlags
+  {
+    eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT
+  };
+  using SemaphoreImportFlagBitsKHR = SemaphoreImportFlagBits;
+
+  VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlagBits value )
+  {
+    switch ( value )
+    {
+      case SemaphoreImportFlagBits::eTemporary : return "Temporary";
+      default: return "invalid";
+    }
+  }
+
+  enum class ShaderCorePropertiesFlagBitsAMD : VkShaderCorePropertiesFlagsAMD
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagBitsAMD )
+  {
+    return "(void)";
+  }
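The sampler enums above all meet in vk::SamplerCreateInfo. A minimal sketch of a trilinear repeating sampler, assuming a created vk::Device (destruction is omitted for brevity; the helper name is illustrative):

#include <vulkan/vulkan.hpp>

vk::Sampler createTrilinearSampler(vk::Device device) {
    vk::SamplerCreateInfo info;
    info.magFilter = vk::Filter::eLinear;
    info.minFilter = vk::Filter::eLinear;
    info.mipmapMode = vk::SamplerMipmapMode::eLinear;    // blend between mip levels
    info.addressModeU = vk::SamplerAddressMode::eRepeat; // tile in all three
    info.addressModeV = vk::SamplerAddressMode::eRepeat; // dimensions
    info.addressModeW = vk::SamplerAddressMode::eRepeat;
    return device.createSampler(info);
}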
+
+  enum class ShaderFloatControlsIndependenceKHR
+  {
+    e32BitOnly = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR,
+    eAll = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR,
+    eNone = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderFloatControlsIndependenceKHR value )
+  {
+    switch ( value )
+    {
+      case ShaderFloatControlsIndependenceKHR::e32BitOnly : return "32BitOnly";
+      case ShaderFloatControlsIndependenceKHR::eAll : return "All";
+      case ShaderFloatControlsIndependenceKHR::eNone : return "None";
+      default: return "invalid";
+    }
+  }
+
+  enum class ShaderInfoTypeAMD
+  {
+    eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD,
+    eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD,
+    eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderInfoTypeAMD value )
+  {
+    switch ( value )
+    {
+      case ShaderInfoTypeAMD::eStatistics : return "Statistics";
+      case ShaderInfoTypeAMD::eBinary : return "Binary";
+      case ShaderInfoTypeAMD::eDisassembly : return "Disassembly";
+      default: return "invalid";
+    }
+  }
+
+  enum class ShaderModuleCreateFlagBits : VkShaderModuleCreateFlags
+  {};
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlagBits )
+  {
+    return "(void)";
+  }
+
+  enum class ShaderStageFlagBits : VkShaderStageFlags
+  {
+    eVertex = VK_SHADER_STAGE_VERTEX_BIT,
+    eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
+    eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
+    eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT,
+    eFragment = VK_SHADER_STAGE_FRAGMENT_BIT,
+    eCompute = VK_SHADER_STAGE_COMPUTE_BIT,
+    eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS,
+    eAll = VK_SHADER_STAGE_ALL,
+    eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV,
+    eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV,
+    eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV,
+    eMissNV = VK_SHADER_STAGE_MISS_BIT_NV,
+    eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV,
+    eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV,
+    eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV,
+    eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ShaderStageFlagBits value )
+  {
+    switch ( value )
+    {
+      case ShaderStageFlagBits::eVertex : return "Vertex";
+      case ShaderStageFlagBits::eTessellationControl : return "TessellationControl";
+      case ShaderStageFlagBits::eTessellationEvaluation : return "TessellationEvaluation";
+      case ShaderStageFlagBits::eGeometry : return "Geometry";
+      case ShaderStageFlagBits::eFragment : return "Fragment";
+      case ShaderStageFlagBits::eCompute : return "Compute";
+      case ShaderStageFlagBits::eAllGraphics : return "AllGraphics";
+      case ShaderStageFlagBits::eAll : return "All";
+      case ShaderStageFlagBits::eRaygenNV : return "RaygenNV";
+      case ShaderStageFlagBits::eAnyHitNV : return "AnyHitNV";
+      case ShaderStageFlagBits::eClosestHitNV : return "ClosestHitNV";
+      case ShaderStageFlagBits::eMissNV : return "MissNV";
+      case ShaderStageFlagBits::eIntersectionNV : return "IntersectionNV";
+      case ShaderStageFlagBits::eCallableNV : return "CallableNV";
+      case ShaderStageFlagBits::eTaskNV : return "TaskNV";
+      case ShaderStageFlagBits::eMeshNV : return "MeshNV";
+      default: return "invalid";
+    }
+  }
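ShaderStageFlagBits values OR together wherever a resource has to be visible to several stages, for example in a push-constant range. A minimal sketch (the 64-byte range and the helper name are arbitrary choices for illustration; destruction is again omitted):

#include <vulkan/vulkan.hpp>

vk::PipelineLayout createPushConstantLayout(vk::Device device) {
    vk::PushConstantRange range(
        vk::ShaderStageFlagBits::eVertex | vk::ShaderStageFlagBits::eFragment,
        0,    // offset
        64);  // size in bytes
    vk::PipelineLayoutCreateInfo info;
    info.pushConstantRangeCount = 1;
    info.pPushConstantRanges = &range;
    return device.createPipelineLayout(info);
}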
+
+  enum class ShadingRatePaletteEntryNV
+  {
+    eNoInvocations = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV,
+    e16InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV,
+    e8InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV,
+    e4InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV,
+    e2InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV,
+    e1InvocationPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV,
+    e1InvocationPer2X1Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV,
+    e1InvocationPer1X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV,
+    e1InvocationPer2X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV,
+    e1InvocationPer4X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV,
+    e1InvocationPer2X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV,
+    e1InvocationPer4X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( ShadingRatePaletteEntryNV value )
+  {
+    switch ( value )
+    {
+      case ShadingRatePaletteEntryNV::eNoInvocations : return "NoInvocations";
+      case ShadingRatePaletteEntryNV::e16InvocationsPerPixel : return "16InvocationsPerPixel";
+      case ShadingRatePaletteEntryNV::e8InvocationsPerPixel : return "8InvocationsPerPixel";
+      case ShadingRatePaletteEntryNV::e4InvocationsPerPixel : return "4InvocationsPerPixel";
+      case ShadingRatePaletteEntryNV::e2InvocationsPerPixel : return "2InvocationsPerPixel";
+      case ShadingRatePaletteEntryNV::e1InvocationPerPixel : return "1InvocationPerPixel";
+      case ShadingRatePaletteEntryNV::e1InvocationPer2X1Pixels : return "1InvocationPer2X1Pixels";
+      case ShadingRatePaletteEntryNV::e1InvocationPer1X2Pixels : return "1InvocationPer1X2Pixels";
+      case ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels : return "1InvocationPer2X2Pixels";
+      case ShadingRatePaletteEntryNV::e1InvocationPer4X2Pixels : return "1InvocationPer4X2Pixels";
+      case ShadingRatePaletteEntryNV::e1InvocationPer2X4Pixels : return "1InvocationPer2X4Pixels";
+      case ShadingRatePaletteEntryNV::e1InvocationPer4X4Pixels : return "1InvocationPer4X4Pixels";
+      default: return "invalid";
+    }
+  }
+
+  enum class SharingMode
+  {
+    eExclusive = VK_SHARING_MODE_EXCLUSIVE,
+    eConcurrent = VK_SHARING_MODE_CONCURRENT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SharingMode value )
+  {
+    switch ( value )
+    {
+      case SharingMode::eExclusive : return "Exclusive";
+      case SharingMode::eConcurrent : return "Concurrent";
+      default: return "invalid";
+    }
+  }
+
+  enum class SparseImageFormatFlagBits : VkSparseImageFormatFlags
+  {
+    eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
+    eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,
+    eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlagBits value )
+  {
+    switch ( value )
+    {
+      case SparseImageFormatFlagBits::eSingleMiptail : return "SingleMiptail";
+      case SparseImageFormatFlagBits::eAlignedMipSize : return "AlignedMipSize";
+      case SparseImageFormatFlagBits::eNonstandardBlockSize : return "NonstandardBlockSize";
+      default: return "invalid";
+    }
+  }
+
+  enum class SparseMemoryBindFlagBits : VkSparseMemoryBindFlags
+  {
+    eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlagBits value )
+  {
+    switch ( value )
+    {
+      case SparseMemoryBindFlagBits::eMetadata : return "Metadata";
+      default: return "invalid";
+    }
+  }
+
+  enum class StencilFaceFlagBits : VkStencilFaceFlags
+  {
+    eFront = VK_STENCIL_FACE_FRONT_BIT,
+    eBack = VK_STENCIL_FACE_BACK_BIT,
+    eFrontAndBack = VK_STENCIL_FACE_FRONT_AND_BACK,
+    eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( StencilFaceFlagBits value )
+  {
+    switch ( value )
+    {
+      case StencilFaceFlagBits::eFront : return "Front";
+      case StencilFaceFlagBits::eBack : return "Back";
+      case StencilFaceFlagBits::eFrontAndBack : return "FrontAndBack";
+      default: return "invalid";
+    }
+  }
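StencilFaceFlagBits selects which of the two per-face stencil states a dynamic-state command updates; eFrontAndBack simply covers both (eVkStencilFrontAndBack is a deprecated alias for the same value, which is why it has no to_string case). A minimal sketch, assuming VK_DYNAMIC_STATE_STENCIL_REFERENCE was enabled on the bound pipeline:

#include <vulkan/vulkan.hpp>
#include <cstdint>

void setStencilRef(vk::CommandBuffer commandBuffer, uint32_t reference) {
    commandBuffer.setStencilReference(vk::StencilFaceFlagBits::eFrontAndBack, reference);
}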
+
+  enum class StencilOp
+  {
+    eKeep = VK_STENCIL_OP_KEEP,
+    eZero = VK_STENCIL_OP_ZERO,
+    eReplace = VK_STENCIL_OP_REPLACE,
+    eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP,
+    eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP,
+    eInvert = VK_STENCIL_OP_INVERT,
+    eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP,
+    eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP
+  };
+
+  VULKAN_HPP_INLINE std::string to_string( StencilOp value )
+  {
+    switch ( value )
+    {
+      case StencilOp::eKeep : return "Keep";
+      case StencilOp::eZero : return "Zero";
+      case StencilOp::eReplace : return "Replace";
+      case StencilOp::eIncrementAndClamp : return "IncrementAndClamp";
+      case StencilOp::eDecrementAndClamp : return "DecrementAndClamp";
+      case StencilOp::eInvert : return "Invert";
+      case StencilOp::eIncrementAndWrap : return "IncrementAndWrap";
+      case StencilOp::eDecrementAndWrap : return "DecrementAndWrap";
+      default: return "invalid";
+    }
+  }
+
+  enum class StructureType
+  {
+    eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO,
+    eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
+    eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
+    eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
+    eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO,
+    eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
+    eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
+    eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO,
+    eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+    eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
+    eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+    eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
+    eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+    eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
+    eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+    eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+    eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
+    ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO,
+    ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
+    ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
+    ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
+    ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
+    ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
+    ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
+    ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
+    ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
+    ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
+    ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
+    eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
+    eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
+    ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
+    eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
+    eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
+    eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
+    eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+    eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
+    eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET,
+    eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
+    eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
+    eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
+    eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+    eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+    eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+    eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+    eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+    eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+    eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
+    eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO,
+    eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO,
+    ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES,
+    eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
+    eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
+    ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES,
+    eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
+    eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
+    eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO,
+    eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO,
+    eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO,
+    eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO,
+    eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO,
+    eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO,
+    eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO,
+    ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES,
+    eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO,
+    eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
+    eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
+    eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
+    eMemoryRequirements2 = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
+    eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2,
+    ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
+    ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
+    eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
+    eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
+    ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
+    eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2,
+    ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
+    eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2,
+    ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2,
+    ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES,
+    eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
+    eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,
+    ePipelineTessellationDomainOriginStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO,
+    eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO,
+    ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
+    ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES,
+    ePhysicalDeviceVariablePointersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES,
+    eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO,
+    ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES,
+    ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES,
+    eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2,
+    eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
+    eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO,
+    eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
+    eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
+    ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES,
+    eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
+    eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
+    ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
+    eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES,
+    ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO,
+    eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES,
+    ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES,
+    eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
+    eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
+    eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
+    ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
+    eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES,
+    eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO,
+    eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
+    ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
+    eExternalSemaphoreProperties = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
+    ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
+    eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
+    ePhysicalDeviceShaderDrawParametersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES,
+    eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
+    ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
+    eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR,
+    eImageSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR,
+    eBindImageMemorySwapchainInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR,
+    eAcquireNextImageInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR,
+    eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR,
+    eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR,
+    eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR,
+    eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR,
+    eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR,
+    eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR,
+    eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR,
+    eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR,
+    eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR,
+    eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,
+    eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
+    ePipelineRasterizationStateRasterizationOrderAMD = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD,
+    eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT,
+    eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT,
+    eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT,
+    eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV,
+    eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV,
+    eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV,
+    ePhysicalDeviceTransformFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT,
+    ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT,
+    ePipelineRasterizationStateStreamCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT,
+    eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX,
+    eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD,
+    eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP,
+    ePhysicalDeviceCornerSampledImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV,
+    eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV,
+    eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV,
+    eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV,
+    eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV,
+    eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV,
+    eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT,
+    eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN,
+    ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT,
+    eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT,
+    ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT,
+    eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
+    eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
+    eMemoryWin32HandlePropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR,
+    eMemoryGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR,
+    eImportMemoryFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
+    eMemoryFdPropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR,
+    eMemoryGetFdInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
+    eWin32KeyedMutexAcquireReleaseInfoKHR = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR,
+    eImportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
+    eExportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
+    eD3D12FenceSubmitInfoKHR = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR,
+    eSemaphoreGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR,
+    eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
+    eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
+    ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR,
+    eCommandBufferInheritanceConditionalRenderingInfoEXT = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT,
+    ePhysicalDeviceConditionalRenderingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT,
+    eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT,
+    ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR,
+    ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
+    eObjectTableCreateInfoNVX = VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX,
+    eIndirectCommandsLayoutCreateInfoNVX = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX,
+    eCmdProcessCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX,
+    eCmdReserveSpaceForCommandsInfoNVX = VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX,
+    eDeviceGeneratedCommandsLimitsNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX,
+    eDeviceGeneratedCommandsFeaturesNVX = VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX,
+    ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV,
+    eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT,
+    eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT,
+    eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT,
+    eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT,
+    eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT,
+    ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE,
+    ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX,
+    ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT,
+    ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceConservativeRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT,
+    ePipelineRasterizationConservativeStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceDepthClipEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT,
+    ePipelineRasterizationDepthClipStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT,
+    eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT,
+    ePhysicalDeviceImagelessFramebufferFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR,
+    eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR,
+    eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR,
+    eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR,
+    eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR,
+    eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR,
+    eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR,
+    eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR,
+    eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR,
+    eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR,
+    eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR,
+    eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR,
+    eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR,
+    eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR,
+    eFenceGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR,
+    eImportFenceFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
+    eFenceGetFdInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR,
+    ePhysicalDeviceSurfaceInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
+    eSurfaceCapabilities2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR,
+    eSurfaceFormat2KHR = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR,
+    eDisplayProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR,
+    eDisplayPlaneProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR,
+    eDisplayModeProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR,
+    eDisplayPlaneInfo2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR,
+    eDisplayPlaneCapabilities2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR,
+    eIosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK,
+    eMacosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK,
+    eDebugUtilsObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT,
+    eDebugUtilsObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT,
+    eDebugUtilsLabelEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT,
+    eDebugUtilsMessengerCallbackDataEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT,
+    eDebugUtilsMessengerCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
+    eAndroidHardwareBufferUsageANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID,
+    eAndroidHardwareBufferPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID,
+    eAndroidHardwareBufferFormatPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID,
+    eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
+    eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
+    eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,
+    ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
+    eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
+    ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT,
+    ePhysicalDeviceInlineUniformBlockPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT,
+    eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT,
+    eDescriptorPoolInlineUniformBlockCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT,
+    eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT,
+    eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT,
+    ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT,
+    eMultisamplePropertiesEXT = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT,
+    eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR,
+    ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT,
+    ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT,
+    ePipelineColorBlendAdvancedStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT,
+    ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV,
+    ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceShaderSmBuiltinsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV,
+    ePhysicalDeviceShaderSmBuiltinsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV,
+    eDrmFormatModifierPropertiesListEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
+    eDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
+    ePhysicalDeviceImageDrmFormatModifierInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
+    eImageDrmFormatModifierListCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT,
+    eImageDrmFormatModifierExplicitCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
+    eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
+    eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT,
+    eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT,
+    eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT,
+    ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT,
+    ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT,
+    eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT,
+    eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT,
+    ePipelineViewportShadingRateImageStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV,
+    ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV,
+    ePipelineViewportCoarseSampleOrderStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV,
+    eRayTracingPipelineCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV,
+    eAccelerationStructureCreateInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV,
+    eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV,
+    eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV,
+    eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV,
+    eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV,
+    eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV,
+    eAccelerationStructureMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV,
+    ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV,
+    eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV,
+    eAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV,
+    ePhysicalDeviceRepresentativeFragmentTestFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV,
+    ePipelineRepresentativeFragmentTestStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceImageViewImageFormatInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT,
+    eFilterCubicImageViewImageFormatPropertiesEXT = VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT,
+    eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT,
+    ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR,
+    eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
+    eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT,
+    ePhysicalDeviceExternalMemoryHostPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT,
+    ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR,
+    ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD,
+    eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT,
+    ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD,
+    eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD,
+    ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT,
+    ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT,
+    ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP,
+    ePipelineCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT,
+    ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR,
+    ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR,
+    ePhysicalDeviceDepthStencilResolvePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR,
+    eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR,
+    ePhysicalDeviceComputeShaderDerivativesFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV,
+    ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV,
+    ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV,
+    ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV,
+    ePhysicalDeviceShaderImageFootprintFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV,
+    ePipelineViewportExclusiveScissorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV,
+    eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV,
+    eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV,
+    ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL,
+    eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL,
+    eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL,
+    ePerformanceMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL,
+    ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL,
+    ePerformanceOverrideInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL,
+    ePerformanceConfigurationAcquireInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL,
+    ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR,
+    ePhysicalDevicePciBusInfoPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT,
+    eDisplayNativeHdrSurfaceCapabilitiesAMD = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD,
+    eSwapchainDisplayNativeHdrCreateInfoAMD = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD,
+    eImagepipeSurfaceCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA,
+    eMetalSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT,
+    ePhysicalDeviceFragmentDensityMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT,
+    ePhysicalDeviceFragmentDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT,
+    eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT,
+    ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT,
+    ePhysicalDeviceSubgroupSizeControlPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT,
+    ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT,
ePhysicalDeviceSubgroupSizeControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT, + ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD, + ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD, + ePhysicalDeviceMemoryBudgetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT, + ePhysicalDeviceMemoryPriorityFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT, + eMemoryPriorityAllocateInfoEXT = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT, + eSurfaceProtectedCapabilitiesKHR = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR, + ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV, + ePhysicalDeviceBufferDeviceAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT, + eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT, + eBufferDeviceAddressCreateInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT, + eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT, + eValidationFeaturesEXT = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT, + ePhysicalDeviceCooperativeMatrixFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV, + eCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV, + ePhysicalDeviceCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV, + ePhysicalDeviceCoverageReductionModeFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV, + ePipelineCoverageReductionStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV, + eFramebufferMixedSamplesCombinationNV = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV, + ePhysicalDeviceFragmentShaderInterlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT, + ePhysicalDeviceYcbcrImageArraysFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT, + ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR, + eSurfaceFullScreenExclusiveInfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT, + eSurfaceCapabilitiesFullScreenExclusiveEXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT, + eSurfaceFullScreenExclusiveWin32InfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT, + eHeadlessSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT, + ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT, + ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT, + ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT, + ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT, + ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, + ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR, + ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR, + 
ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR, + ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR, + ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR, + ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR, + ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT, + ePhysicalDeviceTexelBufferAlignmentFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT, + ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT, + ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, + ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES, + eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT, + eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR, + ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR, + ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR, + ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR, + ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR, + eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR, + eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR, + ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR, + eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR, + ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR, + eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR, + ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR, + eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR, + eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR, + eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR, + eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR, + eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR, + eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR, + eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR, + ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR, + eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR, + ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR, + eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR, + ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR, + eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR, + ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR, + eExternalMemoryBufferCreateInfoKHR = 
VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR, + eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR, + eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR, + ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR, + eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR, + eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR, + ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR, + ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR, + eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR, + ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR, + eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR, + eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR, + ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR, + eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR, + eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, + ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR, + ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR, + ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, + eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR, + eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR, + eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR, + eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR, + eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR, + eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR, + eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR, + eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR, + eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR, + eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR, + eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR, + ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR, + eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR, + eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR, + eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR, + ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR, + eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR, + ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( StructureType value 
) + { + switch ( value ) + { + case StructureType::eApplicationInfo : return "ApplicationInfo"; + case StructureType::eInstanceCreateInfo : return "InstanceCreateInfo"; + case StructureType::eDeviceQueueCreateInfo : return "DeviceQueueCreateInfo"; + case StructureType::eDeviceCreateInfo : return "DeviceCreateInfo"; + case StructureType::eSubmitInfo : return "SubmitInfo"; + case StructureType::eMemoryAllocateInfo : return "MemoryAllocateInfo"; + case StructureType::eMappedMemoryRange : return "MappedMemoryRange"; + case StructureType::eBindSparseInfo : return "BindSparseInfo"; + case StructureType::eFenceCreateInfo : return "FenceCreateInfo"; + case StructureType::eSemaphoreCreateInfo : return "SemaphoreCreateInfo"; + case StructureType::eEventCreateInfo : return "EventCreateInfo"; + case StructureType::eQueryPoolCreateInfo : return "QueryPoolCreateInfo"; + case StructureType::eBufferCreateInfo : return "BufferCreateInfo"; + case StructureType::eBufferViewCreateInfo : return "BufferViewCreateInfo"; + case StructureType::eImageCreateInfo : return "ImageCreateInfo"; + case StructureType::eImageViewCreateInfo : return "ImageViewCreateInfo"; + case StructureType::eShaderModuleCreateInfo : return "ShaderModuleCreateInfo"; + case StructureType::ePipelineCacheCreateInfo : return "PipelineCacheCreateInfo"; + case StructureType::ePipelineShaderStageCreateInfo : return "PipelineShaderStageCreateInfo"; + case StructureType::ePipelineVertexInputStateCreateInfo : return "PipelineVertexInputStateCreateInfo"; + case StructureType::ePipelineInputAssemblyStateCreateInfo : return "PipelineInputAssemblyStateCreateInfo"; + case StructureType::ePipelineTessellationStateCreateInfo : return "PipelineTessellationStateCreateInfo"; + case StructureType::ePipelineViewportStateCreateInfo : return "PipelineViewportStateCreateInfo"; + case StructureType::ePipelineRasterizationStateCreateInfo : return "PipelineRasterizationStateCreateInfo"; + case StructureType::ePipelineMultisampleStateCreateInfo : return "PipelineMultisampleStateCreateInfo"; + case StructureType::ePipelineDepthStencilStateCreateInfo : return "PipelineDepthStencilStateCreateInfo"; + case StructureType::ePipelineColorBlendStateCreateInfo : return "PipelineColorBlendStateCreateInfo"; + case StructureType::ePipelineDynamicStateCreateInfo : return "PipelineDynamicStateCreateInfo"; + case StructureType::eGraphicsPipelineCreateInfo : return "GraphicsPipelineCreateInfo"; + case StructureType::eComputePipelineCreateInfo : return "ComputePipelineCreateInfo"; + case StructureType::ePipelineLayoutCreateInfo : return "PipelineLayoutCreateInfo"; + case StructureType::eSamplerCreateInfo : return "SamplerCreateInfo"; + case StructureType::eDescriptorSetLayoutCreateInfo : return "DescriptorSetLayoutCreateInfo"; + case StructureType::eDescriptorPoolCreateInfo : return "DescriptorPoolCreateInfo"; + case StructureType::eDescriptorSetAllocateInfo : return "DescriptorSetAllocateInfo"; + case StructureType::eWriteDescriptorSet : return "WriteDescriptorSet"; + case StructureType::eCopyDescriptorSet : return "CopyDescriptorSet"; + case StructureType::eFramebufferCreateInfo : return "FramebufferCreateInfo"; + case StructureType::eRenderPassCreateInfo : return "RenderPassCreateInfo"; + case StructureType::eCommandPoolCreateInfo : return "CommandPoolCreateInfo"; + case StructureType::eCommandBufferAllocateInfo : return "CommandBufferAllocateInfo"; + case StructureType::eCommandBufferInheritanceInfo : return "CommandBufferInheritanceInfo"; + case 
StructureType::eCommandBufferBeginInfo : return "CommandBufferBeginInfo"; + case StructureType::eRenderPassBeginInfo : return "RenderPassBeginInfo"; + case StructureType::eBufferMemoryBarrier : return "BufferMemoryBarrier"; + case StructureType::eImageMemoryBarrier : return "ImageMemoryBarrier"; + case StructureType::eMemoryBarrier : return "MemoryBarrier"; + case StructureType::eLoaderInstanceCreateInfo : return "LoaderInstanceCreateInfo"; + case StructureType::eLoaderDeviceCreateInfo : return "LoaderDeviceCreateInfo"; + case StructureType::ePhysicalDeviceSubgroupProperties : return "PhysicalDeviceSubgroupProperties"; + case StructureType::eBindBufferMemoryInfo : return "BindBufferMemoryInfo"; + case StructureType::eBindImageMemoryInfo : return "BindImageMemoryInfo"; + case StructureType::ePhysicalDevice16BitStorageFeatures : return "PhysicalDevice16BitStorageFeatures"; + case StructureType::eMemoryDedicatedRequirements : return "MemoryDedicatedRequirements"; + case StructureType::eMemoryDedicatedAllocateInfo : return "MemoryDedicatedAllocateInfo"; + case StructureType::eMemoryAllocateFlagsInfo : return "MemoryAllocateFlagsInfo"; + case StructureType::eDeviceGroupRenderPassBeginInfo : return "DeviceGroupRenderPassBeginInfo"; + case StructureType::eDeviceGroupCommandBufferBeginInfo : return "DeviceGroupCommandBufferBeginInfo"; + case StructureType::eDeviceGroupSubmitInfo : return "DeviceGroupSubmitInfo"; + case StructureType::eDeviceGroupBindSparseInfo : return "DeviceGroupBindSparseInfo"; + case StructureType::eBindBufferMemoryDeviceGroupInfo : return "BindBufferMemoryDeviceGroupInfo"; + case StructureType::eBindImageMemoryDeviceGroupInfo : return "BindImageMemoryDeviceGroupInfo"; + case StructureType::ePhysicalDeviceGroupProperties : return "PhysicalDeviceGroupProperties"; + case StructureType::eDeviceGroupDeviceCreateInfo : return "DeviceGroupDeviceCreateInfo"; + case StructureType::eBufferMemoryRequirementsInfo2 : return "BufferMemoryRequirementsInfo2"; + case StructureType::eImageMemoryRequirementsInfo2 : return "ImageMemoryRequirementsInfo2"; + case StructureType::eImageSparseMemoryRequirementsInfo2 : return "ImageSparseMemoryRequirementsInfo2"; + case StructureType::eMemoryRequirements2 : return "MemoryRequirements2"; + case StructureType::eSparseImageMemoryRequirements2 : return "SparseImageMemoryRequirements2"; + case StructureType::ePhysicalDeviceFeatures2 : return "PhysicalDeviceFeatures2"; + case StructureType::ePhysicalDeviceProperties2 : return "PhysicalDeviceProperties2"; + case StructureType::eFormatProperties2 : return "FormatProperties2"; + case StructureType::eImageFormatProperties2 : return "ImageFormatProperties2"; + case StructureType::ePhysicalDeviceImageFormatInfo2 : return "PhysicalDeviceImageFormatInfo2"; + case StructureType::eQueueFamilyProperties2 : return "QueueFamilyProperties2"; + case StructureType::ePhysicalDeviceMemoryProperties2 : return "PhysicalDeviceMemoryProperties2"; + case StructureType::eSparseImageFormatProperties2 : return "SparseImageFormatProperties2"; + case StructureType::ePhysicalDeviceSparseImageFormatInfo2 : return "PhysicalDeviceSparseImageFormatInfo2"; + case StructureType::ePhysicalDevicePointClippingProperties : return "PhysicalDevicePointClippingProperties"; + case StructureType::eRenderPassInputAttachmentAspectCreateInfo : return "RenderPassInputAttachmentAspectCreateInfo"; + case StructureType::eImageViewUsageCreateInfo : return "ImageViewUsageCreateInfo"; + case StructureType::ePipelineTessellationDomainOriginStateCreateInfo : 
return "PipelineTessellationDomainOriginStateCreateInfo"; + case StructureType::eRenderPassMultiviewCreateInfo : return "RenderPassMultiviewCreateInfo"; + case StructureType::ePhysicalDeviceMultiviewFeatures : return "PhysicalDeviceMultiviewFeatures"; + case StructureType::ePhysicalDeviceMultiviewProperties : return "PhysicalDeviceMultiviewProperties"; + case StructureType::ePhysicalDeviceVariablePointersFeatures : return "PhysicalDeviceVariablePointersFeatures"; + case StructureType::eProtectedSubmitInfo : return "ProtectedSubmitInfo"; + case StructureType::ePhysicalDeviceProtectedMemoryFeatures : return "PhysicalDeviceProtectedMemoryFeatures"; + case StructureType::ePhysicalDeviceProtectedMemoryProperties : return "PhysicalDeviceProtectedMemoryProperties"; + case StructureType::eDeviceQueueInfo2 : return "DeviceQueueInfo2"; + case StructureType::eSamplerYcbcrConversionCreateInfo : return "SamplerYcbcrConversionCreateInfo"; + case StructureType::eSamplerYcbcrConversionInfo : return "SamplerYcbcrConversionInfo"; + case StructureType::eBindImagePlaneMemoryInfo : return "BindImagePlaneMemoryInfo"; + case StructureType::eImagePlaneMemoryRequirementsInfo : return "ImagePlaneMemoryRequirementsInfo"; + case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures : return "PhysicalDeviceSamplerYcbcrConversionFeatures"; + case StructureType::eSamplerYcbcrConversionImageFormatProperties : return "SamplerYcbcrConversionImageFormatProperties"; + case StructureType::eDescriptorUpdateTemplateCreateInfo : return "DescriptorUpdateTemplateCreateInfo"; + case StructureType::ePhysicalDeviceExternalImageFormatInfo : return "PhysicalDeviceExternalImageFormatInfo"; + case StructureType::eExternalImageFormatProperties : return "ExternalImageFormatProperties"; + case StructureType::ePhysicalDeviceExternalBufferInfo : return "PhysicalDeviceExternalBufferInfo"; + case StructureType::eExternalBufferProperties : return "ExternalBufferProperties"; + case StructureType::ePhysicalDeviceIdProperties : return "PhysicalDeviceIdProperties"; + case StructureType::eExternalMemoryBufferCreateInfo : return "ExternalMemoryBufferCreateInfo"; + case StructureType::eExternalMemoryImageCreateInfo : return "ExternalMemoryImageCreateInfo"; + case StructureType::eExportMemoryAllocateInfo : return "ExportMemoryAllocateInfo"; + case StructureType::ePhysicalDeviceExternalFenceInfo : return "PhysicalDeviceExternalFenceInfo"; + case StructureType::eExternalFenceProperties : return "ExternalFenceProperties"; + case StructureType::eExportFenceCreateInfo : return "ExportFenceCreateInfo"; + case StructureType::eExportSemaphoreCreateInfo : return "ExportSemaphoreCreateInfo"; + case StructureType::ePhysicalDeviceExternalSemaphoreInfo : return "PhysicalDeviceExternalSemaphoreInfo"; + case StructureType::eExternalSemaphoreProperties : return "ExternalSemaphoreProperties"; + case StructureType::ePhysicalDeviceMaintenance3Properties : return "PhysicalDeviceMaintenance3Properties"; + case StructureType::eDescriptorSetLayoutSupport : return "DescriptorSetLayoutSupport"; + case StructureType::ePhysicalDeviceShaderDrawParametersFeatures : return "PhysicalDeviceShaderDrawParametersFeatures"; + case StructureType::eSwapchainCreateInfoKHR : return "SwapchainCreateInfoKHR"; + case StructureType::ePresentInfoKHR : return "PresentInfoKHR"; + case StructureType::eDeviceGroupPresentCapabilitiesKHR : return "DeviceGroupPresentCapabilitiesKHR"; + case StructureType::eImageSwapchainCreateInfoKHR : return "ImageSwapchainCreateInfoKHR"; + case 
StructureType::eBindImageMemorySwapchainInfoKHR : return "BindImageMemorySwapchainInfoKHR"; + case StructureType::eAcquireNextImageInfoKHR : return "AcquireNextImageInfoKHR"; + case StructureType::eDeviceGroupPresentInfoKHR : return "DeviceGroupPresentInfoKHR"; + case StructureType::eDeviceGroupSwapchainCreateInfoKHR : return "DeviceGroupSwapchainCreateInfoKHR"; + case StructureType::eDisplayModeCreateInfoKHR : return "DisplayModeCreateInfoKHR"; + case StructureType::eDisplaySurfaceCreateInfoKHR : return "DisplaySurfaceCreateInfoKHR"; + case StructureType::eDisplayPresentInfoKHR : return "DisplayPresentInfoKHR"; + case StructureType::eXlibSurfaceCreateInfoKHR : return "XlibSurfaceCreateInfoKHR"; + case StructureType::eXcbSurfaceCreateInfoKHR : return "XcbSurfaceCreateInfoKHR"; + case StructureType::eWaylandSurfaceCreateInfoKHR : return "WaylandSurfaceCreateInfoKHR"; + case StructureType::eAndroidSurfaceCreateInfoKHR : return "AndroidSurfaceCreateInfoKHR"; + case StructureType::eWin32SurfaceCreateInfoKHR : return "Win32SurfaceCreateInfoKHR"; + case StructureType::eDebugReportCallbackCreateInfoEXT : return "DebugReportCallbackCreateInfoEXT"; + case StructureType::ePipelineRasterizationStateRasterizationOrderAMD : return "PipelineRasterizationStateRasterizationOrderAMD"; + case StructureType::eDebugMarkerObjectNameInfoEXT : return "DebugMarkerObjectNameInfoEXT"; + case StructureType::eDebugMarkerObjectTagInfoEXT : return "DebugMarkerObjectTagInfoEXT"; + case StructureType::eDebugMarkerMarkerInfoEXT : return "DebugMarkerMarkerInfoEXT"; + case StructureType::eDedicatedAllocationImageCreateInfoNV : return "DedicatedAllocationImageCreateInfoNV"; + case StructureType::eDedicatedAllocationBufferCreateInfoNV : return "DedicatedAllocationBufferCreateInfoNV"; + case StructureType::eDedicatedAllocationMemoryAllocateInfoNV : return "DedicatedAllocationMemoryAllocateInfoNV"; + case StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT : return "PhysicalDeviceTransformFeedbackFeaturesEXT"; + case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT : return "PhysicalDeviceTransformFeedbackPropertiesEXT"; + case StructureType::ePipelineRasterizationStateStreamCreateInfoEXT : return "PipelineRasterizationStateStreamCreateInfoEXT"; + case StructureType::eImageViewHandleInfoNVX : return "ImageViewHandleInfoNVX"; + case StructureType::eTextureLodGatherFormatPropertiesAMD : return "TextureLodGatherFormatPropertiesAMD"; + case StructureType::eStreamDescriptorSurfaceCreateInfoGGP : return "StreamDescriptorSurfaceCreateInfoGGP"; + case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV : return "PhysicalDeviceCornerSampledImageFeaturesNV"; + case StructureType::eExternalMemoryImageCreateInfoNV : return "ExternalMemoryImageCreateInfoNV"; + case StructureType::eExportMemoryAllocateInfoNV : return "ExportMemoryAllocateInfoNV"; + case StructureType::eImportMemoryWin32HandleInfoNV : return "ImportMemoryWin32HandleInfoNV"; + case StructureType::eExportMemoryWin32HandleInfoNV : return "ExportMemoryWin32HandleInfoNV"; + case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV : return "Win32KeyedMutexAcquireReleaseInfoNV"; + case StructureType::eValidationFlagsEXT : return "ValidationFlagsEXT"; + case StructureType::eViSurfaceCreateInfoNN : return "ViSurfaceCreateInfoNN"; + case StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT : return "PhysicalDeviceTextureCompressionAstcHdrFeaturesEXT"; + case StructureType::eImageViewAstcDecodeModeEXT : return "ImageViewAstcDecodeModeEXT"; + case 
StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT : return "PhysicalDeviceAstcDecodeFeaturesEXT"; + case StructureType::eImportMemoryWin32HandleInfoKHR : return "ImportMemoryWin32HandleInfoKHR"; + case StructureType::eExportMemoryWin32HandleInfoKHR : return "ExportMemoryWin32HandleInfoKHR"; + case StructureType::eMemoryWin32HandlePropertiesKHR : return "MemoryWin32HandlePropertiesKHR"; + case StructureType::eMemoryGetWin32HandleInfoKHR : return "MemoryGetWin32HandleInfoKHR"; + case StructureType::eImportMemoryFdInfoKHR : return "ImportMemoryFdInfoKHR"; + case StructureType::eMemoryFdPropertiesKHR : return "MemoryFdPropertiesKHR"; + case StructureType::eMemoryGetFdInfoKHR : return "MemoryGetFdInfoKHR"; + case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR : return "Win32KeyedMutexAcquireReleaseInfoKHR"; + case StructureType::eImportSemaphoreWin32HandleInfoKHR : return "ImportSemaphoreWin32HandleInfoKHR"; + case StructureType::eExportSemaphoreWin32HandleInfoKHR : return "ExportSemaphoreWin32HandleInfoKHR"; + case StructureType::eD3D12FenceSubmitInfoKHR : return "D3D12FenceSubmitInfoKHR"; + case StructureType::eSemaphoreGetWin32HandleInfoKHR : return "SemaphoreGetWin32HandleInfoKHR"; + case StructureType::eImportSemaphoreFdInfoKHR : return "ImportSemaphoreFdInfoKHR"; + case StructureType::eSemaphoreGetFdInfoKHR : return "SemaphoreGetFdInfoKHR"; + case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR : return "PhysicalDevicePushDescriptorPropertiesKHR"; + case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT : return "CommandBufferInheritanceConditionalRenderingInfoEXT"; + case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT : return "PhysicalDeviceConditionalRenderingFeaturesEXT"; + case StructureType::eConditionalRenderingBeginInfoEXT : return "ConditionalRenderingBeginInfoEXT"; + case StructureType::ePhysicalDeviceShaderFloat16Int8FeaturesKHR : return "PhysicalDeviceShaderFloat16Int8FeaturesKHR"; + case StructureType::ePresentRegionsKHR : return "PresentRegionsKHR"; + case StructureType::eObjectTableCreateInfoNVX : return "ObjectTableCreateInfoNVX"; + case StructureType::eIndirectCommandsLayoutCreateInfoNVX : return "IndirectCommandsLayoutCreateInfoNVX"; + case StructureType::eCmdProcessCommandsInfoNVX : return "CmdProcessCommandsInfoNVX"; + case StructureType::eCmdReserveSpaceForCommandsInfoNVX : return "CmdReserveSpaceForCommandsInfoNVX"; + case StructureType::eDeviceGeneratedCommandsLimitsNVX : return "DeviceGeneratedCommandsLimitsNVX"; + case StructureType::eDeviceGeneratedCommandsFeaturesNVX : return "DeviceGeneratedCommandsFeaturesNVX"; + case StructureType::ePipelineViewportWScalingStateCreateInfoNV : return "PipelineViewportWScalingStateCreateInfoNV"; + case StructureType::eSurfaceCapabilities2EXT : return "SurfaceCapabilities2EXT"; + case StructureType::eDisplayPowerInfoEXT : return "DisplayPowerInfoEXT"; + case StructureType::eDeviceEventInfoEXT : return "DeviceEventInfoEXT"; + case StructureType::eDisplayEventInfoEXT : return "DisplayEventInfoEXT"; + case StructureType::eSwapchainCounterCreateInfoEXT : return "SwapchainCounterCreateInfoEXT"; + case StructureType::ePresentTimesInfoGOOGLE : return "PresentTimesInfoGOOGLE"; + case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX : return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX"; + case StructureType::ePipelineViewportSwizzleStateCreateInfoNV : return "PipelineViewportSwizzleStateCreateInfoNV"; + case 
StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT : return "PhysicalDeviceDiscardRectanglePropertiesEXT"; + case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT : return "PipelineDiscardRectangleStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT : return "PhysicalDeviceConservativeRasterizationPropertiesEXT"; + case StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT : return "PipelineRasterizationConservativeStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT : return "PhysicalDeviceDepthClipEnableFeaturesEXT"; + case StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT : return "PipelineRasterizationDepthClipStateCreateInfoEXT"; + case StructureType::eHdrMetadataEXT : return "HdrMetadataEXT"; + case StructureType::ePhysicalDeviceImagelessFramebufferFeaturesKHR : return "PhysicalDeviceImagelessFramebufferFeaturesKHR"; + case StructureType::eFramebufferAttachmentsCreateInfoKHR : return "FramebufferAttachmentsCreateInfoKHR"; + case StructureType::eFramebufferAttachmentImageInfoKHR : return "FramebufferAttachmentImageInfoKHR"; + case StructureType::eRenderPassAttachmentBeginInfoKHR : return "RenderPassAttachmentBeginInfoKHR"; + case StructureType::eAttachmentDescription2KHR : return "AttachmentDescription2KHR"; + case StructureType::eAttachmentReference2KHR : return "AttachmentReference2KHR"; + case StructureType::eSubpassDescription2KHR : return "SubpassDescription2KHR"; + case StructureType::eSubpassDependency2KHR : return "SubpassDependency2KHR"; + case StructureType::eRenderPassCreateInfo2KHR : return "RenderPassCreateInfo2KHR"; + case StructureType::eSubpassBeginInfoKHR : return "SubpassBeginInfoKHR"; + case StructureType::eSubpassEndInfoKHR : return "SubpassEndInfoKHR"; + case StructureType::eSharedPresentSurfaceCapabilitiesKHR : return "SharedPresentSurfaceCapabilitiesKHR"; + case StructureType::eImportFenceWin32HandleInfoKHR : return "ImportFenceWin32HandleInfoKHR"; + case StructureType::eExportFenceWin32HandleInfoKHR : return "ExportFenceWin32HandleInfoKHR"; + case StructureType::eFenceGetWin32HandleInfoKHR : return "FenceGetWin32HandleInfoKHR"; + case StructureType::eImportFenceFdInfoKHR : return "ImportFenceFdInfoKHR"; + case StructureType::eFenceGetFdInfoKHR : return "FenceGetFdInfoKHR"; + case StructureType::ePhysicalDeviceSurfaceInfo2KHR : return "PhysicalDeviceSurfaceInfo2KHR"; + case StructureType::eSurfaceCapabilities2KHR : return "SurfaceCapabilities2KHR"; + case StructureType::eSurfaceFormat2KHR : return "SurfaceFormat2KHR"; + case StructureType::eDisplayProperties2KHR : return "DisplayProperties2KHR"; + case StructureType::eDisplayPlaneProperties2KHR : return "DisplayPlaneProperties2KHR"; + case StructureType::eDisplayModeProperties2KHR : return "DisplayModeProperties2KHR"; + case StructureType::eDisplayPlaneInfo2KHR : return "DisplayPlaneInfo2KHR"; + case StructureType::eDisplayPlaneCapabilities2KHR : return "DisplayPlaneCapabilities2KHR"; + case StructureType::eIosSurfaceCreateInfoMVK : return "IosSurfaceCreateInfoMVK"; + case StructureType::eMacosSurfaceCreateInfoMVK : return "MacosSurfaceCreateInfoMVK"; + case StructureType::eDebugUtilsObjectNameInfoEXT : return "DebugUtilsObjectNameInfoEXT"; + case StructureType::eDebugUtilsObjectTagInfoEXT : return "DebugUtilsObjectTagInfoEXT"; + case StructureType::eDebugUtilsLabelEXT : return "DebugUtilsLabelEXT"; + case StructureType::eDebugUtilsMessengerCallbackDataEXT : return 
"DebugUtilsMessengerCallbackDataEXT"; + case StructureType::eDebugUtilsMessengerCreateInfoEXT : return "DebugUtilsMessengerCreateInfoEXT"; + case StructureType::eAndroidHardwareBufferUsageANDROID : return "AndroidHardwareBufferUsageANDROID"; + case StructureType::eAndroidHardwareBufferPropertiesANDROID : return "AndroidHardwareBufferPropertiesANDROID"; + case StructureType::eAndroidHardwareBufferFormatPropertiesANDROID : return "AndroidHardwareBufferFormatPropertiesANDROID"; + case StructureType::eImportAndroidHardwareBufferInfoANDROID : return "ImportAndroidHardwareBufferInfoANDROID"; + case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID : return "MemoryGetAndroidHardwareBufferInfoANDROID"; + case StructureType::eExternalFormatANDROID : return "ExternalFormatANDROID"; + case StructureType::ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT : return "PhysicalDeviceSamplerFilterMinmaxPropertiesEXT"; + case StructureType::eSamplerReductionModeCreateInfoEXT : return "SamplerReductionModeCreateInfoEXT"; + case StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT : return "PhysicalDeviceInlineUniformBlockFeaturesEXT"; + case StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT : return "PhysicalDeviceInlineUniformBlockPropertiesEXT"; + case StructureType::eWriteDescriptorSetInlineUniformBlockEXT : return "WriteDescriptorSetInlineUniformBlockEXT"; + case StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT : return "DescriptorPoolInlineUniformBlockCreateInfoEXT"; + case StructureType::eSampleLocationsInfoEXT : return "SampleLocationsInfoEXT"; + case StructureType::eRenderPassSampleLocationsBeginInfoEXT : return "RenderPassSampleLocationsBeginInfoEXT"; + case StructureType::ePipelineSampleLocationsStateCreateInfoEXT : return "PipelineSampleLocationsStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT : return "PhysicalDeviceSampleLocationsPropertiesEXT"; + case StructureType::eMultisamplePropertiesEXT : return "MultisamplePropertiesEXT"; + case StructureType::eImageFormatListCreateInfoKHR : return "ImageFormatListCreateInfoKHR"; + case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT : return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT"; + case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT : return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT"; + case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT : return "PipelineColorBlendAdvancedStateCreateInfoEXT"; + case StructureType::ePipelineCoverageToColorStateCreateInfoNV : return "PipelineCoverageToColorStateCreateInfoNV"; + case StructureType::ePipelineCoverageModulationStateCreateInfoNV : return "PipelineCoverageModulationStateCreateInfoNV"; + case StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV : return "PhysicalDeviceShaderSmBuiltinsFeaturesNV"; + case StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV : return "PhysicalDeviceShaderSmBuiltinsPropertiesNV"; + case StructureType::eDrmFormatModifierPropertiesListEXT : return "DrmFormatModifierPropertiesListEXT"; + case StructureType::eDrmFormatModifierPropertiesEXT : return "DrmFormatModifierPropertiesEXT"; + case StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT : return "PhysicalDeviceImageDrmFormatModifierInfoEXT"; + case StructureType::eImageDrmFormatModifierListCreateInfoEXT : return "ImageDrmFormatModifierListCreateInfoEXT"; + case StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT : return 
"ImageDrmFormatModifierExplicitCreateInfoEXT"; + case StructureType::eImageDrmFormatModifierPropertiesEXT : return "ImageDrmFormatModifierPropertiesEXT"; + case StructureType::eValidationCacheCreateInfoEXT : return "ValidationCacheCreateInfoEXT"; + case StructureType::eShaderModuleValidationCacheCreateInfoEXT : return "ShaderModuleValidationCacheCreateInfoEXT"; + case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfoEXT : return "DescriptorSetLayoutBindingFlagsCreateInfoEXT"; + case StructureType::ePhysicalDeviceDescriptorIndexingFeaturesEXT : return "PhysicalDeviceDescriptorIndexingFeaturesEXT"; + case StructureType::ePhysicalDeviceDescriptorIndexingPropertiesEXT : return "PhysicalDeviceDescriptorIndexingPropertiesEXT"; + case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfoEXT : return "DescriptorSetVariableDescriptorCountAllocateInfoEXT"; + case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupportEXT : return "DescriptorSetVariableDescriptorCountLayoutSupportEXT"; + case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV : return "PipelineViewportShadingRateImageStateCreateInfoNV"; + case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV : return "PhysicalDeviceShadingRateImageFeaturesNV"; + case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV : return "PhysicalDeviceShadingRateImagePropertiesNV"; + case StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV : return "PipelineViewportCoarseSampleOrderStateCreateInfoNV"; + case StructureType::eRayTracingPipelineCreateInfoNV : return "RayTracingPipelineCreateInfoNV"; + case StructureType::eAccelerationStructureCreateInfoNV : return "AccelerationStructureCreateInfoNV"; + case StructureType::eGeometryNV : return "GeometryNV"; + case StructureType::eGeometryTrianglesNV : return "GeometryTrianglesNV"; + case StructureType::eGeometryAabbNV : return "GeometryAabbNV"; + case StructureType::eBindAccelerationStructureMemoryInfoNV : return "BindAccelerationStructureMemoryInfoNV"; + case StructureType::eWriteDescriptorSetAccelerationStructureNV : return "WriteDescriptorSetAccelerationStructureNV"; + case StructureType::eAccelerationStructureMemoryRequirementsInfoNV : return "AccelerationStructureMemoryRequirementsInfoNV"; + case StructureType::ePhysicalDeviceRayTracingPropertiesNV : return "PhysicalDeviceRayTracingPropertiesNV"; + case StructureType::eRayTracingShaderGroupCreateInfoNV : return "RayTracingShaderGroupCreateInfoNV"; + case StructureType::eAccelerationStructureInfoNV : return "AccelerationStructureInfoNV"; + case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV : return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV"; + case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV : return "PipelineRepresentativeFragmentTestStateCreateInfoNV"; + case StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT : return "PhysicalDeviceImageViewImageFormatInfoEXT"; + case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT : return "FilterCubicImageViewImageFormatPropertiesEXT"; + case StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT : return "DeviceQueueGlobalPriorityCreateInfoEXT"; + case StructureType::ePhysicalDevice8BitStorageFeaturesKHR : return "PhysicalDevice8BitStorageFeaturesKHR"; + case StructureType::eImportMemoryHostPointerInfoEXT : return "ImportMemoryHostPointerInfoEXT"; + case StructureType::eMemoryHostPointerPropertiesEXT : return "MemoryHostPointerPropertiesEXT"; + case 
StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT : return "PhysicalDeviceExternalMemoryHostPropertiesEXT"; + case StructureType::ePhysicalDeviceShaderAtomicInt64FeaturesKHR : return "PhysicalDeviceShaderAtomicInt64FeaturesKHR"; + case StructureType::ePipelineCompilerControlCreateInfoAMD : return "PipelineCompilerControlCreateInfoAMD"; + case StructureType::eCalibratedTimestampInfoEXT : return "CalibratedTimestampInfoEXT"; + case StructureType::ePhysicalDeviceShaderCorePropertiesAMD : return "PhysicalDeviceShaderCorePropertiesAMD"; + case StructureType::eDeviceMemoryOverallocationCreateInfoAMD : return "DeviceMemoryOverallocationCreateInfoAMD"; + case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT : return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT"; + case StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT : return "PipelineVertexInputDivisorStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT : return "PhysicalDeviceVertexAttributeDivisorFeaturesEXT"; + case StructureType::ePresentFrameTokenGGP : return "PresentFrameTokenGGP"; + case StructureType::ePipelineCreationFeedbackCreateInfoEXT : return "PipelineCreationFeedbackCreateInfoEXT"; + case StructureType::ePhysicalDeviceDriverPropertiesKHR : return "PhysicalDeviceDriverPropertiesKHR"; + case StructureType::ePhysicalDeviceFloatControlsPropertiesKHR : return "PhysicalDeviceFloatControlsPropertiesKHR"; + case StructureType::ePhysicalDeviceDepthStencilResolvePropertiesKHR : return "PhysicalDeviceDepthStencilResolvePropertiesKHR"; + case StructureType::eSubpassDescriptionDepthStencilResolveKHR : return "SubpassDescriptionDepthStencilResolveKHR"; + case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV : return "PhysicalDeviceComputeShaderDerivativesFeaturesNV"; + case StructureType::ePhysicalDeviceMeshShaderFeaturesNV : return "PhysicalDeviceMeshShaderFeaturesNV"; + case StructureType::ePhysicalDeviceMeshShaderPropertiesNV : return "PhysicalDeviceMeshShaderPropertiesNV"; + case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV : return "PhysicalDeviceFragmentShaderBarycentricFeaturesNV"; + case StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV : return "PhysicalDeviceShaderImageFootprintFeaturesNV"; + case StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV : return "PipelineViewportExclusiveScissorStateCreateInfoNV"; + case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV : return "PhysicalDeviceExclusiveScissorFeaturesNV"; + case StructureType::eCheckpointDataNV : return "CheckpointDataNV"; + case StructureType::eQueueFamilyCheckpointPropertiesNV : return "QueueFamilyCheckpointPropertiesNV"; + case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL : return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL"; + case StructureType::eQueryPoolCreateInfoINTEL : return "QueryPoolCreateInfoINTEL"; + case StructureType::eInitializePerformanceApiInfoINTEL : return "InitializePerformanceApiInfoINTEL"; + case StructureType::ePerformanceMarkerInfoINTEL : return "PerformanceMarkerInfoINTEL"; + case StructureType::ePerformanceStreamMarkerInfoINTEL : return "PerformanceStreamMarkerInfoINTEL"; + case StructureType::ePerformanceOverrideInfoINTEL : return "PerformanceOverrideInfoINTEL"; + case StructureType::ePerformanceConfigurationAcquireInfoINTEL : return "PerformanceConfigurationAcquireInfoINTEL"; + case StructureType::ePhysicalDeviceVulkanMemoryModelFeaturesKHR : return 
"PhysicalDeviceVulkanMemoryModelFeaturesKHR"; + case StructureType::ePhysicalDevicePciBusInfoPropertiesEXT : return "PhysicalDevicePciBusInfoPropertiesEXT"; + case StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD : return "DisplayNativeHdrSurfaceCapabilitiesAMD"; + case StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD : return "SwapchainDisplayNativeHdrCreateInfoAMD"; + case StructureType::eImagepipeSurfaceCreateInfoFUCHSIA : return "ImagepipeSurfaceCreateInfoFUCHSIA"; + case StructureType::eMetalSurfaceCreateInfoEXT : return "MetalSurfaceCreateInfoEXT"; + case StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT : return "PhysicalDeviceFragmentDensityMapFeaturesEXT"; + case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT : return "PhysicalDeviceFragmentDensityMapPropertiesEXT"; + case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT : return "RenderPassFragmentDensityMapCreateInfoEXT"; + case StructureType::ePhysicalDeviceScalarBlockLayoutFeaturesEXT : return "PhysicalDeviceScalarBlockLayoutFeaturesEXT"; + case StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT : return "PhysicalDeviceSubgroupSizeControlPropertiesEXT"; + case StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT : return "PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT"; + case StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT : return "PhysicalDeviceSubgroupSizeControlFeaturesEXT"; + case StructureType::ePhysicalDeviceShaderCoreProperties2AMD : return "PhysicalDeviceShaderCoreProperties2AMD"; + case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD : return "PhysicalDeviceCoherentMemoryFeaturesAMD"; + case StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT : return "PhysicalDeviceMemoryBudgetPropertiesEXT"; + case StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT : return "PhysicalDeviceMemoryPriorityFeaturesEXT"; + case StructureType::eMemoryPriorityAllocateInfoEXT : return "MemoryPriorityAllocateInfoEXT"; + case StructureType::eSurfaceProtectedCapabilitiesKHR : return "SurfaceProtectedCapabilitiesKHR"; + case StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV : return "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV"; + case StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT : return "PhysicalDeviceBufferDeviceAddressFeaturesEXT"; + case StructureType::eBufferDeviceAddressInfoEXT : return "BufferDeviceAddressInfoEXT"; + case StructureType::eBufferDeviceAddressCreateInfoEXT : return "BufferDeviceAddressCreateInfoEXT"; + case StructureType::eImageStencilUsageCreateInfoEXT : return "ImageStencilUsageCreateInfoEXT"; + case StructureType::eValidationFeaturesEXT : return "ValidationFeaturesEXT"; + case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV : return "PhysicalDeviceCooperativeMatrixFeaturesNV"; + case StructureType::eCooperativeMatrixPropertiesNV : return "CooperativeMatrixPropertiesNV"; + case StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV : return "PhysicalDeviceCooperativeMatrixPropertiesNV"; + case StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV : return "PhysicalDeviceCoverageReductionModeFeaturesNV"; + case StructureType::ePipelineCoverageReductionStateCreateInfoNV : return "PipelineCoverageReductionStateCreateInfoNV"; + case StructureType::eFramebufferMixedSamplesCombinationNV : return "FramebufferMixedSamplesCombinationNV"; + case StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT : return 
"PhysicalDeviceFragmentShaderInterlockFeaturesEXT"; + case StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT : return "PhysicalDeviceYcbcrImageArraysFeaturesEXT"; + case StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR : return "PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR"; + case StructureType::eSurfaceFullScreenExclusiveInfoEXT : return "SurfaceFullScreenExclusiveInfoEXT"; + case StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT : return "SurfaceCapabilitiesFullScreenExclusiveEXT"; + case StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT : return "SurfaceFullScreenExclusiveWin32InfoEXT"; + case StructureType::eHeadlessSurfaceCreateInfoEXT : return "HeadlessSurfaceCreateInfoEXT"; + case StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT : return "PhysicalDeviceLineRasterizationFeaturesEXT"; + case StructureType::ePipelineRasterizationLineStateCreateInfoEXT : return "PipelineRasterizationLineStateCreateInfoEXT"; + case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT : return "PhysicalDeviceLineRasterizationPropertiesEXT"; + case StructureType::ePhysicalDeviceHostQueryResetFeaturesEXT : return "PhysicalDeviceHostQueryResetFeaturesEXT"; + case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT : return "PhysicalDeviceIndexTypeUint8FeaturesEXT"; + case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR : return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR"; + case StructureType::ePipelineInfoKHR : return "PipelineInfoKHR"; + case StructureType::ePipelineExecutablePropertiesKHR : return "PipelineExecutablePropertiesKHR"; + case StructureType::ePipelineExecutableInfoKHR : return "PipelineExecutableInfoKHR"; + case StructureType::ePipelineExecutableStatisticKHR : return "PipelineExecutableStatisticKHR"; + case StructureType::ePipelineExecutableInternalRepresentationKHR : return "PipelineExecutableInternalRepresentationKHR"; + case StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT : return "PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT"; + case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT : return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT"; + case StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT : return "PhysicalDeviceTexelBufferAlignmentPropertiesEXT"; + default: return "invalid"; + } + } + + enum class SubgroupFeatureFlagBits : VkSubgroupFeatureFlags + { + eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT, + eVote = VK_SUBGROUP_FEATURE_VOTE_BIT, + eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT, + eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT, + eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT, + eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT, + eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT, + eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT, + ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV + }; + + VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlagBits value ) + { + switch ( value ) + { + case SubgroupFeatureFlagBits::eBasic : return "Basic"; + case SubgroupFeatureFlagBits::eVote : return "Vote"; + case SubgroupFeatureFlagBits::eArithmetic : return "Arithmetic"; + case SubgroupFeatureFlagBits::eBallot : return "Ballot"; + case SubgroupFeatureFlagBits::eShuffle : return "Shuffle"; + case SubgroupFeatureFlagBits::eShuffleRelative : return "ShuffleRelative"; + case SubgroupFeatureFlagBits::eClustered : return "Clustered"; + case SubgroupFeatureFlagBits::eQuad : return "Quad"; + case SubgroupFeatureFlagBits::ePartitionedNV : 
return "PartitionedNV"; + default: return "invalid"; + } + } + + enum class SubpassContents + { + eInline = VK_SUBPASS_CONTENTS_INLINE, + eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS + }; + + VULKAN_HPP_INLINE std::string to_string( SubpassContents value ) + { + switch ( value ) + { + case SubpassContents::eInline : return "Inline"; + case SubpassContents::eSecondaryCommandBuffers : return "SecondaryCommandBuffers"; + default: return "invalid"; + } + } + + enum class SubpassDescriptionFlagBits : VkSubpassDescriptionFlags + { + ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX, + ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX + }; + + VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlagBits value ) + { + switch ( value ) + { + case SubpassDescriptionFlagBits::ePerViewAttributesNVX : return "PerViewAttributesNVX"; + case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX : return "PerViewPositionXOnlyNVX"; + default: return "invalid"; + } + } + + enum class SurfaceCounterFlagBitsEXT : VkSurfaceCounterFlagsEXT + { + eVblank = VK_SURFACE_COUNTER_VBLANK_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagBitsEXT value ) + { + switch ( value ) + { + case SurfaceCounterFlagBitsEXT::eVblank : return "Vblank"; + default: return "invalid"; + } + } + + enum class SurfaceTransformFlagBitsKHR : VkSurfaceTransformFlagsKHR + { + eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR, + eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR, + eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR, + eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR, + eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR, + eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR, + eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR, + eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR, + eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagBitsKHR value ) + { + switch ( value ) + { + case SurfaceTransformFlagBitsKHR::eIdentity : return "Identity"; + case SurfaceTransformFlagBitsKHR::eRotate90 : return "Rotate90"; + case SurfaceTransformFlagBitsKHR::eRotate180 : return "Rotate180"; + case SurfaceTransformFlagBitsKHR::eRotate270 : return "Rotate270"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirror : return "HorizontalMirror"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 : return "HorizontalMirrorRotate90"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 : return "HorizontalMirrorRotate180"; + case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 : return "HorizontalMirrorRotate270"; + case SurfaceTransformFlagBitsKHR::eInherit : return "Inherit"; + default: return "invalid"; + } + } + + enum class SwapchainCreateFlagBitsKHR : VkSwapchainCreateFlagsKHR + { + eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR, + eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR, + eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR + }; + + VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagBitsKHR value ) + { + switch ( value ) + { + case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions : return "SplitInstanceBindRegions"; + case SwapchainCreateFlagBitsKHR::eProtected : return "Protected"; + case SwapchainCreateFlagBitsKHR::eMutableFormat : return 
"MutableFormat"; + default: return "invalid"; + } + } + + enum class SystemAllocationScope + { + eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, + eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT, + eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE, + eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE, + eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE + }; + + VULKAN_HPP_INLINE std::string to_string( SystemAllocationScope value ) + { + switch ( value ) + { + case SystemAllocationScope::eCommand : return "Command"; + case SystemAllocationScope::eObject : return "Object"; + case SystemAllocationScope::eCache : return "Cache"; + case SystemAllocationScope::eDevice : return "Device"; + case SystemAllocationScope::eInstance : return "Instance"; + default: return "invalid"; + } + } + + enum class TessellationDomainOrigin + { + eUpperLeft = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, + eLowerLeft = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT + }; + using TessellationDomainOriginKHR = TessellationDomainOrigin; + + VULKAN_HPP_INLINE std::string to_string( TessellationDomainOrigin value ) + { + switch ( value ) + { + case TessellationDomainOrigin::eUpperLeft : return "UpperLeft"; + case TessellationDomainOrigin::eLowerLeft : return "LowerLeft"; + default: return "invalid"; + } + } + + enum class TimeDomainEXT + { + eDevice = VK_TIME_DOMAIN_DEVICE_EXT, + eClockMonotonic = VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT, + eClockMonotonicRaw = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT, + eQueryPerformanceCounter = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( TimeDomainEXT value ) + { + switch ( value ) + { + case TimeDomainEXT::eDevice : return "Device"; + case TimeDomainEXT::eClockMonotonic : return "ClockMonotonic"; + case TimeDomainEXT::eClockMonotonicRaw : return "ClockMonotonicRaw"; + case TimeDomainEXT::eQueryPerformanceCounter : return "QueryPerformanceCounter"; + default: return "invalid"; + } + } + + enum class ValidationCacheHeaderVersionEXT + { + eOne = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ValidationCacheHeaderVersionEXT value ) + { + switch ( value ) + { + case ValidationCacheHeaderVersionEXT::eOne : return "One"; + default: return "invalid"; + } + } + + enum class ValidationCheckEXT + { + eAll = VK_VALIDATION_CHECK_ALL_EXT, + eShaders = VK_VALIDATION_CHECK_SHADERS_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ValidationCheckEXT value ) + { + switch ( value ) + { + case ValidationCheckEXT::eAll : return "All"; + case ValidationCheckEXT::eShaders : return "Shaders"; + default: return "invalid"; + } + } + + enum class ValidationFeatureDisableEXT + { + eAll = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT, + eShaders = VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT, + eThreadSafety = VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT, + eApiParameters = VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT, + eObjectLifetimes = VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT, + eCoreChecks = VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT, + eUniqueHandles = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ValidationFeatureDisableEXT value ) + { + switch ( value ) + { + case ValidationFeatureDisableEXT::eAll : return "All"; + case ValidationFeatureDisableEXT::eShaders : return "Shaders"; + case ValidationFeatureDisableEXT::eThreadSafety : return "ThreadSafety"; + case ValidationFeatureDisableEXT::eApiParameters : return "ApiParameters"; + case ValidationFeatureDisableEXT::eObjectLifetimes : 
return "ObjectLifetimes"; + case ValidationFeatureDisableEXT::eCoreChecks : return "CoreChecks"; + case ValidationFeatureDisableEXT::eUniqueHandles : return "UniqueHandles"; + default: return "invalid"; + } + } + + enum class ValidationFeatureEnableEXT + { + eGpuAssisted = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT, + eGpuAssistedReserveBindingSlot = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT, + eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT + }; + + VULKAN_HPP_INLINE std::string to_string( ValidationFeatureEnableEXT value ) + { + switch ( value ) + { + case ValidationFeatureEnableEXT::eGpuAssisted : return "GpuAssisted"; + case ValidationFeatureEnableEXT::eGpuAssistedReserveBindingSlot : return "GpuAssistedReserveBindingSlot"; + case ValidationFeatureEnableEXT::eBestPractices : return "BestPractices"; + default: return "invalid"; + } + } + + enum class VendorId + { + eVIV = VK_VENDOR_ID_VIV, + eVSI = VK_VENDOR_ID_VSI, + eKazan = VK_VENDOR_ID_KAZAN + }; + + VULKAN_HPP_INLINE std::string to_string( VendorId value ) + { + switch ( value ) + { + case VendorId::eVIV : return "VIV"; + case VendorId::eVSI : return "VSI"; + case VendorId::eKazan : return "Kazan"; + default: return "invalid"; + } + } + + enum class VertexInputRate + { + eVertex = VK_VERTEX_INPUT_RATE_VERTEX, + eInstance = VK_VERTEX_INPUT_RATE_INSTANCE + }; + + VULKAN_HPP_INLINE std::string to_string( VertexInputRate value ) + { + switch ( value ) + { + case VertexInputRate::eVertex : return "Vertex"; + case VertexInputRate::eInstance : return "Instance"; + default: return "invalid"; + } + } + + enum class ViewportCoordinateSwizzleNV + { + ePositiveX = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, + eNegativeX = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV, + ePositiveY = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV, + eNegativeY = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV, + ePositiveZ = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV, + eNegativeZ = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV, + ePositiveW = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV, + eNegativeW = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV + }; + + VULKAN_HPP_INLINE std::string to_string( ViewportCoordinateSwizzleNV value ) + { + switch ( value ) + { + case ViewportCoordinateSwizzleNV::ePositiveX : return "PositiveX"; + case ViewportCoordinateSwizzleNV::eNegativeX : return "NegativeX"; + case ViewportCoordinateSwizzleNV::ePositiveY : return "PositiveY"; + case ViewportCoordinateSwizzleNV::eNegativeY : return "NegativeY"; + case ViewportCoordinateSwizzleNV::ePositiveZ : return "PositiveZ"; + case ViewportCoordinateSwizzleNV::eNegativeZ : return "NegativeZ"; + case ViewportCoordinateSwizzleNV::ePositiveW : return "PositiveW"; + case ViewportCoordinateSwizzleNV::eNegativeW : return "NegativeW"; + default: return "invalid"; + } + } + + template + struct cpp_type + { + }; + +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/exceptions.hpp b/app/libraries/VulkanHpp/vulkan/internal/exceptions.hpp new file mode 100644 index 00000000..eb17c598 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/exceptions.hpp @@ -0,0 +1,497 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "bitmasks.hpp"
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+namespace std
+{
+  template <>
+  struct is_error_code_enum<VULKAN_HPP_NAMESPACE::Result> : public true_type
+  {};
+}
+#endif
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  class ErrorCategoryImpl : public std::error_category
+  {
+  public:
+    virtual const char* name() const VULKAN_HPP_NOEXCEPT override { return VULKAN_HPP_NAMESPACE_STRING"::Result"; }
+    virtual std::string message(int ev) const override { return to_string(static_cast<Result>(ev)); }
+  };
+
+  class Error
+  {
+  public:
+    Error() VULKAN_HPP_NOEXCEPT = default;
+    Error(const Error&) VULKAN_HPP_NOEXCEPT = default;
+    virtual ~Error() VULKAN_HPP_NOEXCEPT = default;
+
+    virtual const char* what() const VULKAN_HPP_NOEXCEPT = 0;
+  };
+
+  class LogicError : public Error, public std::logic_error
+  {
+  public:
+    explicit LogicError( const std::string& what )
+      : Error(), std::logic_error(what) {}
+    explicit LogicError( char const * what )
+      : Error(), std::logic_error(what) {}
+
+    virtual const char* what() const VULKAN_HPP_NOEXCEPT { return std::logic_error::what(); }
+  };
+
+  class SystemError : public Error, public std::system_error
+  {
+  public:
+    SystemError( std::error_code ec )
+      : Error(), std::system_error(ec) {}
+    SystemError( std::error_code ec, std::string const& what )
+      : Error(), std::system_error(ec, what) {}
+    SystemError( std::error_code ec, char const * what )
+      : Error(), std::system_error(ec, what) {}
+    SystemError( int ev, std::error_category const& ecat )
+      : Error(), std::system_error(ev, ecat) {}
+    SystemError( int ev, std::error_category const& ecat, std::string const& what)
+      : Error(), std::system_error(ev, ecat, what) {}
+    SystemError( int ev, std::error_category const& ecat, char const * what)
+      : Error(), std::system_error(ev, ecat, what) {}
+
+    virtual const char* what() const VULKAN_HPP_NOEXCEPT { return std::system_error::what(); }
+  };
+
+  VULKAN_HPP_INLINE const std::error_category& errorCategory() VULKAN_HPP_NOEXCEPT
+  {
+    static ErrorCategoryImpl instance;
+    return instance;
+  }
+
+  VULKAN_HPP_INLINE std::error_code make_error_code(Result e) VULKAN_HPP_NOEXCEPT
+  {
+    return std::error_code(static_cast<int>(e), errorCategory());
+  }
+
+  VULKAN_HPP_INLINE std::error_condition make_error_condition(Result e) VULKAN_HPP_NOEXCEPT
+  {
+    return std::error_condition(static_cast<int>(e), errorCategory());
+  }
+
+  class OutOfHostMemoryError : public SystemError
+  {
+  public:
+    OutOfHostMemoryError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
+    OutOfHostMemoryError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
+  };
+
+  class OutOfDeviceMemoryError : public SystemError
+  {
+  public:
+    OutOfDeviceMemoryError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
+    OutOfDeviceMemoryError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
+  };
+
+  class InitializationFailedError : public SystemError
+  {
+  public:
+    InitializationFailedError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
+    InitializationFailedError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
+  };
+
+  class DeviceLostError : public SystemError
+  {
+  public:
+    DeviceLostError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
+    DeviceLostError( char const * message )
+      : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
+  };
+
+  class MemoryMapFailedError : public SystemError
+  {
+  public:
+    MemoryMapFailedError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
+    MemoryMapFailedError( char const * message )
+      : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
+  };
+
+  class LayerNotPresentError : public SystemError
+  {
+  public:
+    LayerNotPresentError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
+    LayerNotPresentError( char const * message )
+      : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
+  };
+
+  class ExtensionNotPresentError : public SystemError
+  {
+  public:
+    ExtensionNotPresentError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
+    ExtensionNotPresentError( char const * message )
+      : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
+  };
+
+  class FeatureNotPresentError : public SystemError
+  {
+  public:
+    FeatureNotPresentError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
+    FeatureNotPresentError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
+  };
+
+  class IncompatibleDriverError : public SystemError
+  {
+  public:
+    IncompatibleDriverError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
+    IncompatibleDriverError( char const * message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
+  };
+
+  class TooManyObjectsError : public SystemError
+  {
+  public:
+    TooManyObjectsError( std::string const& message )
+      : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
+
TooManyObjectsError( char const * message ) + : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {} + }; + + class FormatNotSupportedError : public SystemError + { + public: + FormatNotSupportedError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {} + FormatNotSupportedError( char const * message ) + : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {} + }; + + class FragmentedPoolError : public SystemError + { + public: + FragmentedPoolError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {} + FragmentedPoolError( char const * message ) + : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {} + }; + + class OutOfPoolMemoryError : public SystemError + { + public: + OutOfPoolMemoryError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {} + OutOfPoolMemoryError( char const * message ) + : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {} + }; + + class InvalidExternalHandleError : public SystemError + { + public: + InvalidExternalHandleError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {} + InvalidExternalHandleError( char const * message ) + : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {} + }; + + class SurfaceLostKHRError : public SystemError + { + public: + SurfaceLostKHRError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {} + SurfaceLostKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {} + }; + + class NativeWindowInUseKHRError : public SystemError + { + public: + NativeWindowInUseKHRError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {} + NativeWindowInUseKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {} + }; + + class OutOfDateKHRError : public SystemError + { + public: + OutOfDateKHRError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} + OutOfDateKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {} + }; + + class IncompatibleDisplayKHRError : public SystemError + { + public: + IncompatibleDisplayKHRError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {} + IncompatibleDisplayKHRError( char const * message ) + : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {} + }; + + class ValidationFailedEXTError : public SystemError + { + public: + ValidationFailedEXTError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {} + ValidationFailedEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {} + }; + + class InvalidShaderNVError : public SystemError + { + public: + InvalidShaderNVError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} + InvalidShaderNVError( char const * message ) + : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {} + }; + + class 
InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError + { + public: + InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {} + InvalidDrmFormatModifierPlaneLayoutEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {} + }; + + class FragmentationEXTError : public SystemError + { + public: + FragmentationEXTError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorFragmentationEXT ), message ) {} + FragmentationEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorFragmentationEXT ), message ) {} + }; + + class NotPermittedEXTError : public SystemError + { + public: + NotPermittedEXTError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {} + NotPermittedEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {} + }; + + class InvalidDeviceAddressEXTError : public SystemError + { + public: + InvalidDeviceAddressEXTError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorInvalidDeviceAddressEXT ), message ) {} + InvalidDeviceAddressEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorInvalidDeviceAddressEXT ), message ) {} + }; + + class FullScreenExclusiveModeLostEXTError : public SystemError + { + public: + FullScreenExclusiveModeLostEXTError( std::string const& message ) + : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {} + FullScreenExclusiveModeLostEXTError( char const * message ) + : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {} + }; + + [[noreturn]] static void throwResultException( Result result, char const * message ) + { + switch ( result ) + { + case Result::eErrorOutOfHostMemory: throw OutOfHostMemoryError( message ); + case Result::eErrorOutOfDeviceMemory: throw OutOfDeviceMemoryError( message ); + case Result::eErrorInitializationFailed: throw InitializationFailedError( message ); + case Result::eErrorDeviceLost: throw DeviceLostError( message ); + case Result::eErrorMemoryMapFailed: throw MemoryMapFailedError( message ); + case Result::eErrorLayerNotPresent: throw LayerNotPresentError( message ); + case Result::eErrorExtensionNotPresent: throw ExtensionNotPresentError( message ); + case Result::eErrorFeatureNotPresent: throw FeatureNotPresentError( message ); + case Result::eErrorIncompatibleDriver: throw IncompatibleDriverError( message ); + case Result::eErrorTooManyObjects: throw TooManyObjectsError( message ); + case Result::eErrorFormatNotSupported: throw FormatNotSupportedError( message ); + case Result::eErrorFragmentedPool: throw FragmentedPoolError( message ); + case Result::eErrorOutOfPoolMemory: throw OutOfPoolMemoryError( message ); + case Result::eErrorInvalidExternalHandle: throw InvalidExternalHandleError( message ); + case Result::eErrorSurfaceLostKHR: throw SurfaceLostKHRError( message ); + case Result::eErrorNativeWindowInUseKHR: throw NativeWindowInUseKHRError( message ); + case Result::eErrorOutOfDateKHR: throw OutOfDateKHRError( message ); + case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError( message ); + case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message ); + case 
Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message );
+      case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: throw InvalidDrmFormatModifierPlaneLayoutEXTError( message );
+      case Result::eErrorFragmentationEXT: throw FragmentationEXTError( message );
+      case Result::eErrorNotPermittedEXT: throw NotPermittedEXTError( message );
+      case Result::eErrorInvalidDeviceAddressEXT: throw InvalidDeviceAddressEXTError( message );
+      case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message );
+      default: throw SystemError( make_error_code( result ) );
+    }
+  }
+#endif
+
+  template <typename T> void ignore(T const&) VULKAN_HPP_NOEXCEPT {}
+
+  template <typename T>
+  struct ResultValue
+  {
+#ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue( Result r, T & v ) VULKAN_HPP_NOEXCEPT(VULKAN_HPP_NOEXCEPT(T(v)))
+#else
+    ResultValue( Result r, T & v )
+#endif
+      : result( r )
+      , value( v )
+    {}
+
+#ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue( Result r, T && v ) VULKAN_HPP_NOEXCEPT(VULKAN_HPP_NOEXCEPT(T(std::move(v))))
+#else
+    ResultValue( Result r, T && v )
+#endif
+      : result( r )
+      , value( std::move( v ) )
+    {}
+
+    Result result;
+    T value;
+
+    operator std::tuple<Result&, T&>() VULKAN_HPP_NOEXCEPT { return std::tuple<Result&, T&>(result, value); }
+  };
+
+  template <typename T>
+  struct ResultValueType
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    typedef ResultValue<T> type;
+#else
+    typedef T type;
+#endif
+  };
+
+  template <>
+  struct ResultValueType<void>
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    typedef Result type;
+#else
+    typedef void type;
+#endif
+  };
+
+  VULKAN_HPP_INLINE ResultValueType<void>::type createResultValue( Result result, char const * message )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore(message);
+    VULKAN_HPP_ASSERT( result == Result::eSuccess );
+    return result;
+#else
+    if ( result != Result::eSuccess )
+    {
+      throwResultException( result, message );
+    }
+#endif
+  }
+
+  template <typename T>
+  VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValue( Result result, T & data, char const * message )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore(message);
+    VULKAN_HPP_ASSERT( result == Result::eSuccess );
+    return ResultValue<T>( result, std::move( data ) );
+#else
+    if ( result != Result::eSuccess )
+    {
+      throwResultException( result, message );
+    }
+    return std::move( data );
+#endif
+  }
+
+  VULKAN_HPP_INLINE Result createResultValue( Result result, char const * message, std::initializer_list<Result> successCodes )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore(message);
+    VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+#else
+    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
+    {
+      throwResultException( result, message );
+    }
+#endif
+    return result;
+  }
+
+  template <typename T>
+  VULKAN_HPP_INLINE ResultValue<T> createResultValue( Result result, T & data, char const * message, std::initializer_list<Result> successCodes )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore(message);
+    VULKAN_HPP_ASSERT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+#else
+    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
+    {
+      throwResultException( result, message );
+    }
+#endif
+    return ResultValue<T>( result, data );
+  }
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename T, typename D>
+  VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<T,D>>::type createResultValue( Result result, T & data, char const * message, typename UniqueHandleTraits<T,D>::deleter const& deleter )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore(message);
+    VULKAN_HPP_ASSERT( result == Result::eSuccess );
+    return ResultValue<UniqueHandle<T,D>>( result, UniqueHandle<T,D>(data, deleter) );
+#else
+    if ( result != Result::eSuccess )
+    {
+      throwResultException( result, message );
+    }
+    return UniqueHandle<T,D>(data, deleter);
+#endif
+  }
+#endif
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/forward.hpp b/app/libraries/VulkanHpp/vulkan/internal/forward.hpp
new file mode 100644
index 00000000..a2c8a987
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/forward.hpp
@@ -0,0 +1,655 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+ +#pragma once + +#include "exceptions.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct AccelerationStructureCreateInfoNV; + struct AccelerationStructureInfoNV; + struct AccelerationStructureMemoryRequirementsInfoNV; + struct AcquireNextImageInfoKHR; + struct AllocationCallbacks; +#ifdef VK_USE_PLATFORM_ANDROID_KHR + struct AndroidHardwareBufferFormatPropertiesANDROID; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#ifdef VK_USE_PLATFORM_ANDROID_KHR + struct AndroidHardwareBufferPropertiesANDROID; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#ifdef VK_USE_PLATFORM_ANDROID_KHR + struct AndroidHardwareBufferUsageANDROID; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +#ifdef VK_USE_PLATFORM_ANDROID_KHR + struct AndroidSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + struct ApplicationInfo; + struct AttachmentDescription; + struct AttachmentDescription2KHR; + struct AttachmentReference; + struct AttachmentReference2KHR; + struct AttachmentSampleLocationsEXT; + struct BaseInStructure; + struct BaseOutStructure; + struct BindAccelerationStructureMemoryInfoNV; + struct BindBufferMemoryDeviceGroupInfo; + using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo; + struct BindBufferMemoryInfo; + using BindBufferMemoryInfoKHR = BindBufferMemoryInfo; + struct BindImageMemoryDeviceGroupInfo; + using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo; + struct BindImageMemoryInfo; + using BindImageMemoryInfoKHR = BindImageMemoryInfo; + struct BindImageMemorySwapchainInfoKHR; + struct BindImagePlaneMemoryInfo; + using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo; + struct BindSparseInfo; + struct BufferCopy; + struct BufferCreateInfo; + struct BufferDeviceAddressCreateInfoEXT; + struct BufferDeviceAddressInfoEXT; + struct BufferImageCopy; + struct BufferMemoryBarrier; + struct BufferMemoryRequirementsInfo2; + using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2; + struct BufferViewCreateInfo; + struct CalibratedTimestampInfoEXT; + struct CheckpointDataNV; + struct ClearAttachment; + union ClearColorValue; + struct ClearDepthStencilValue; + struct ClearRect; + union ClearValue; + struct CmdProcessCommandsInfoNVX; + struct CmdReserveSpaceForCommandsInfoNVX; + struct CoarseSampleLocationNV; + struct CoarseSampleOrderCustomNV; + struct CommandBufferAllocateInfo; + struct CommandBufferBeginInfo; + struct CommandBufferInheritanceConditionalRenderingInfoEXT; + struct CommandBufferInheritanceInfo; + struct CommandPoolCreateInfo; + struct ComponentMapping; + struct ComputePipelineCreateInfo; + struct ConditionalRenderingBeginInfoEXT; + struct ConformanceVersionKHR; + struct CooperativeMatrixPropertiesNV; + struct CopyDescriptorSet; +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct D3D12FenceSubmitInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + struct DebugMarkerMarkerInfoEXT; + struct DebugMarkerObjectNameInfoEXT; + struct DebugMarkerObjectTagInfoEXT; + struct DebugReportCallbackCreateInfoEXT; + struct DebugUtilsLabelEXT; + struct DebugUtilsMessengerCallbackDataEXT; + struct DebugUtilsMessengerCreateInfoEXT; + struct DebugUtilsObjectNameInfoEXT; + struct DebugUtilsObjectTagInfoEXT; + struct DedicatedAllocationBufferCreateInfoNV; + struct DedicatedAllocationImageCreateInfoNV; + struct DedicatedAllocationMemoryAllocateInfoNV; + struct DescriptorBufferInfo; + struct DescriptorImageInfo; + struct DescriptorPoolCreateInfo; + struct DescriptorPoolInlineUniformBlockCreateInfoEXT; + struct DescriptorPoolSize; + struct DescriptorSetAllocateInfo; + struct 
DescriptorSetLayoutBinding; + struct DescriptorSetLayoutBindingFlagsCreateInfoEXT; + struct DescriptorSetLayoutCreateInfo; + struct DescriptorSetLayoutSupport; + using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport; + struct DescriptorSetVariableDescriptorCountAllocateInfoEXT; + struct DescriptorSetVariableDescriptorCountLayoutSupportEXT; + struct DescriptorUpdateTemplateCreateInfo; + using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo; + struct DescriptorUpdateTemplateEntry; + using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry; + struct DeviceCreateInfo; + struct DeviceEventInfoEXT; + struct DeviceGeneratedCommandsFeaturesNVX; + struct DeviceGeneratedCommandsLimitsNVX; + struct DeviceGroupBindSparseInfo; + using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo; + struct DeviceGroupCommandBufferBeginInfo; + using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo; + struct DeviceGroupDeviceCreateInfo; + using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo; + struct DeviceGroupPresentCapabilitiesKHR; + struct DeviceGroupPresentInfoKHR; + struct DeviceGroupRenderPassBeginInfo; + using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo; + struct DeviceGroupSubmitInfo; + using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo; + struct DeviceGroupSwapchainCreateInfoKHR; + struct DeviceMemoryOverallocationCreateInfoAMD; + struct DeviceQueueCreateInfo; + struct DeviceQueueGlobalPriorityCreateInfoEXT; + struct DeviceQueueInfo2; + struct DispatchIndirectCommand; + struct DisplayEventInfoEXT; + struct DisplayModeCreateInfoKHR; + struct DisplayModeParametersKHR; + struct DisplayModeProperties2KHR; + struct DisplayModePropertiesKHR; + struct DisplayNativeHdrSurfaceCapabilitiesAMD; + struct DisplayPlaneCapabilities2KHR; + struct DisplayPlaneCapabilitiesKHR; + struct DisplayPlaneInfo2KHR; + struct DisplayPlaneProperties2KHR; + struct DisplayPlanePropertiesKHR; + struct DisplayPowerInfoEXT; + struct DisplayPresentInfoKHR; + struct DisplayProperties2KHR; + struct DisplayPropertiesKHR; + struct DisplaySurfaceCreateInfoKHR; + struct DrawIndexedIndirectCommand; + struct DrawIndirectCommand; + struct DrawMeshTasksIndirectCommandNV; + struct DrmFormatModifierPropertiesEXT; + struct DrmFormatModifierPropertiesListEXT; + struct EventCreateInfo; + struct ExportFenceCreateInfo; + using ExportFenceCreateInfoKHR = ExportFenceCreateInfo; +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ExportFenceWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + struct ExportMemoryAllocateInfo; + using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo; + struct ExportMemoryAllocateInfoNV; +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ExportMemoryWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ExportMemoryWin32HandleInfoNV; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + struct ExportSemaphoreCreateInfo; + using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo; +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ExportSemaphoreWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + struct ExtensionProperties; + struct Extent2D; + struct Extent3D; + struct ExternalBufferProperties; + using ExternalBufferPropertiesKHR = ExternalBufferProperties; + struct ExternalFenceProperties; + using ExternalFencePropertiesKHR = ExternalFenceProperties; +#ifdef VK_USE_PLATFORM_ANDROID_KHR + struct ExternalFormatANDROID; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + struct 
ExternalImageFormatProperties; + using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties; + struct ExternalImageFormatPropertiesNV; + struct ExternalMemoryBufferCreateInfo; + using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo; + struct ExternalMemoryImageCreateInfo; + using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo; + struct ExternalMemoryImageCreateInfoNV; + struct ExternalMemoryProperties; + using ExternalMemoryPropertiesKHR = ExternalMemoryProperties; + struct ExternalSemaphoreProperties; + using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties; + struct FenceCreateInfo; + struct FenceGetFdInfoKHR; +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct FenceGetWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + struct FilterCubicImageViewImageFormatPropertiesEXT; + struct FormatProperties; + struct FormatProperties2; + using FormatProperties2KHR = FormatProperties2; + struct FramebufferAttachmentImageInfoKHR; + struct FramebufferAttachmentsCreateInfoKHR; + struct FramebufferCreateInfo; + struct FramebufferMixedSamplesCombinationNV; + struct GeometryAABBNV; + struct GeometryDataNV; + struct GeometryNV; + struct GeometryTrianglesNV; + struct GraphicsPipelineCreateInfo; + struct HdrMetadataEXT; + struct HeadlessSurfaceCreateInfoEXT; +#ifdef VK_USE_PLATFORM_IOS_MVK + struct IOSSurfaceCreateInfoMVK; +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + struct ImageBlit; + struct ImageCopy; + struct ImageCreateInfo; + struct ImageDrmFormatModifierExplicitCreateInfoEXT; + struct ImageDrmFormatModifierListCreateInfoEXT; + struct ImageDrmFormatModifierPropertiesEXT; + struct ImageFormatListCreateInfoKHR; + struct ImageFormatProperties; + struct ImageFormatProperties2; + using ImageFormatProperties2KHR = ImageFormatProperties2; + struct ImageMemoryBarrier; + struct ImageMemoryRequirementsInfo2; + using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2; +#ifdef VK_USE_PLATFORM_FUCHSIA + struct ImagePipeSurfaceCreateInfoFUCHSIA; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + struct ImagePlaneMemoryRequirementsInfo; + using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo; + struct ImageResolve; + struct ImageSparseMemoryRequirementsInfo2; + using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2; + struct ImageStencilUsageCreateInfoEXT; + struct ImageSubresource; + struct ImageSubresourceLayers; + struct ImageSubresourceRange; + struct ImageSwapchainCreateInfoKHR; + struct ImageViewASTCDecodeModeEXT; + struct ImageViewCreateInfo; + struct ImageViewHandleInfoNVX; + struct ImageViewUsageCreateInfo; + using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo; +#ifdef VK_USE_PLATFORM_ANDROID_KHR + struct ImportAndroidHardwareBufferInfoANDROID; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + struct ImportFenceFdInfoKHR; +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ImportFenceWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + struct ImportMemoryFdInfoKHR; + struct ImportMemoryHostPointerInfoEXT; +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ImportMemoryWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ImportMemoryWin32HandleInfoNV; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + struct ImportSemaphoreFdInfoKHR; +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ImportSemaphoreWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + struct IndirectCommandsLayoutCreateInfoNVX; + struct IndirectCommandsLayoutTokenNVX; + struct IndirectCommandsTokenNVX; + struct 
InitializePerformanceApiInfoINTEL; + struct InputAttachmentAspectReference; + using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference; + struct InstanceCreateInfo; + struct LayerProperties; +#ifdef VK_USE_PLATFORM_MACOS_MVK + struct MacOSSurfaceCreateInfoMVK; +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + struct MappedMemoryRange; + struct MemoryAllocateFlagsInfo; + using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo; + struct MemoryAllocateInfo; + struct MemoryBarrier; + struct MemoryDedicatedAllocateInfo; + using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo; + struct MemoryDedicatedRequirements; + using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements; + struct MemoryFdPropertiesKHR; +#ifdef VK_USE_PLATFORM_ANDROID_KHR + struct MemoryGetAndroidHardwareBufferInfoANDROID; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + struct MemoryGetFdInfoKHR; +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct MemoryGetWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + struct MemoryHeap; + struct MemoryHostPointerPropertiesEXT; + struct MemoryPriorityAllocateInfoEXT; + struct MemoryRequirements; + struct MemoryRequirements2; + using MemoryRequirements2KHR = MemoryRequirements2; + struct MemoryType; +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct MemoryWin32HandlePropertiesKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_METAL_EXT + struct MetalSurfaceCreateInfoEXT; +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + struct MultisamplePropertiesEXT; + struct ObjectTableCreateInfoNVX; + struct ObjectTableDescriptorSetEntryNVX; + struct ObjectTableEntryNVX; + struct ObjectTableIndexBufferEntryNVX; + struct ObjectTablePipelineEntryNVX; + struct ObjectTablePushConstantEntryNVX; + struct ObjectTableVertexBufferEntryNVX; + struct Offset2D; + struct Offset3D; + struct PastPresentationTimingGOOGLE; + struct PerformanceConfigurationAcquireInfoINTEL; + struct PerformanceMarkerInfoINTEL; + struct PerformanceOverrideInfoINTEL; + struct PerformanceStreamMarkerInfoINTEL; + union PerformanceValueDataINTEL; + struct PerformanceValueINTEL; + struct PhysicalDevice16BitStorageFeatures; + using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures; + struct PhysicalDevice8BitStorageFeaturesKHR; + struct PhysicalDeviceASTCDecodeFeaturesEXT; + struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT; + struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT; + struct PhysicalDeviceBufferDeviceAddressFeaturesEXT; + using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT; + struct PhysicalDeviceCoherentMemoryFeaturesAMD; + struct PhysicalDeviceComputeShaderDerivativesFeaturesNV; + struct PhysicalDeviceConditionalRenderingFeaturesEXT; + struct PhysicalDeviceConservativeRasterizationPropertiesEXT; + struct PhysicalDeviceCooperativeMatrixFeaturesNV; + struct PhysicalDeviceCooperativeMatrixPropertiesNV; + struct PhysicalDeviceCornerSampledImageFeaturesNV; + struct PhysicalDeviceCoverageReductionModeFeaturesNV; + struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + struct PhysicalDeviceDepthClipEnableFeaturesEXT; + struct PhysicalDeviceDepthStencilResolvePropertiesKHR; + struct PhysicalDeviceDescriptorIndexingFeaturesEXT; + struct PhysicalDeviceDescriptorIndexingPropertiesEXT; + struct PhysicalDeviceDiscardRectanglePropertiesEXT; + struct PhysicalDeviceDriverPropertiesKHR; + struct PhysicalDeviceExclusiveScissorFeaturesNV; + struct PhysicalDeviceExternalBufferInfo; + using PhysicalDeviceExternalBufferInfoKHR = 
PhysicalDeviceExternalBufferInfo; + struct PhysicalDeviceExternalFenceInfo; + using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo; + struct PhysicalDeviceExternalImageFormatInfo; + using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo; + struct PhysicalDeviceExternalMemoryHostPropertiesEXT; + struct PhysicalDeviceExternalSemaphoreInfo; + using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo; + struct PhysicalDeviceFeatures; + struct PhysicalDeviceFeatures2; + using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2; + struct PhysicalDeviceFloatControlsPropertiesKHR; + struct PhysicalDeviceFragmentDensityMapFeaturesEXT; + struct PhysicalDeviceFragmentDensityMapPropertiesEXT; + struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV; + struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT; + struct PhysicalDeviceGroupProperties; + using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; + struct PhysicalDeviceHostQueryResetFeaturesEXT; + struct PhysicalDeviceIDProperties; + using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties; + struct PhysicalDeviceImageDrmFormatModifierInfoEXT; + struct PhysicalDeviceImageFormatInfo2; + using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2; + struct PhysicalDeviceImageViewImageFormatInfoEXT; + struct PhysicalDeviceImagelessFramebufferFeaturesKHR; + struct PhysicalDeviceIndexTypeUint8FeaturesEXT; + struct PhysicalDeviceInlineUniformBlockFeaturesEXT; + struct PhysicalDeviceInlineUniformBlockPropertiesEXT; + struct PhysicalDeviceLimits; + struct PhysicalDeviceLineRasterizationFeaturesEXT; + struct PhysicalDeviceLineRasterizationPropertiesEXT; + struct PhysicalDeviceMaintenance3Properties; + using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties; + struct PhysicalDeviceMemoryBudgetPropertiesEXT; + struct PhysicalDeviceMemoryPriorityFeaturesEXT; + struct PhysicalDeviceMemoryProperties; + struct PhysicalDeviceMemoryProperties2; + using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2; + struct PhysicalDeviceMeshShaderFeaturesNV; + struct PhysicalDeviceMeshShaderPropertiesNV; + struct PhysicalDeviceMultiviewFeatures; + using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures; + struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + struct PhysicalDeviceMultiviewProperties; + using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties; + struct PhysicalDevicePCIBusInfoPropertiesEXT; + struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + struct PhysicalDevicePointClippingProperties; + using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties; + struct PhysicalDeviceProperties; + struct PhysicalDeviceProperties2; + using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2; + struct PhysicalDeviceProtectedMemoryFeatures; + struct PhysicalDeviceProtectedMemoryProperties; + struct PhysicalDevicePushDescriptorPropertiesKHR; + struct PhysicalDeviceRayTracingPropertiesNV; + struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV; + struct PhysicalDeviceSampleLocationsPropertiesEXT; + struct PhysicalDeviceSamplerFilterMinmaxPropertiesEXT; + struct PhysicalDeviceSamplerYcbcrConversionFeatures; + using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures; + struct PhysicalDeviceScalarBlockLayoutFeaturesEXT; + struct PhysicalDeviceShaderAtomicInt64FeaturesKHR; 
+ struct PhysicalDeviceShaderCoreProperties2AMD; + struct PhysicalDeviceShaderCorePropertiesAMD; + struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; + struct PhysicalDeviceShaderDrawParametersFeatures; + using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures; + struct PhysicalDeviceShaderFloat16Int8FeaturesKHR; + using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8FeaturesKHR; + struct PhysicalDeviceShaderImageFootprintFeaturesNV; + struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + struct PhysicalDeviceShaderSMBuiltinsFeaturesNV; + struct PhysicalDeviceShaderSMBuiltinsPropertiesNV; + struct PhysicalDeviceShadingRateImageFeaturesNV; + struct PhysicalDeviceShadingRateImagePropertiesNV; + struct PhysicalDeviceSparseImageFormatInfo2; + using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2; + struct PhysicalDeviceSparseProperties; + struct PhysicalDeviceSubgroupProperties; + struct PhysicalDeviceSubgroupSizeControlFeaturesEXT; + struct PhysicalDeviceSubgroupSizeControlPropertiesEXT; + struct PhysicalDeviceSurfaceInfo2KHR; + struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT; + struct PhysicalDeviceTexelBufferAlignmentPropertiesEXT; + struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT; + struct PhysicalDeviceTransformFeedbackFeaturesEXT; + struct PhysicalDeviceTransformFeedbackPropertiesEXT; + struct PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR; + struct PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures; + using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures; + struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT; + struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT; + struct PhysicalDeviceVulkanMemoryModelFeaturesKHR; + struct PhysicalDeviceYcbcrImageArraysFeaturesEXT; + struct PipelineCacheCreateInfo; + struct PipelineColorBlendAdvancedStateCreateInfoEXT; + struct PipelineColorBlendAttachmentState; + struct PipelineColorBlendStateCreateInfo; + struct PipelineCompilerControlCreateInfoAMD; + struct PipelineCoverageModulationStateCreateInfoNV; + struct PipelineCoverageReductionStateCreateInfoNV; + struct PipelineCoverageToColorStateCreateInfoNV; + struct PipelineCreationFeedbackCreateInfoEXT; + struct PipelineCreationFeedbackEXT; + struct PipelineDepthStencilStateCreateInfo; + struct PipelineDiscardRectangleStateCreateInfoEXT; + struct PipelineDynamicStateCreateInfo; + struct PipelineExecutableInfoKHR; + struct PipelineExecutableInternalRepresentationKHR; + struct PipelineExecutablePropertiesKHR; + struct PipelineExecutableStatisticKHR; + union PipelineExecutableStatisticValueKHR; + struct PipelineInfoKHR; + struct PipelineInputAssemblyStateCreateInfo; + struct PipelineLayoutCreateInfo; + struct PipelineMultisampleStateCreateInfo; + struct PipelineRasterizationConservativeStateCreateInfoEXT; + struct PipelineRasterizationDepthClipStateCreateInfoEXT; + struct PipelineRasterizationLineStateCreateInfoEXT; + struct PipelineRasterizationStateCreateInfo; + struct PipelineRasterizationStateRasterizationOrderAMD; + struct PipelineRasterizationStateStreamCreateInfoEXT; + struct PipelineRepresentativeFragmentTestStateCreateInfoNV; + struct PipelineSampleLocationsStateCreateInfoEXT; + struct PipelineShaderStageCreateInfo; + struct 
PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; + struct PipelineTessellationDomainOriginStateCreateInfo; + using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo; + struct PipelineTessellationStateCreateInfo; + struct PipelineVertexInputDivisorStateCreateInfoEXT; + struct PipelineVertexInputStateCreateInfo; + struct PipelineViewportCoarseSampleOrderStateCreateInfoNV; + struct PipelineViewportExclusiveScissorStateCreateInfoNV; + struct PipelineViewportShadingRateImageStateCreateInfoNV; + struct PipelineViewportStateCreateInfo; + struct PipelineViewportSwizzleStateCreateInfoNV; + struct PipelineViewportWScalingStateCreateInfoNV; +#ifdef VK_USE_PLATFORM_GGP + struct PresentFrameTokenGGP; +#endif /*VK_USE_PLATFORM_GGP*/ + struct PresentInfoKHR; + struct PresentRegionKHR; + struct PresentRegionsKHR; + struct PresentTimeGOOGLE; + struct PresentTimesInfoGOOGLE; + struct ProtectedSubmitInfo; + struct PushConstantRange; + struct QueryPoolCreateInfo; + struct QueryPoolCreateInfoINTEL; + struct QueueFamilyCheckpointPropertiesNV; + struct QueueFamilyProperties; + struct QueueFamilyProperties2; + using QueueFamilyProperties2KHR = QueueFamilyProperties2; + struct RayTracingPipelineCreateInfoNV; + struct RayTracingShaderGroupCreateInfoNV; + struct Rect2D; + struct RectLayerKHR; + struct RefreshCycleDurationGOOGLE; + struct RenderPassAttachmentBeginInfoKHR; + struct RenderPassBeginInfo; + struct RenderPassCreateInfo; + struct RenderPassCreateInfo2KHR; + struct RenderPassFragmentDensityMapCreateInfoEXT; + struct RenderPassInputAttachmentAspectCreateInfo; + using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo; + struct RenderPassMultiviewCreateInfo; + using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo; + struct RenderPassSampleLocationsBeginInfoEXT; + struct SampleLocationEXT; + struct SampleLocationsInfoEXT; + struct SamplerCreateInfo; + struct SamplerReductionModeCreateInfoEXT; + struct SamplerYcbcrConversionCreateInfo; + using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo; + struct SamplerYcbcrConversionImageFormatProperties; + using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties; + struct SamplerYcbcrConversionInfo; + using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo; + struct SemaphoreCreateInfo; + struct SemaphoreGetFdInfoKHR; +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct SemaphoreGetWin32HandleInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + struct ShaderModuleCreateInfo; + struct ShaderModuleValidationCacheCreateInfoEXT; + struct ShaderResourceUsageAMD; + struct ShaderStatisticsInfoAMD; + struct ShadingRatePaletteNV; + struct SharedPresentSurfaceCapabilitiesKHR; + struct SparseBufferMemoryBindInfo; + struct SparseImageFormatProperties; + struct SparseImageFormatProperties2; + using SparseImageFormatProperties2KHR = SparseImageFormatProperties2; + struct SparseImageMemoryBind; + struct SparseImageMemoryBindInfo; + struct SparseImageMemoryRequirements; + struct SparseImageMemoryRequirements2; + using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2; + struct SparseImageOpaqueMemoryBindInfo; + struct SparseMemoryBind; + struct SpecializationInfo; + struct SpecializationMapEntry; + struct StencilOpState; +#ifdef VK_USE_PLATFORM_GGP + struct StreamDescriptorSurfaceCreateInfoGGP; +#endif /*VK_USE_PLATFORM_GGP*/ + struct SubmitInfo; + struct SubpassBeginInfoKHR; + struct 
SubpassDependency; + struct SubpassDependency2KHR; + struct SubpassDescription; + struct SubpassDescription2KHR; + struct SubpassDescriptionDepthStencilResolveKHR; + struct SubpassEndInfoKHR; + struct SubpassSampleLocationsEXT; + struct SubresourceLayout; + struct SurfaceCapabilities2EXT; + struct SurfaceCapabilities2KHR; +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct SurfaceCapabilitiesFullScreenExclusiveEXT; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + struct SurfaceCapabilitiesKHR; + struct SurfaceFormat2KHR; + struct SurfaceFormatKHR; +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct SurfaceFullScreenExclusiveInfoEXT; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct SurfaceFullScreenExclusiveWin32InfoEXT; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + struct SurfaceProtectedCapabilitiesKHR; + struct SwapchainCounterCreateInfoEXT; + struct SwapchainCreateInfoKHR; + struct SwapchainDisplayNativeHdrCreateInfoAMD; + struct TextureLODGatherFormatPropertiesAMD; + struct ValidationCacheCreateInfoEXT; + struct ValidationFeaturesEXT; + struct ValidationFlagsEXT; + struct VertexInputAttributeDescription; + struct VertexInputBindingDescription; + struct VertexInputBindingDivisorDescriptionEXT; +#ifdef VK_USE_PLATFORM_VI_NN + struct ViSurfaceCreateInfoNN; +#endif /*VK_USE_PLATFORM_VI_NN*/ + struct Viewport; + struct ViewportSwizzleNV; + struct ViewportWScalingNV; +#ifdef VK_USE_PLATFORM_WAYLAND_KHR + struct WaylandSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct Win32KeyedMutexAcquireReleaseInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct Win32KeyedMutexAcquireReleaseInfoNV; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct Win32SurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + struct WriteDescriptorSet; + struct WriteDescriptorSetAccelerationStructureNV; + struct WriteDescriptorSetInlineUniformBlockEXT; + struct XYColorEXT; +#ifdef VK_USE_PLATFORM_XCB_KHR + struct XcbSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_XCB_KHR*/ +#ifdef VK_USE_PLATFORM_XLIB_KHR + struct XlibSurfaceCreateInfoKHR; +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/handles.hpp b/app/libraries/VulkanHpp/vulkan/internal/handles.hpp new file mode 100644 index 00000000..666949d5 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/handles.hpp @@ -0,0 +1,6132 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. 
+// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "forward.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + class SurfaceKHR + { + public: + using CType = VkSurfaceKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSurfaceKHR; + + public: + VULKAN_HPP_CONSTEXPR SurfaceKHR() VULKAN_HPP_NOEXCEPT + : m_surfaceKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_surfaceKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT + : m_surfaceKHR( surfaceKHR ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + SurfaceKHR & operator=(VkSurfaceKHR surfaceKHR) VULKAN_HPP_NOEXCEPT + { + m_surfaceKHR = surfaceKHR; + return *this; + } +#endif + + SurfaceKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_surfaceKHR = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SurfaceKHR const& ) const = default; +#else + bool operator==( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR == rhs.m_surfaceKHR; + } + + bool operator!=(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR != rhs.m_surfaceKHR; + } + + bool operator<(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR < rhs.m_surfaceKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_surfaceKHR == VK_NULL_HANDLE; + } + + private: + VkSurfaceKHR m_surfaceKHR; + }; + static_assert( sizeof( SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" 
);
+
+  template <>
+  struct cpp_type<ObjectType::eSurfaceKHR>
+  {
+    using type = SurfaceKHR;
+  };
+
+  class DebugReportCallbackEXT
+  {
+  public:
+    using CType = VkDebugReportCallbackEXT;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDebugReportCallbackEXT;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() VULKAN_HPP_NOEXCEPT
+      : m_debugReportCallbackEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_debugReportCallbackEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT
+      : m_debugReportCallbackEXT( debugReportCallbackEXT )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DebugReportCallbackEXT & operator=(VkDebugReportCallbackEXT debugReportCallbackEXT) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugReportCallbackEXT = debugReportCallbackEXT;
+      return *this;
+    }
+#endif
+
+    DebugReportCallbackEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugReportCallbackEXT = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugReportCallbackEXT const& ) const = default;
+#else
+    bool operator==( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT;
+    }
+
+    bool operator!=(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT;
+    }
+
+    bool operator<(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugReportCallbackEXT == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDebugReportCallbackEXT m_debugReportCallbackEXT;
+  };
+  static_assert( sizeof( DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eDebugReportCallbackEXT>
+  {
+    using type = DebugReportCallbackEXT;
+  };
+
+  class DebugUtilsMessengerEXT
+  {
+  public:
+    using CType = VkDebugUtilsMessengerEXT;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDebugUtilsMessengerEXT;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() VULKAN_HPP_NOEXCEPT
+      : m_debugUtilsMessengerEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_debugUtilsMessengerEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT
+      : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DebugUtilsMessengerEXT & operator=(VkDebugUtilsMessengerEXT debugUtilsMessengerEXT) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugUtilsMessengerEXT = debugUtilsMessengerEXT;
+      return *this;
+    }
+#endif
+
+    DebugUtilsMessengerEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_debugUtilsMessengerEXT = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugUtilsMessengerEXT const& ) const = default;
+#else
+    bool operator==( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT;
+    }
+
+    bool operator!=(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT;
+    }
+
+    bool operator<(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_debugUtilsMessengerEXT == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT;
+  };
+  static_assert( sizeof( DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eDebugUtilsMessengerEXT>
+  {
+    using type = DebugUtilsMessengerEXT;
+  };
+
+  class DisplayKHR
+  {
+  public:
+    using CType = VkDisplayKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDisplayKHR;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DisplayKHR() VULKAN_HPP_NOEXCEPT
+      : m_displayKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_displayKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT
+      : m_displayKHR( displayKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DisplayKHR & operator=(VkDisplayKHR displayKHR) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayKHR = displayKHR;
+      return *this;
+    }
+#endif
+
+    DisplayKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayKHR = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayKHR const& ) const = default;
+#else
+    bool operator==( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR == rhs.m_displayKHR;
+    }
+
+    bool operator!=(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR != rhs.m_displayKHR;
+    }
+
+    bool operator<(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR < rhs.m_displayKHR;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDisplayKHR m_displayKHR;
+  };
+  static_assert( sizeof( DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" );
); + + template <> + struct cpp_type + { + using type = DisplayKHR; + }; + + class SwapchainKHR + { + public: + using CType = VkSwapchainKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSwapchainKHR; + + public: + VULKAN_HPP_CONSTEXPR SwapchainKHR() VULKAN_HPP_NOEXCEPT + : m_swapchainKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_swapchainKHR(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT + : m_swapchainKHR( swapchainKHR ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + SwapchainKHR & operator=(VkSwapchainKHR swapchainKHR) VULKAN_HPP_NOEXCEPT + { + m_swapchainKHR = swapchainKHR; + return *this; + } +#endif + + SwapchainKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_swapchainKHR = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SwapchainKHR const& ) const = default; +#else + bool operator==( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR == rhs.m_swapchainKHR; + } + + bool operator!=(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR != rhs.m_swapchainKHR; + } + + bool operator<(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR < rhs.m_swapchainKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_swapchainKHR == VK_NULL_HANDLE; + } + + private: + VkSwapchainKHR m_swapchainKHR; + }; + static_assert( sizeof( SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" 
); + + template <> + struct cpp_type + { + using type = SwapchainKHR; + }; + + class Semaphore + { + public: + using CType = VkSemaphore; + + static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSemaphore; + + public: + VULKAN_HPP_CONSTEXPR Semaphore() VULKAN_HPP_NOEXCEPT + : m_semaphore(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_semaphore(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT + : m_semaphore( semaphore ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Semaphore & operator=(VkSemaphore semaphore) VULKAN_HPP_NOEXCEPT + { + m_semaphore = semaphore; + return *this; + } +#endif + + Semaphore & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_semaphore = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Semaphore const& ) const = default; +#else + bool operator==( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_semaphore == rhs.m_semaphore; + } + + bool operator!=(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_semaphore != rhs.m_semaphore; + } + + bool operator<(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_semaphore < rhs.m_semaphore; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_semaphore == VK_NULL_HANDLE; + } + + private: + VkSemaphore m_semaphore; + }; + static_assert( sizeof( Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" ); + + template <> + struct cpp_type + { + using type = Semaphore; + }; + + class Fence + { + public: + using CType = VkFence; + + static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eFence; + + public: + VULKAN_HPP_CONSTEXPR Fence() VULKAN_HPP_NOEXCEPT + : m_fence(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_fence(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) VULKAN_HPP_NOEXCEPT + : m_fence( fence ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Fence & operator=(VkFence fence) VULKAN_HPP_NOEXCEPT + { + m_fence = fence; + return *this; + } +#endif + + Fence & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_fence = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Fence const& ) const = default; +#else + bool operator==( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_fence == rhs.m_fence; + } + + bool operator!=(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_fence != rhs.m_fence; + } + + bool operator<(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_fence < rhs.m_fence; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const VULKAN_HPP_NOEXCEPT + { + return m_fence; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_fence != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_fence == VK_NULL_HANDLE; + } + + private: + VkFence m_fence; + }; + static_assert( sizeof( Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" 
); + + template <> + struct cpp_type + { + using type = Fence; + }; + + class PerformanceConfigurationINTEL + { + public: + using CType = VkPerformanceConfigurationINTEL; + + static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePerformanceConfigurationINTEL; + + public: + VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() VULKAN_HPP_NOEXCEPT + : m_performanceConfigurationINTEL(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_performanceConfigurationINTEL(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT + : m_performanceConfigurationINTEL( performanceConfigurationINTEL ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + PerformanceConfigurationINTEL & operator=(VkPerformanceConfigurationINTEL performanceConfigurationINTEL) VULKAN_HPP_NOEXCEPT + { + m_performanceConfigurationINTEL = performanceConfigurationINTEL; + return *this; + } +#endif + + PerformanceConfigurationINTEL & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_performanceConfigurationINTEL = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PerformanceConfigurationINTEL const& ) const = default; +#else + bool operator==( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL; + } + + bool operator!=(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL; + } + + bool operator<(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_performanceConfigurationINTEL == VK_NULL_HANDLE; + } + + private: + VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL; + }; + static_assert( sizeof( PerformanceConfigurationINTEL ) == sizeof( VkPerformanceConfigurationINTEL ), "handle and wrapper have different size!" 
); + + template <> + struct cpp_type + { + using type = PerformanceConfigurationINTEL; + }; + + class QueryPool + { + public: + using CType = VkQueryPool; + + static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eQueryPool; + + public: + VULKAN_HPP_CONSTEXPR QueryPool() VULKAN_HPP_NOEXCEPT + : m_queryPool(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_queryPool(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT + : m_queryPool( queryPool ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + QueryPool & operator=(VkQueryPool queryPool) VULKAN_HPP_NOEXCEPT + { + m_queryPool = queryPool; + return *this; + } +#endif + + QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_queryPool = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( QueryPool const& ) const = default; +#else + bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queryPool == rhs.m_queryPool; + } + + bool operator!=(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queryPool != rhs.m_queryPool; + } + + bool operator<(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_queryPool < rhs.m_queryPool; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_queryPool == VK_NULL_HANDLE; + } + + private: + VkQueryPool m_queryPool; + }; + static_assert( sizeof( QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" ); + + template <> + struct cpp_type + { + using type = QueryPool; + }; + + class Buffer + { + public: + using CType = VkBuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eBuffer; + + public: + VULKAN_HPP_CONSTEXPR Buffer() VULKAN_HPP_NOEXCEPT + : m_buffer(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_buffer(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT + : m_buffer( buffer ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Buffer & operator=(VkBuffer buffer) VULKAN_HPP_NOEXCEPT + { + m_buffer = buffer; + return *this; + } +#endif + + Buffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_buffer = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Buffer const& ) const = default; +#else + bool operator==( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_buffer == rhs.m_buffer; + } + + bool operator!=(Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_buffer != rhs.m_buffer; + } + + bool operator<(Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_buffer < rhs.m_buffer; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const VULKAN_HPP_NOEXCEPT + { + return m_buffer; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_buffer != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_buffer == VK_NULL_HANDLE; + } + + private: + VkBuffer m_buffer; + }; + static_assert( sizeof( Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" 
); + + template <> + struct cpp_type + { + using type = Buffer; + }; + + class PipelineLayout + { + public: + using CType = VkPipelineLayout; + + static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipelineLayout; + + public: + VULKAN_HPP_CONSTEXPR PipelineLayout() VULKAN_HPP_NOEXCEPT + : m_pipelineLayout(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_pipelineLayout(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT + : m_pipelineLayout( pipelineLayout ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + PipelineLayout & operator=(VkPipelineLayout pipelineLayout) VULKAN_HPP_NOEXCEPT + { + m_pipelineLayout = pipelineLayout; + return *this; + } +#endif + + PipelineLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipelineLayout = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineLayout const& ) const = default; +#else + bool operator==( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout == rhs.m_pipelineLayout; + } + + bool operator!=(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout != rhs.m_pipelineLayout; + } + + bool operator<(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout < rhs.m_pipelineLayout; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineLayout == VK_NULL_HANDLE; + } + + private: + VkPipelineLayout m_pipelineLayout; + }; + static_assert( sizeof( PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" 
); + + template <> + struct cpp_type + { + using type = PipelineLayout; + }; + + class DescriptorSet + { + public: + using CType = VkDescriptorSet; + + static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorSet; + + public: + VULKAN_HPP_CONSTEXPR DescriptorSet() VULKAN_HPP_NOEXCEPT + : m_descriptorSet(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_descriptorSet(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT + : m_descriptorSet( descriptorSet ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DescriptorSet & operator=(VkDescriptorSet descriptorSet) VULKAN_HPP_NOEXCEPT + { + m_descriptorSet = descriptorSet; + return *this; + } +#endif + + DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSet = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DescriptorSet const& ) const = default; +#else + bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet == rhs.m_descriptorSet; + } + + bool operator!=(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet != rhs.m_descriptorSet; + } + + bool operator<(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet < rhs.m_descriptorSet; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorSet == VK_NULL_HANDLE; + } + + private: + VkDescriptorSet m_descriptorSet; + }; + static_assert( sizeof( DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" 
); + + template <> + struct cpp_type + { + using type = DescriptorSet; + }; + + class Pipeline + { + public: + using CType = VkPipeline; + + static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipeline; + + public: + VULKAN_HPP_CONSTEXPR Pipeline() VULKAN_HPP_NOEXCEPT + : m_pipeline(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_pipeline(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT + : m_pipeline( pipeline ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Pipeline & operator=(VkPipeline pipeline) VULKAN_HPP_NOEXCEPT + { + m_pipeline = pipeline; + return *this; + } +#endif + + Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipeline = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Pipeline const& ) const = default; +#else + bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipeline == rhs.m_pipeline; + } + + bool operator!=(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipeline != rhs.m_pipeline; + } + + bool operator<(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipeline < rhs.m_pipeline; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipeline == VK_NULL_HANDLE; + } + + private: + VkPipeline m_pipeline; + }; + static_assert( sizeof( Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" ); + + template <> + struct cpp_type + { + using type = Pipeline; + }; + + class ImageView + { + public: + using CType = VkImageView; + + static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eImageView; + + public: + VULKAN_HPP_CONSTEXPR ImageView() VULKAN_HPP_NOEXCEPT + : m_imageView(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_imageView(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT + : m_imageView( imageView ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + ImageView & operator=(VkImageView imageView) VULKAN_HPP_NOEXCEPT + { + m_imageView = imageView; + return *this; + } +#endif + + ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_imageView = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageView const& ) const = default; +#else + bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_imageView == rhs.m_imageView; + } + + bool operator!=(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_imageView != rhs.m_imageView; + } + + bool operator<(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_imageView < rhs.m_imageView; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT + { + return m_imageView; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_imageView != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_imageView == VK_NULL_HANDLE; + } + + private: + VkImageView m_imageView; + }; + static_assert( sizeof( ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" 
); + + template <> + struct cpp_type + { + using type = ImageView; + }; + + class Image + { + public: + using CType = VkImage; + + static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eImage; + + public: + VULKAN_HPP_CONSTEXPR Image() VULKAN_HPP_NOEXCEPT + : m_image(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_image(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) VULKAN_HPP_NOEXCEPT + : m_image( image ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Image & operator=(VkImage image) VULKAN_HPP_NOEXCEPT + { + m_image = image; + return *this; + } +#endif + + Image & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_image = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Image const& ) const = default; +#else + bool operator==( Image const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_image == rhs.m_image; + } + + bool operator!=(Image const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_image != rhs.m_image; + } + + bool operator<(Image const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_image < rhs.m_image; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const VULKAN_HPP_NOEXCEPT + { + return m_image; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_image != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_image == VK_NULL_HANDLE; + } + + private: + VkImage m_image; + }; + static_assert( sizeof( Image ) == sizeof( VkImage ), "handle and wrapper have different size!" ); + + template <> + struct cpp_type + { + using type = Image; + }; + + class AccelerationStructureNV + { + public: + using CType = VkAccelerationStructureNV; + + static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eAccelerationStructureNV; + + public: + VULKAN_HPP_CONSTEXPR AccelerationStructureNV() VULKAN_HPP_NOEXCEPT + : m_accelerationStructureNV(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR AccelerationStructureNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureNV(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureNV( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureNV( accelerationStructureNV ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + AccelerationStructureNV & operator=(VkAccelerationStructureNV accelerationStructureNV) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureNV = accelerationStructureNV; + return *this; + } +#endif + + AccelerationStructureNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureNV = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AccelerationStructureNV const& ) const = default; +#else + bool operator==( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV == rhs.m_accelerationStructureNV; + } + + bool operator!=(AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV != rhs.m_accelerationStructureNV; + } + + bool operator<(AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV < rhs.m_accelerationStructureNV; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureNV() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV; + } + + explicit operator bool() const 
VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_accelerationStructureNV == VK_NULL_HANDLE; + } + + private: + VkAccelerationStructureNV m_accelerationStructureNV; + }; + static_assert( sizeof( AccelerationStructureNV ) == sizeof( VkAccelerationStructureNV ), "handle and wrapper have different size!" ); + + template <> + struct cpp_type + { + using type = AccelerationStructureNV; + }; + + class DescriptorUpdateTemplate + { + public: + using CType = VkDescriptorUpdateTemplate; + + static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorUpdateTemplate; + + public: + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() VULKAN_HPP_NOEXCEPT + : m_descriptorUpdateTemplate(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_descriptorUpdateTemplate(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT + : m_descriptorUpdateTemplate( descriptorUpdateTemplate ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + DescriptorUpdateTemplate & operator=(VkDescriptorUpdateTemplate descriptorUpdateTemplate) VULKAN_HPP_NOEXCEPT + { + m_descriptorUpdateTemplate = descriptorUpdateTemplate; + return *this; + } +#endif + + DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_descriptorUpdateTemplate = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DescriptorUpdateTemplate const& ) const = default; +#else + bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate; + } + + bool operator!=(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate; + } + + bool operator<(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_descriptorUpdateTemplate == VK_NULL_HANDLE; + } + + private: + VkDescriptorUpdateTemplate m_descriptorUpdateTemplate; + }; + static_assert( sizeof( DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" 
); + + template <> + struct cpp_type + { + using type = DescriptorUpdateTemplate; + }; + using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate; + + class Event + { + public: + using CType = VkEvent; + + static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eEvent; + + public: + VULKAN_HPP_CONSTEXPR Event() VULKAN_HPP_NOEXCEPT + : m_event(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_event(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT + : m_event( event ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + Event & operator=(VkEvent event) VULKAN_HPP_NOEXCEPT + { + m_event = event; + return *this; + } +#endif + + Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_event = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Event const& ) const = default; +#else + bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_event == rhs.m_event; + } + + bool operator!=(Event const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_event != rhs.m_event; + } + + bool operator<(Event const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_event < rhs.m_event; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT + { + return m_event; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_event != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_event == VK_NULL_HANDLE; + } + + private: + VkEvent m_event; + }; + static_assert( sizeof( Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" ); + + template <> + struct cpp_type + { + using type = Event; + }; + + class CommandBuffer + { + public: + using CType = VkCommandBuffer; + + static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eCommandBuffer; + + public: + VULKAN_HPP_CONSTEXPR CommandBuffer() VULKAN_HPP_NOEXCEPT + : m_commandBuffer(VK_NULL_HANDLE) + {} + + VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + : m_commandBuffer(VK_NULL_HANDLE) + {} + + VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT + : m_commandBuffer( commandBuffer ) + {} + +#if defined(VULKAN_HPP_TYPESAFE_CONVERSION) + CommandBuffer & operator=(VkCommandBuffer commandBuffer) VULKAN_HPP_NOEXCEPT + { + m_commandBuffer = commandBuffer; + return *this; + } +#endif + + CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_commandBuffer = VK_NULL_HANDLE; + return *this; + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( CommandBuffer const& ) const = default; +#else + bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer == rhs.m_commandBuffer; + } + + bool operator!=(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer != rhs.m_commandBuffer; + } + + bool operator<(CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_commandBuffer < rhs.m_commandBuffer; + } +#endif + + template + Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif 
/*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfoKHR* pSubpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfoKHR & subpassBeginInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy counterBuffers, ArrayProxy counterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void 
bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy descriptorSets, ArrayProxy dynamicOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, ArrayProxy sizes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindVertexBuffers( uint32_t firstBinding, ArrayProxy buffers, ArrayProxy offsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, 
VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void clearAttachments( ArrayProxy attachments, ArrayProxy rects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy ranges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy ranges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV mode, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void debugMarkerEndEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void dispatchBaseKHR( uint32_t 
baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void 
drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void endConditionalRenderingEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void endDebugUtilsLabelEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void endRenderPass(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endRenderPass2KHR( const SubpassEndInfoKHR & subpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy counterBuffers, ArrayProxy counterBufferOffsets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void executeCommands( ArrayProxy commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfoKHR* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfoKHR* pSubpassEndInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void nextSubpass2KHR( const SubpassBeginInfoKHR & subpassBeginInfo, const SubpassEndInfoKHR & subpassEndInfo, 
Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy memoryBarriers, ArrayProxy bufferMemoryBarriers, ArrayProxy imageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void processCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX* pProcessCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void processCommandsNVX( const CmdProcessCommandsInfoNVX & processCommandsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy values, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy descriptorWrites, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void reserveSpaceForCommandsNVX( const VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX* pReserveSpaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void reserveSpaceForCommandsNVX( const CmdReserveSpaceForCommandsInfoNVX & reserveSpaceInfo, Dispatch 
const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy regions, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setBlendConstants( const float blendConstants[4], Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void setCheckpointNV( const void* pCheckpointMarker, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy customSampleOrders, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void setDeviceMask( uint32_t deviceMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy discardRectangles, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; +#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + template + void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT; + + template + void setExclusiveScissorNV( uint32_t 
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> exclusiveScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLineWidth( float lineWidth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> scissors, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> viewports, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> shadingRatePalettes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> viewportWScalings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> imageMemoryBarriers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result end(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type end(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandBuffer == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkCommandBuffer m_commandBuffer;
+  };
+  static_assert( sizeof( CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
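+
+  // Editorial sketch (not generator output; `cmd` and `barrier` are illustrative):
+  // with enhanced mode enabled, the ArrayProxy overloads above accept a single
+  // object, a std::vector, or an initializer list in place of count/pointer pairs:
+  //
+  //   vk::ImageMemoryBarrier barrier{ /* ... */ };
+  //   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,
+  //                        vk::PipelineStageFlagBits::eFragmentShader,
+  //                        {}, nullptr, nullptr, barrier );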
+
+  template <>
+  struct cpp_type<ObjectType::eCommandBuffer>
+  {
+    using type = CommandBuffer;
+  };
+
+  class DeviceMemory
+  {
+  public:
+    using CType = VkDeviceMemory;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDeviceMemory;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DeviceMemory() VULKAN_HPP_NOEXCEPT
+      : m_deviceMemory(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_deviceMemory(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT
+      : m_deviceMemory( deviceMemory )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DeviceMemory & operator=(VkDeviceMemory deviceMemory) VULKAN_HPP_NOEXCEPT
+    {
+      m_deviceMemory = deviceMemory;
+      return *this;
+    }
+#endif
+
+    DeviceMemory & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_deviceMemory = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceMemory const& ) const = default;
+#else
+    bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory == rhs.m_deviceMemory;
+    }
+
+    bool operator!=(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory != rhs.m_deviceMemory;
+    }
+
+    bool operator<(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory < rhs.m_deviceMemory;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_deviceMemory == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDeviceMemory m_deviceMemory;
+  };
+  static_assert( sizeof( DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" );
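+
+  // Editorial sketch (not generator output; names illustrative): every handle
+  // wrapper below repeats this pattern and stores nothing but the raw C handle,
+  // which the static_asserts pin to the same size, so wrappers cross C interfaces
+  // unchanged:
+  //
+  //   vk::DeviceMemory memory( rawVkDeviceMemory );                // wrap
+  //   VkDeviceMemory raw = static_cast<VkDeviceMemory>( memory );  // unwrap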
+
+  template <>
+  struct cpp_type<ObjectType::eDeviceMemory>
+  {
+    using type = DeviceMemory;
+  };
+
+  class BufferView
+  {
+  public:
+    using CType = VkBufferView;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eBufferView;
+
+  public:
+    VULKAN_HPP_CONSTEXPR BufferView() VULKAN_HPP_NOEXCEPT
+      : m_bufferView(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_bufferView(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT
+      : m_bufferView( bufferView )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    BufferView & operator=(VkBufferView bufferView) VULKAN_HPP_NOEXCEPT
+    {
+      m_bufferView = bufferView;
+      return *this;
+    }
+#endif
+
+    BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_bufferView = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferView const& ) const = default;
+#else
+    bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView == rhs.m_bufferView;
+    }
+
+    bool operator!=(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView != rhs.m_bufferView;
+    }
+
+    bool operator<(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView < rhs.m_bufferView;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_bufferView == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkBufferView m_bufferView;
+  };
+  static_assert( sizeof( BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eBufferView>
+  {
+    using type = BufferView;
+  };
+
+  class CommandPool
+  {
+  public:
+    using CType = VkCommandPool;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eCommandPool;
+
+  public:
+    VULKAN_HPP_CONSTEXPR CommandPool() VULKAN_HPP_NOEXCEPT
+      : m_commandPool(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_commandPool(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT
+      : m_commandPool( commandPool )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    CommandPool & operator=(VkCommandPool commandPool) VULKAN_HPP_NOEXCEPT
+    {
+      m_commandPool = commandPool;
+      return *this;
+    }
+#endif
+
+    CommandPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_commandPool = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CommandPool const& ) const = default;
+#else
+    bool operator==( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool == rhs.m_commandPool;
+    }
+
+    bool operator!=(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool != rhs.m_commandPool;
+    }
+
+    bool operator<(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool < rhs.m_commandPool;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_commandPool == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkCommandPool m_commandPool;
+  };
+  static_assert( sizeof( CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eCommandPool>
+  {
+    using type = CommandPool;
+  };
+
+  class PipelineCache
+  {
+  public:
+    using CType = VkPipelineCache;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePipelineCache;
+
+  public:
+    VULKAN_HPP_CONSTEXPR PipelineCache() VULKAN_HPP_NOEXCEPT
+      : m_pipelineCache(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_pipelineCache(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT
+      : m_pipelineCache( pipelineCache )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    PipelineCache & operator=(VkPipelineCache pipelineCache) VULKAN_HPP_NOEXCEPT
+    {
+      m_pipelineCache = pipelineCache;
+      return *this;
+    }
+#endif
+
+    PipelineCache & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_pipelineCache = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineCache const& ) const = default;
+#else
+    bool operator==( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache == rhs.m_pipelineCache;
+    }
+
+    bool operator!=(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache != rhs.m_pipelineCache;
+    }
+
+    bool operator<(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache < rhs.m_pipelineCache;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_pipelineCache == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPipelineCache m_pipelineCache;
+  };
+  static_assert( sizeof( PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::ePipelineCache>
+  {
+    using type = PipelineCache;
+  };
+
+  class DescriptorPool
+  {
+  public:
+    using CType = VkDescriptorPool;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorPool;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DescriptorPool() VULKAN_HPP_NOEXCEPT
+      : m_descriptorPool(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorPool(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorPool( descriptorPool )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DescriptorPool & operator=(VkDescriptorPool descriptorPool) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorPool = descriptorPool;
+      return *this;
+    }
+#endif
+
+    DescriptorPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorPool = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorPool const& ) const = default;
+#else
+    bool operator==( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool == rhs.m_descriptorPool;
+    }
+
+    bool operator!=(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool != rhs.m_descriptorPool;
+    }
+
+    bool operator<(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool < rhs.m_descriptorPool;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorPool == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorPool m_descriptorPool;
+  };
+  static_assert( sizeof( DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eDescriptorPool>
+  {
+    using type = DescriptorPool;
+  };
+
+  class DescriptorSetLayout
+  {
+  public:
+    using CType = VkDescriptorSetLayout;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDescriptorSetLayout;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayout() VULKAN_HPP_NOEXCEPT
+      : m_descriptorSetLayout(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorSetLayout(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT
+      : m_descriptorSetLayout( descriptorSetLayout )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DescriptorSetLayout & operator=(VkDescriptorSetLayout descriptorSetLayout) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorSetLayout = descriptorSetLayout;
+      return *this;
+    }
+#endif
+
+    DescriptorSetLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_descriptorSetLayout = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSetLayout const& ) const = default;
+#else
+    bool operator==( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout == rhs.m_descriptorSetLayout;
+    }
+
+    bool operator!=(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout != rhs.m_descriptorSetLayout;
+    }
+
+    bool operator<(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout < rhs.m_descriptorSetLayout;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_descriptorSetLayout == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDescriptorSetLayout m_descriptorSetLayout;
+  };
+  static_assert( sizeof( DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eDescriptorSetLayout>
+  {
+    using type = DescriptorSetLayout;
+  };
+
+  class Framebuffer
+  {
+  public:
+    using CType = VkFramebuffer;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eFramebuffer;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Framebuffer() VULKAN_HPP_NOEXCEPT
+      : m_framebuffer(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_framebuffer(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT
+      : m_framebuffer( framebuffer )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Framebuffer & operator=(VkFramebuffer framebuffer) VULKAN_HPP_NOEXCEPT
+    {
+      m_framebuffer = framebuffer;
+      return *this;
+    }
+#endif
+
+    Framebuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_framebuffer = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Framebuffer const& ) const = default;
+#else
+    bool operator==( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer == rhs.m_framebuffer;
+    }
+
+    bool operator!=(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer != rhs.m_framebuffer;
+    }
+
+    bool operator<(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer < rhs.m_framebuffer;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_framebuffer == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkFramebuffer m_framebuffer;
+  };
+  static_assert( sizeof( Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eFramebuffer>
+  {
+    using type = Framebuffer;
+  };
+
+  class IndirectCommandsLayoutNVX
+  {
+  public:
+    using CType = VkIndirectCommandsLayoutNVX;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eIndirectCommandsLayoutNVX;
+
+  public:
+    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNVX() VULKAN_HPP_NOEXCEPT
+      : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_indirectCommandsLayoutNVX(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNVX( VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX ) VULKAN_HPP_NOEXCEPT
+      : m_indirectCommandsLayoutNVX( indirectCommandsLayoutNVX )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    IndirectCommandsLayoutNVX & operator=(VkIndirectCommandsLayoutNVX indirectCommandsLayoutNVX) VULKAN_HPP_NOEXCEPT
+    {
+      m_indirectCommandsLayoutNVX = indirectCommandsLayoutNVX;
+      return *this;
+    }
+#endif
+
+    IndirectCommandsLayoutNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_indirectCommandsLayoutNVX = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( IndirectCommandsLayoutNVX const& ) const = default;
+#else
+    bool operator==( IndirectCommandsLayoutNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNVX == rhs.m_indirectCommandsLayoutNVX;
+    }
+
+    bool operator!=(IndirectCommandsLayoutNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNVX != rhs.m_indirectCommandsLayoutNVX;
+    }
+
+    bool operator<(IndirectCommandsLayoutNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNVX < rhs.m_indirectCommandsLayoutNVX;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNVX() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNVX;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNVX != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_indirectCommandsLayoutNVX == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkIndirectCommandsLayoutNVX m_indirectCommandsLayoutNVX;
+  };
+  static_assert( sizeof( IndirectCommandsLayoutNVX ) == sizeof( VkIndirectCommandsLayoutNVX ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eIndirectCommandsLayoutNVX>
+  {
+    using type = IndirectCommandsLayoutNVX;
+  };
+
+  class ObjectTableNVX
+  {
+  public:
+    using CType = VkObjectTableNVX;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eObjectTableNVX;
+
+  public:
+    VULKAN_HPP_CONSTEXPR ObjectTableNVX() VULKAN_HPP_NOEXCEPT
+      : m_objectTableNVX(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR ObjectTableNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_objectTableNVX(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT ObjectTableNVX( VkObjectTableNVX objectTableNVX ) VULKAN_HPP_NOEXCEPT
+      : m_objectTableNVX( objectTableNVX )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    ObjectTableNVX & operator=(VkObjectTableNVX objectTableNVX) VULKAN_HPP_NOEXCEPT
+    {
+      m_objectTableNVX = objectTableNVX;
+      return *this;
+    }
+#endif
+
+    ObjectTableNVX & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_objectTableNVX = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ObjectTableNVX const& ) const = default;
+#else
+    bool operator==( ObjectTableNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_objectTableNVX == rhs.m_objectTableNVX;
+    }
+
+    bool operator!=(ObjectTableNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_objectTableNVX != rhs.m_objectTableNVX;
+    }
+
+    bool operator<(ObjectTableNVX const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_objectTableNVX < rhs.m_objectTableNVX;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkObjectTableNVX() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_objectTableNVX;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_objectTableNVX != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_objectTableNVX == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkObjectTableNVX m_objectTableNVX;
+  };
+  static_assert( sizeof( ObjectTableNVX ) == sizeof( VkObjectTableNVX ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eObjectTableNVX>
+  {
+    using type = ObjectTableNVX;
+  };
+
+  class RenderPass
+  {
+  public:
+    using CType = VkRenderPass;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eRenderPass;
+
+  public:
+    VULKAN_HPP_CONSTEXPR RenderPass() VULKAN_HPP_NOEXCEPT
+      : m_renderPass(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_renderPass(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT
+      : m_renderPass( renderPass )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    RenderPass & operator=(VkRenderPass renderPass) VULKAN_HPP_NOEXCEPT
+    {
+      m_renderPass = renderPass;
+      return *this;
+    }
+#endif
+
+    RenderPass & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_renderPass = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RenderPass const& ) const = default;
+#else
+    bool operator==( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass == rhs.m_renderPass;
+    }
+
+    bool operator!=(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass != rhs.m_renderPass;
+    }
+
+    bool operator<(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass < rhs.m_renderPass;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_renderPass == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkRenderPass m_renderPass;
+  };
+  static_assert( sizeof( RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eRenderPass>
+  {
+    using type = RenderPass;
+  };
+
+  class Sampler
+  {
+  public:
+    using CType = VkSampler;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSampler;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Sampler() VULKAN_HPP_NOEXCEPT
+      : m_sampler(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_sampler(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT
+      : m_sampler( sampler )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Sampler & operator=(VkSampler sampler) VULKAN_HPP_NOEXCEPT
+    {
+      m_sampler = sampler;
+      return *this;
+    }
+#endif
+
+    Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_sampler = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Sampler const& ) const = default;
+#else
+    bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler == rhs.m_sampler;
+    }
+
+    bool operator!=(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler != rhs.m_sampler;
+    }
+
+    bool operator<(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler < rhs.m_sampler;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_sampler == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSampler m_sampler;
+  };
+  static_assert( sizeof( Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eSampler>
+  {
+    using type = Sampler;
+  };
+
+  class SamplerYcbcrConversion
+  {
+  public:
+    using CType = VkSamplerYcbcrConversion;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eSamplerYcbcrConversion;
+
+  public:
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() VULKAN_HPP_NOEXCEPT
+      : m_samplerYcbcrConversion(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_samplerYcbcrConversion(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT
+      : m_samplerYcbcrConversion( samplerYcbcrConversion )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    SamplerYcbcrConversion & operator=(VkSamplerYcbcrConversion samplerYcbcrConversion) VULKAN_HPP_NOEXCEPT
+    {
+      m_samplerYcbcrConversion = samplerYcbcrConversion;
+      return *this;
+    }
+#endif
+
+    SamplerYcbcrConversion & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_samplerYcbcrConversion = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SamplerYcbcrConversion const& ) const = default;
+#else
+    bool operator==( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion;
+    }
+
+    bool operator!=(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion;
+    }
+
+    bool operator<(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_samplerYcbcrConversion == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkSamplerYcbcrConversion m_samplerYcbcrConversion;
+  };
+  static_assert( sizeof( SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eSamplerYcbcrConversion>
+  {
+    using type = SamplerYcbcrConversion;
+  };
+  using SamplerYcbcrConversionKHR = SamplerYcbcrConversion;
+
+  class ShaderModule
+  {
+  public:
+    using CType = VkShaderModule;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eShaderModule;
+
+  public:
+    VULKAN_HPP_CONSTEXPR ShaderModule() VULKAN_HPP_NOEXCEPT
+      : m_shaderModule(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_shaderModule(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT
+      : m_shaderModule( shaderModule )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    ShaderModule & operator=(VkShaderModule shaderModule) VULKAN_HPP_NOEXCEPT
+    {
+      m_shaderModule = shaderModule;
+      return *this;
+    }
+#endif
+
+    ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_shaderModule = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ShaderModule const& ) const = default;
+#else
+    bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule == rhs.m_shaderModule;
+    }
+
+    bool operator!=(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule != rhs.m_shaderModule;
+    }
+
+    bool operator<(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule < rhs.m_shaderModule;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_shaderModule == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkShaderModule m_shaderModule;
+  };
+  static_assert( sizeof( ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eShaderModule>
+  {
+    using type = ShaderModule;
+  };
+
+  class ValidationCacheEXT
+  {
+  public:
+    using CType = VkValidationCacheEXT;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eValidationCacheEXT;
+
+  public:
+    VULKAN_HPP_CONSTEXPR ValidationCacheEXT() VULKAN_HPP_NOEXCEPT
+      : m_validationCacheEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_validationCacheEXT(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT
+      : m_validationCacheEXT( validationCacheEXT )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    ValidationCacheEXT & operator=(VkValidationCacheEXT validationCacheEXT) VULKAN_HPP_NOEXCEPT
+    {
+      m_validationCacheEXT = validationCacheEXT;
+      return *this;
+    }
+#endif
+
+    ValidationCacheEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_validationCacheEXT = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ValidationCacheEXT const& ) const = default;
+#else
+    bool operator==( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT == rhs.m_validationCacheEXT;
+    }
+
+    bool operator!=(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT != rhs.m_validationCacheEXT;
+    }
+
+    bool operator<(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT < rhs.m_validationCacheEXT;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_validationCacheEXT == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkValidationCacheEXT m_validationCacheEXT;
+  };
+  static_assert( sizeof( ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::eValidationCacheEXT>
+  {
+    using type = ValidationCacheEXT;
+  };
+
+  class Queue
+  {
+  public:
+    using CType = VkQueue;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eQueue;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Queue() VULKAN_HPP_NOEXCEPT
+      : m_queue(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_queue(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT
+      : m_queue( queue )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Queue & operator=(VkQueue queue) VULKAN_HPP_NOEXCEPT
+    {
+      m_queue = queue;
+      return *this;
+    }
+#endif
+
+    Queue & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_queue = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Queue const& ) const = default;
+#else
+    bool operator==( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue == rhs.m_queue;
+    }
+
+    bool operator!=(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue != rhs.m_queue;
+    }
+
+    bool operator<(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue < rhs.m_queue;
+    }
+#endif
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<CheckpointDataNV,Allocator> getCheckpointDataNV(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<CheckpointDataNV,Allocator> getCheckpointDataNV(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void endDebugUtilsLabelEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result presentKHR( const PresentInfoKHR & presentInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_queue == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkQueue m_queue;
+  };
+  static_assert( sizeof( Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
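+
+  // Editorial sketch (not generator output; names illustrative): in enhanced mode
+  // submit() and waitIdle() return void and throw on failure, while presentKHR()
+  // still returns vk::Result because eSuboptimalKHR is a success code callers may
+  // need to inspect:
+  //
+  //   queue.submit( submitInfo, fence );  // single SubmitInfo converts to ArrayProxy
+  //   queue.waitIdle();
+  //   vk::Result result = queue.presentKHR( presentInfo );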
+
+  template <>
+  struct cpp_type<ObjectType::eQueue>
+  {
+    using type = Queue;
+  };
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  class Device;
+  template <typename Dispatch> class UniqueHandleTraits<AccelerationStructureNV, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueAccelerationStructureNV = UniqueHandle<AccelerationStructureNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Buffer, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueBuffer = UniqueHandle<Buffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<BufferView, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueBufferView = UniqueHandle<BufferView, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<CommandBuffer, Dispatch> { public: using deleter = PoolFree<Device, CommandPool, Dispatch>; };
+  using UniqueCommandBuffer = UniqueHandle<CommandBuffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<CommandPool, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueCommandPool = UniqueHandle<CommandPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<DescriptorPool, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueDescriptorPool = UniqueHandle<DescriptorPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<DescriptorSet, Dispatch> { public: using deleter = PoolFree<Device, DescriptorPool, Dispatch>; };
+  using UniqueDescriptorSet = UniqueHandle<DescriptorSet, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<DescriptorSetLayout, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueDescriptorSetLayout = UniqueHandle<DescriptorSetLayout, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<DescriptorUpdateTemplate, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueDescriptorUpdateTemplate = UniqueHandle<DescriptorUpdateTemplate, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  using UniqueDescriptorUpdateTemplateKHR = UniqueHandle<DescriptorUpdateTemplate, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<DeviceMemory, Dispatch> { public: using deleter = ObjectFree<Device, Dispatch>; };
+  using UniqueDeviceMemory = UniqueHandle<DeviceMemory, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Event, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueEvent = UniqueHandle<Event, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Fence, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueFence = UniqueHandle<Fence, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Framebuffer, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueFramebuffer = UniqueHandle<Framebuffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Image, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueImage = UniqueHandle<Image, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<ImageView, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueImageView = UniqueHandle<ImageView, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<IndirectCommandsLayoutNVX, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueIndirectCommandsLayoutNVX = UniqueHandle<IndirectCommandsLayoutNVX, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<ObjectTableNVX, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueObjectTableNVX = UniqueHandle<ObjectTableNVX, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Pipeline, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniquePipeline = UniqueHandle<Pipeline, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<PipelineCache, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniquePipelineCache = UniqueHandle<PipelineCache, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<PipelineLayout, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniquePipelineLayout = UniqueHandle<PipelineLayout, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<QueryPool, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueQueryPool = UniqueHandle<QueryPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<RenderPass, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueRenderPass = UniqueHandle<RenderPass, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Sampler, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueSampler = UniqueHandle<Sampler, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<SamplerYcbcrConversion, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueSamplerYcbcrConversion = UniqueHandle<SamplerYcbcrConversion, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  using UniqueSamplerYcbcrConversionKHR = UniqueHandle<SamplerYcbcrConversion, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<Semaphore, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueSemaphore = UniqueHandle<Semaphore, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<ShaderModule, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueShaderModule = UniqueHandle<ShaderModule, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<SwapchainKHR, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueSwapchainKHR = UniqueHandle<SwapchainKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<ValidationCacheEXT, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
+  using UniqueValidationCacheEXT = UniqueHandle<ValidationCacheEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
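+
+  // Editorial note (not generator output): the traits above only select a deleter
+  // (ObjectDestroy / ObjectFree / PoolFree); a UniqueHandle invokes it on scope
+  // exit, e.g. (sketch, `device` and `createInfo` illustrative):
+  //
+  //   { auto buffer = device.createBufferUnique( createInfo ); } // destroyed here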
+
+  class Device
+  {
+  public:
+    using CType = VkDevice;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDevice;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Device() VULKAN_HPP_NOEXCEPT
+      : m_device(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_device(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Device( VkDevice device ) VULKAN_HPP_NOEXCEPT
+      : m_device( device )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Device & operator=(VkDevice device) VULKAN_HPP_NOEXCEPT
+    {
+      m_device = device;
+      return *this;
+    }
+#endif
+
+    Device & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_device = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Device const& ) const = default;
+#else
+    bool operator==( Device const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device == rhs.m_device;
+    }
+
+    bool operator!=(Device const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device != rhs.m_device;
+    }
+
+    bool operator<(Device const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device < rhs.m_device;
+    }
+#endif
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValue<uint32_t> acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t* pImageIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ResultValue<uint32_t> acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<PerformanceConfigurationINTEL>::type acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
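+
+    // Editorial note (not generator output): unlike the throwing ResultValueType
+    // wrappers, the acquireNextImage*KHR overloads above return ResultValue<uint32_t>,
+    // since eTimeout, eNotReady and eSuboptimalKHR are success codes that must reach
+    // the caller along with the image index (sketch, names illustrative):
+    //
+    //   vk::ResultValue<uint32_t> next = device.acquireNextImageKHR( swapchain, UINT64_MAX, semaphore, nullptr );
+    //   if ( next.result == vk::Result::eSuboptimalKHR ) { /* recreate the swapchain */ }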
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<CommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<CommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<CommandBuffer,Allocator>>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Allocator = std::allocator<UniqueCommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<CommandBuffer,Dispatch>,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<UniqueCommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<CommandBuffer,Dispatch>,Allocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<DescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<DescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DescriptorSet,Allocator>>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Allocator = std::allocator<UniqueDescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<DescriptorSet,Dispatch>,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<UniqueDescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<DescriptorSet,Dispatch>,Allocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo* pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory* pMemory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<DeviceMemory>::type allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<DeviceMemory,Dispatch>>::type allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
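+
+    // Editorial sketch (not generator output; names illustrative): the enhanced
+    // allocate* overloads above size their result vector from the allocate-info's
+    // count field and throw on failure:
+    //
+    //   std::vector<vk::CommandBuffer> buffers = device.allocateCommandBuffers( allocateInfo );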
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type bindAccelerationStructureMemoryNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> bindInfos, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
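+
+    // Editorial note (not generator output): with enhanced mode enabled, the bind*
+    // functions above return ResultValueType<void>::type, i.e. plain void plus an
+    // exception on error (sketch, names illustrative):
+    //
+    //   device.bindBufferMemory( buffer, memory, 0 );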
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructure, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<AccelerationStructureNV>::type createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<AccelerationStructureNV,Dispatch>>::type createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<Buffer>::type createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Buffer,Dispatch>>::type createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<BufferView>::type createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<BufferView,Dispatch>>::type createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<CommandPool>::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<CommandPool,Dispatch>>::type createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
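+
+    // Illustrative sketch (editorial; `device` is an assumed vk::Device):
+    //   vk::BufferCreateInfo bufferInfo( {}, 1024, vk::BufferUsageFlagBits::eTransferSrc );
+    //   vk::UniqueBuffer buffer = device.createBufferUnique( bufferInfo );
+    // The *Unique variants wrap the handle in vk::UniqueHandle, which invokes the matching
+    // destroy call automatically when the wrapper goes out of scope.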
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<Pipeline,Allocator>>::type createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<Pipeline,Allocator>>::type createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<Pipeline>::type createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<DescriptorPool>::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<DescriptorPool,Dispatch>>::type createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<DescriptorSetLayout>::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<DescriptorSetLayout,Dispatch>>::type createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
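+
+    // Illustrative sketch (editorial; `device`, `pipelineCache`, and a filled-in
+    // vk::ComputePipelineCreateInfo `createInfo` are assumed):
+    //   vk::Pipeline pipeline = device.createComputePipeline( pipelineCache, createInfo );
+    // The plural createComputePipelines overloads take an ArrayProxy of create infos and
+    // return a std::vector of pipelines from a single call.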
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<DescriptorUpdateTemplate>::type createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<DescriptorUpdateTemplate>::type createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<DescriptorUpdateTemplate,Dispatch>>::type createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<Event>::type createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Event,Dispatch>>::type createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<Fence>::type createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
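+
+    // Illustrative sketch (editorial; `device` assumed):
+    //   vk::UniqueFence fence = device.createFenceUnique(
+    //       vk::FenceCreateInfo( vk::FenceCreateFlagBits::eSignaled ) );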
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<Framebuffer>::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Framebuffer,Dispatch>>::type createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<Pipeline,Allocator>>::type createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<Pipeline,Allocator>>::type createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<Pipeline>::type createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
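+
+    // Illustrative sketch (editorial; `device`, `pipelineCache`, and a populated
+    // vk::GraphicsPipelineCreateInfo `createInfo` are assumed):
+    //   vk::UniquePipeline pipeline = device.createGraphicsPipelineUnique( pipelineCache, createInfo );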
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<Image>::type createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Image,Dispatch>>::type createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<ImageView>::type createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<ImageView,Dispatch>>::type createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createIndirectCommandsLayoutNVX( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX* pIndirectCommandsLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<IndirectCommandsLayoutNVX>::type createIndirectCommandsLayoutNVX( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<IndirectCommandsLayoutNVX,Dispatch>>::type createIndirectCommandsLayoutNVXUnique( const IndirectCommandsLayoutCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createObjectTableNVX( const VULKAN_HPP_NAMESPACE::ObjectTableCreateInfoNVX* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ObjectTableNVX* pObjectTable, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<ObjectTableNVX>::type createObjectTableNVX( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<ObjectTableNVX,Dispatch>>::type createObjectTableNVXUnique( const ObjectTableCreateInfoNVX & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
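+
+    // Illustrative sketch (editorial; `device` and an existing vk::Image `image` assumed):
+    //   vk::ImageViewCreateInfo viewInfo( {}, image, vk::ImageViewType::e2D,
+    //       vk::Format::eR8G8B8A8Unorm, {}, { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
+    //   vk::UniqueImageView view = device.createImageViewUnique( viewInfo );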
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<PipelineCache>::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<PipelineCache,Dispatch>>::type createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<PipelineLayout>::type createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<PipelineLayout,Dispatch>>::type createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<QueryPool>::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<QueryPool,Dispatch>>::type createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<Pipeline,Allocator>>::type createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<Pipeline,Allocator>>::type createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<Pipeline>::type createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<UniquePipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<Pipeline,Dispatch>,Allocator>>::type createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Pipeline,Dispatch>>::type createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<RenderPass>::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2KHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<RenderPass>::type createRenderPass2KHR( const RenderPassCreateInfo2KHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<RenderPass,Dispatch>>::type createRenderPass2KHRUnique( const RenderPassCreateInfo2KHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<Sampler>::type createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Sampler,Dispatch>>::type createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
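+
+    // Illustrative sketch (editorial; `device` assumed):
+    //   vk::SamplerCreateInfo samplerInfo{};
+    //   samplerInfo.magFilter = vk::Filter::eLinear;
+    //   samplerInfo.minFilter = vk::Filter::eLinear;
+    //   vk::UniqueSampler sampler = device.createSamplerUnique( samplerInfo );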
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<SamplerYcbcrConversion>::type createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<SamplerYcbcrConversion>::type createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SamplerYcbcrConversion,Dispatch>>::type createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<Semaphore>::type createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Semaphore,Dispatch>>::type createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<ShaderModule>::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<ShaderModule,Dispatch>>::type createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
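+
+    // Illustrative sketch (editorial; `spirv` is an assumed std::vector<uint32_t> holding
+    // compiled SPIR-V words):
+    //   vk::ShaderModuleCreateInfo moduleInfo( {}, spirv.size() * sizeof( uint32_t ), spirv.data() );
+    //   vk::UniqueShaderModule shaderModule = device.createShaderModuleUnique( moduleInfo );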
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<SwapchainKHR,Allocator>>::type createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Allocator = std::allocator<UniqueSwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<UniqueSwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR,Dispatch>,Allocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> createInfos, Optional<const AllocationCallbacks> allocator, Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<SwapchainKHR>::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SwapchainKHR,Dispatch>>::type createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pValidationCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<ValidationCacheEXT>::type createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<ValidationCacheEXT,Dispatch>>::type createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
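+
+    // Illustrative sketch (editorial; `device` and a populated vk::SwapchainCreateInfoKHR
+    // `swapchainInfo` are assumed):
+    //   vk::UniqueSwapchainKHR swapchain = device.createSwapchainKHRUnique( swapchainInfo );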
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
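+
+    // Editorial note: each destroyX member below is paired with a generic destroy overload
+    // for the same handle type, so device.destroy( buffer ) and device.destroyBuffer( buffer )
+    // are equivalent; the generic form is the one the vk::UniqueHandle deleters invoke.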
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyIndirectCommandsLayoutNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyObjectTableNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
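+
+    // Illustrative sketch (editorial): device.waitIdle() blocks until all queued work on
+    // the device has completed; in enhanced mode it throws vk::SystemError on failure:
+    //   device.waitIdle();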
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type waitIdle(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> memoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, ArrayProxy<T> data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<StructureChain<X, Y, Z...>>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoEXT* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    DeviceAddress getBufferAddressEXT( const BufferDeviceAddressInfoEXT & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<uint64_t>::type getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> timestampInfos, ArrayProxy<uint64_t> timestamps, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type getGroupPresentCapabilitiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize* pCommittedMemoryInBytes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::DeviceSize getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2* pQueueInfo, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::Queue getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<int>::type getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<HANDLE>::type getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template<typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<SparseImageMemoryRequirements>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<SparseImageMemoryRequirements>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageMemoryRequirements,Allocator> getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageMemoryRequirements2,Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource* pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout* pLayout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::SubresourceLayout getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const ImageSubresource & subresource, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    uint32_t getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<struct AHardwareBuffer*>::type getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<int>::type getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<HANDLE>::type getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<HANDLE>::type getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PastPresentationTimingGOOGLE,Allocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<PipelineExecutableInternalRepresentationKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<PipelineExecutableInternalRepresentationKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR,Allocator>>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<PipelineExecutablePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<PipelineExecutablePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR,Allocator>>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<PipelineExecutableStatisticKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<PipelineExecutableStatisticKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PipelineExecutableStatisticKHR,Allocator>>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> data, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D* pGranularity, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<int>::type getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<HANDLE>::type getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<uint64_t>::type getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<Image,Allocator>>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<uint8_t,Allocator>>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type invalidateMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> memoryRanges, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void*>::type mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags = MemoryMapFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> srcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> srcCaches, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT* pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT* pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Fence,Dispatch>>::type registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX* const* ppObjectTableEntries, const uint32_t* pObjectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type registerObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy<const VULKAN_HPP_NAMESPACE::ObjectTableEntryNVX* const> pObjectTableEntries, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags = DescriptorPoolResetFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> fences, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
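+    // Editor's note, an illustrative sketch rather than generator output: the
+    // #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE pairs above declare each call twice.
+    // In the default (enhanced) build a failing vk::Result is converted into a
+    // thrown vk::SystemError, so, assuming a vk::Device `device` and a vk::Fence
+    // `fence` (hypothetical names), resetting fences reads:
+    //
+    //   device.resetFences( { fence } );   // ArrayProxy overload, throws on failure
+    //
+    // With VULKAN_HPP_DISABLE_ENHANCED_MODE defined only the pointer/count
+    // variant exists and the raw result must be checked by the caller:
+    //
+    //   vk::Result result = device.resetFences( 1, &fence );
+    //   if ( result != vk::Result::eSuccess ) { /* handle the error */ }
+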
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains, ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> metadata, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags = CommandPoolTrimFlags(), Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void uninitializePerformanceApiINTEL(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, uint32_t objectCount, const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes, const uint32_t* pObjectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type unregisterObjectsNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable, ArrayProxy<const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX> objectEntryTypes, ArrayProxy<const uint32_t> objectIndices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> descriptorWrites, ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> descriptorCopies, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_device == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDevice m_device;
+  };
+  static_assert( sizeof( Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
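+  // Editor's note, an illustrative sketch rather than generator output: a
+  // minimal enhanced-mode use of the vk::Device wrapper declared above;
+  // `device`, `pool`, `buffers` and `sampler` are hypothetical names:
+  //
+  //   vk::Queue queue = device.getQueue( queueFamilyIndex, 0 );
+  //   device.waitIdle();                            // throws vk::SystemError on failure
+  //   device.freeCommandBuffers( pool, buffers );   // ArrayProxy overload
+  //   device.destroy( sampler );                    // overloaded destroy( vk::Sampler )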
+
+  template <>
+  struct cpp_type<ObjectType::eDevice>
+  {
+    using type = Device;
+  };
+
+  class DisplayModeKHR
+  {
+  public:
+    using CType = VkDisplayModeKHR;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eDisplayModeKHR;
+
+  public:
+    VULKAN_HPP_CONSTEXPR DisplayModeKHR() VULKAN_HPP_NOEXCEPT
+      : m_displayModeKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_displayModeKHR(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT
+      : m_displayModeKHR( displayModeKHR )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    DisplayModeKHR & operator=(VkDisplayModeKHR displayModeKHR) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayModeKHR = displayModeKHR;
+      return *this;
+    }
+#endif
+
+    DisplayModeKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_displayModeKHR = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayModeKHR const& ) const = default;
+#else
+    bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR == rhs.m_displayModeKHR;
+    }
+
+    bool operator!=(DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR != rhs.m_displayModeKHR;
+    }
+
+    bool operator<(DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR < rhs.m_displayModeKHR;
+    }
+#endif
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_displayModeKHR == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkDisplayModeKHR m_displayModeKHR;
+  };
+  static_assert( sizeof( DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" );
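+  // Editor's note, an illustrative sketch rather than generator output: all
+  // handle wrappers in this header (Device, DisplayModeKHR, PhysicalDevice, ...)
+  // share the semantics defined above; they are the size of the underlying C
+  // handle, default-construct to VK_NULL_HANDLE, and test as booleans:
+  //
+  //   vk::DisplayModeKHR mode;                // holds VK_NULL_HANDLE
+  //   if ( !mode ) { /* not yet created */ }
+  //   VkDisplayModeKHR raw = static_cast<VkDisplayModeKHR>( mode );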
+
+  template <>
+  struct cpp_type<ObjectType::eDisplayModeKHR>
+  {
+    using type = DisplayModeKHR;
+  };
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch> class UniqueHandleTraits<Device, Dispatch> { public: using deleter = ObjectDestroy<NoParent, Dispatch>; };
+  using UniqueDevice = UniqueHandle<Device, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
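(Side note on the smart-handle block above, illustrative only: assuming VULKAN_HPP_NO_SMART_HANDLE is not defined and exceptions are enabled, vk::UniqueDevice owns the VkDevice and runs its deleter automatically; `physicalDevice`, `deviceCreateInfo`, and `queueFamilyIndex` are assumed to exist in the caller.)

    vk::UniqueDevice device = physicalDevice.createDeviceUnique( deviceCreateInfo );
    vk::Queue queue = device->getQueue( queueFamilyIndex, 0 );
    // vkDestroyDevice is called when 'device' goes out of scope.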
+
+  class PhysicalDevice
+  {
+  public:
+    using CType = VkPhysicalDevice;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::ePhysicalDevice;
+
+  public:
+    VULKAN_HPP_CONSTEXPR PhysicalDevice() VULKAN_HPP_NOEXCEPT
+      : m_physicalDevice(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_physicalDevice(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT
+      : m_physicalDevice( physicalDevice )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    PhysicalDevice & operator=(VkPhysicalDevice physicalDevice) VULKAN_HPP_NOEXCEPT
+    {
+      m_physicalDevice = physicalDevice;
+      return *this;
+    }
+#endif
+
+    PhysicalDevice & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_physicalDevice = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevice const& ) const = default;
+#else
+    bool operator==( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice == rhs.m_physicalDevice;
+    }
+
+    bool operator!=(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice != rhs.m_physicalDevice;
+    }
+
+    bool operator<(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice < rhs.m_physicalDevice;
+    }
+#endif
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type acquireXlibDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<Device,Dispatch>>::type createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateDeviceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<DisplayModeProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<DisplayModeProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayModeProperties2KHR,Allocator>>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<DisplayModePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<DisplayModePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayModePropertiesKHR,Allocator>>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<DisplayKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<DisplayKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayKHR,Allocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<TimeDomainEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type getCalibrateableTimeDomainsEXT(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<TimeDomainEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<TimeDomainEXT,Allocator>>::type getCalibrateableTimeDomainsEXT(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<CooperativeMatrixPropertiesNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type getCooperativeMatrixPropertiesNV(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<CooperativeMatrixPropertiesNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV,Allocator>>::type getCooperativeMatrixPropertiesNV(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<DisplayPlaneProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type getDisplayPlaneProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<DisplayPlaneProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayPlaneProperties2KHR,Allocator>>::type getDisplayPlaneProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<DisplayPlanePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type getDisplayPlanePropertiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<DisplayPlanePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayPlanePropertiesKHR,Allocator>>::type getDisplayPlanePropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<DisplayProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type getDisplayProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<DisplayProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayProperties2KHR,Allocator>>::type getDisplayProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<DisplayPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<DisplayPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<DisplayPropertiesKHR,Allocator>>::type getDisplayPropertiesKHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getFeatures2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getFeatures2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getGeneratedCommandsPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsFeaturesNVX* pFeatures, VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX* pLimits, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::DeviceGeneratedCommandsLimitsNVX getGeneratedCommandsPropertiesNVX( DeviceGeneratedCommandsFeaturesNVX & features, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getMemoryProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getMemoryProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<Rect2D,Allocator>>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+    template <typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    StructureChain<X, Y, Z...> getProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<QueueFamilyProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<QueueFamilyProperties,Allocator> getQueueFamilyProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<QueueFamilyProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<QueueFamilyProperties,Allocator> getQueueFamilyProperties(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template <typename StructureChain, typename Allocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<StructureChain,Allocator> getQueueFamilyProperties2(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename StructureChain, typename Allocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<StructureChain,Allocator> getQueueFamilyProperties2(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<QueueFamilyProperties2,Allocator> getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+    template <typename StructureChain, typename Allocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<StructureChain,Allocator> getQueueFamilyProperties2KHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename StructureChain, typename Allocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<StructureChain,Allocator> getQueueFamilyProperties2KHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<SparseImageFormatProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<SparseImageFormatProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageFormatProperties,Allocator> getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    std::vector<SparseImageFormatProperties2,Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<FramebufferMixedSamplesCombinationNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<FramebufferMixedSamplesCombinationNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV,Allocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename X, typename Y, typename ...Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<StructureChain<X, Y, Z...>>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<SurfaceFormat2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<SurfaceFormat2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<SurfaceFormat2KHR,Allocator>>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<SurfaceFormatKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<SurfaceFormatKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<SurfaceFormatKHR,Allocator>>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template <typename Allocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PresentModeKHR,Allocator>>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplay, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#else
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<void>::type releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_physicalDevice == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkPhysicalDevice m_physicalDevice;
+  };
+  static_assert( sizeof( PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" );
+
+  template <>
+  struct cpp_type<ObjectType::ePhysicalDevice>
+  {
+    using type = PhysicalDevice;
+  };
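(Usage sketch for the PhysicalDevice wrapper above, illustrative only and assuming exceptions are enabled: query the queue families, then create a logical device with one queue from family 0. `physicalDevice` is assumed to exist in the caller.)

    std::vector<vk::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();
    float priority = 1.0f;
    vk::DeviceQueueCreateInfo queueInfo( {}, /* queueFamilyIndex = */ 0, 1, &priority );
    vk::Device device = physicalDevice.createDevice( vk::DeviceCreateInfo( {}, 1, &queueInfo ) );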
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  class Instance;
+  template <typename Dispatch> class UniqueHandleTraits<DebugReportCallbackEXT, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
+  using UniqueDebugReportCallbackEXT = UniqueHandle<DebugReportCallbackEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<DebugUtilsMessengerEXT, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
+  using UniqueDebugUtilsMessengerEXT = UniqueHandle<DebugUtilsMessengerEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+  template <typename Dispatch> class UniqueHandleTraits<SurfaceKHR, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
+  using UniqueSurfaceKHR = UniqueHandle<SurfaceKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+  class Instance
+  {
+  public:
+    using CType = VkInstance;
+
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ObjectType objectType = ObjectType::eInstance;
+
+  public:
+    VULKAN_HPP_CONSTEXPR Instance() VULKAN_HPP_NOEXCEPT
+      : m_instance(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_instance(VK_NULL_HANDLE)
+    {}
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT
+      : m_instance( instance )
+    {}
+
+#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
+    Instance & operator=(VkInstance instance) VULKAN_HPP_NOEXCEPT
+    {
+      m_instance = instance;
+      return *this;
+    }
+#endif
+
+    Instance & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_instance = VK_NULL_HANDLE;
+      return *this;
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Instance const& ) const = default;
+#else
+    bool operator==( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance == rhs.m_instance;
+    }
+
+    bool operator!=(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance != rhs.m_instance;
+    }
+
+    bool operator<(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance < rhs.m_instance;
+    }
+#endif
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT* pCallback, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<DebugReportCallbackEXT,Dispatch>>::type createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT* pMessenger, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<DebugUtilsMessengerEXT,Dispatch>>::type createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+#ifdef VK_USE_PLATFORM_GGP
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_GGP*/
+
+#ifdef VK_USE_PLATFORM_VI_NN
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<UniqueHandle<SurfaceKHR,Dispatch>>::type createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroups(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroups(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroupsKHR(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PhysicalDeviceGroupProperties,Allocator>>::type enumeratePhysicalDeviceGroupsKHR(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Allocator = std::allocator<PhysicalDevice>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type enumeratePhysicalDevices(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const;
+    template<typename Allocator = std::allocator<PhysicalDevice>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    typename ResultValueType<std::vector<PhysicalDevice,Allocator>>::type enumeratePhysicalDevices(Allocator const& vectorAllocator, Dispatch const &d ) const;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+    template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) const VULKAN_HPP_NOEXCEPT;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance != VK_NULL_HANDLE;
+    }
+
+    bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_instance == VK_NULL_HANDLE;
+    }
+
+  private:
+    VkInstance m_instance;
+  };
+  static_assert( sizeof( Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" );
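// Usage sketch for the handle declared above (illustrative, not generated code;
// assumes the default `vk` namespace alias and enhanced mode with exceptions, so
// the value-returning overloads throw vk::SystemError on failure; `instance` is
// a hypothetical, already-created handle):
//
//     std::vector<vk::PhysicalDevice> gpus = instance.enumeratePhysicalDevices();
//     PFN_vkVoidFunction pfn = instance.getProcAddr( "vkGetDeviceProcAddr" );
//     instance.destroy(); // or let a vk::UniqueInstance do this at scope exit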
+
+  template <>
+  struct cpp_type<ObjectType::eInstance>
+  {
+    using type = Instance;
+  };
+
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch> class UniqueHandleTraits<Instance, Dispatch> { public: using deleter = ObjectDestroy<NoParent, Dispatch>; };
+  using UniqueInstance = UniqueHandle<Instance, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+  template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  typename ResultValueType<Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
+#ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  typename ResultValueType<UniqueHandle<Instance,Dispatch>>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName = nullptr, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
+  template<typename Allocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  typename ResultValueType<std::vector<ExtensionProperties,Allocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Allocator const& vectorAllocator, Dispatch const &d );
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Allocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
+  template<typename Allocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  typename ResultValueType<std::vector<LayerProperties,Allocator>>::type enumerateInstanceLayerProperties(Allocator const& vectorAllocator, Dispatch const &d );
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+  template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT;
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template<typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+  typename ResultValueType<uint32_t>::type enumerateInstanceVersion(Dispatch const &d = VULKAN_HPP_DEFAULT_DISPATCHER );
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+} // namespace VULKAN_HPP_NAMESPACE
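// Sketch of the free functions above in enhanced mode (illustrative; the
// identifiers `app` and `names` are hypothetical, VK_KHR_SURFACE_EXTENSION_NAME
// comes from the C headers):
//
//     vk::ApplicationInfo app( "demo", 1, nullptr, 0, VK_API_VERSION_1_1 );
//     const char* names[] = { VK_KHR_SURFACE_EXTENSION_NAME };
//     vk::UniqueInstance instance = vk::createInstanceUnique(
//         vk::InstanceCreateInfo( {}, &app, 0, nullptr, 1, names ) );
//     auto extensions = vk::enumerateInstanceExtensionProperties(); // two-call idiom handled internally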
diff --git a/app/libraries/VulkanHpp/vulkan/internal/objects.hpp b/app/libraries/VulkanHpp/vulkan/internal/objects.hpp
new file mode 100644
index 00000000..3aa6c729
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/objects.hpp
@@ -0,0 +1,251 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "dispatch.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct AllocationCallbacks;
+
+  template <typename OwnerType, typename Dispatch>
+  class ObjectDestroy
+  {
+    public:
+      ObjectDestroy()
+        : m_owner()
+        , m_allocationCallbacks( nullptr )
+        , m_dispatch( nullptr )
+      {}
+
+      ObjectDestroy( OwnerType owner, Optional<const AllocationCallbacks> allocationCallbacks = nullptr, Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
+        : m_owner( owner )
+        , m_allocationCallbacks( allocationCallbacks )
+        , m_dispatch( &dispatch )
+      {}
+
+      OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; }
+      Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; }
+
+    protected:
+      template <typename T>
+      void destroy(T t) VULKAN_HPP_NOEXCEPT
+      {
+        VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+        m_owner.destroy( t, m_allocationCallbacks, *m_dispatch );
+      }
+
+    private:
+      OwnerType m_owner;
+      Optional<const AllocationCallbacks> m_allocationCallbacks;
+      Dispatch const* m_dispatch;
+  };
+
+  class NoParent;
+
+  template <typename Dispatch>
+  class ObjectDestroy<NoParent,Dispatch>
+  {
+    public:
+      ObjectDestroy()
+        : m_allocationCallbacks( nullptr )
+        , m_dispatch( nullptr )
+      {}
+
+      ObjectDestroy( Optional<const AllocationCallbacks> allocationCallbacks, Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
+        : m_allocationCallbacks( allocationCallbacks )
+        , m_dispatch( &dispatch )
+      {}
+
+      Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; }
+
+    protected:
+      template <typename T>
+      void destroy(T t) VULKAN_HPP_NOEXCEPT
+      {
+        VULKAN_HPP_ASSERT( m_dispatch );
+        t.destroy( m_allocationCallbacks, *m_dispatch );
+      }
+
+    private:
+      Optional<const AllocationCallbacks> m_allocationCallbacks;
+      Dispatch const* m_dispatch;
+  };
+
+  template <typename OwnerType, typename Dispatch>
+  class ObjectFree
+  {
+    public:
+      ObjectFree()
+        : m_owner()
+        , m_allocationCallbacks( nullptr )
+        , m_dispatch( nullptr )
+      {}
+
+      ObjectFree( OwnerType owner, Optional<const AllocationCallbacks> allocationCallbacks, Dispatch const &dispatch ) VULKAN_HPP_NOEXCEPT
+        : m_owner( owner )
+        , m_allocationCallbacks( allocationCallbacks )
+        , m_dispatch( &dispatch )
+      {}
+
+      OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; }
+      Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; }
+
+    protected:
+      template <typename T>
+      void destroy(T t) VULKAN_HPP_NOEXCEPT
+      {
+        VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+        m_owner.free( t, m_allocationCallbacks, *m_dispatch );
+      }
+
+    private:
+      OwnerType m_owner;
+      Optional<const AllocationCallbacks> m_allocationCallbacks;
+      Dispatch const* m_dispatch;
+  };
+
+  template <typename OwnerType, typename PoolType, typename Dispatch>
+  class PoolFree
+  {
+    public:
+      PoolFree( OwnerType owner = OwnerType(), PoolType pool = PoolType(), Dispatch const &dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
+        : m_owner( owner )
+        , m_pool( pool )
+        , m_dispatch( &dispatch )
+      {}
+
+      OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; }
+      PoolType getPool() const VULKAN_HPP_NOEXCEPT { return m_pool; }
+
+    protected:
+      template <typename T>
+      void destroy(T t) VULKAN_HPP_NOEXCEPT
+      {
+        m_owner.free( m_pool, t, *m_dispatch );
+      }
+
+    private:
+      OwnerType m_owner;
+      PoolType m_pool;
+      Dispatch const* m_dispatch;
+  };
+
+  template <typename T, size_t N, size_t I>
+  class PrivateConstExpression1DArrayCopy
+  {
+    public:
+      VULKAN_HPP_CONSTEXPR_14 static void copy( T * dst, T const* src ) VULKAN_HPP_NOEXCEPT
+      {
+        PrivateConstExpression1DArrayCopy<T, N, I - 1>::copy( dst, src );
+        dst[I - 1] = src[I - 1];
+      }
+  };
+
+  template <typename T, size_t N>
+  class PrivateConstExpression1DArrayCopy<T, N, 0>
+  {
+    public:
+      VULKAN_HPP_CONSTEXPR_14 static void copy( T * /*dst*/, T const* /*src*/ ) VULKAN_HPP_NOEXCEPT
+      {}
+  };
+
+  template <typename T, size_t N>
+  class ConstExpression1DArrayCopy
+  {
+    public:
+      VULKAN_HPP_CONSTEXPR_14 static void copy( T dst[N], const T src[N] ) VULKAN_HPP_NOEXCEPT
+      {
+        const size_t C = N / 2;
+        PrivateConstExpression1DArrayCopy<T, C, C>::copy( dst, src );
+        PrivateConstExpression1DArrayCopy<T, N - C, N - C>::copy(dst + C, src + C);
+      }
+
+      VULKAN_HPP_CONSTEXPR_14 static void copy( T dst[N], std::array<T, N> const& src ) VULKAN_HPP_NOEXCEPT
+      {
+        const size_t C = N / 2;
+        PrivateConstExpression1DArrayCopy<T, C, C>::copy(dst, src.data());
+        PrivateConstExpression1DArrayCopy<T, N - C, N - C>::copy(dst + C, src.data() + C);
+      }
+  };
+
+  template <typename T, size_t N, size_t M, size_t I, size_t J>
+  class PrivateConstExpression2DArrayCopy
+  {
+    public:
+      VULKAN_HPP_CONSTEXPR_14 static void copy( T * dst, T const* src ) VULKAN_HPP_NOEXCEPT
+      {
+        PrivateConstExpression2DArrayCopy<T, N, M, I, J - 1>::copy( dst, src );
+        dst[(I - 1) * M + J - 1] = src[(I - 1) * M + J - 1];
+      }
+  };
+
+  template <typename T, size_t N, size_t M, size_t I>
+  class PrivateConstExpression2DArrayCopy<T, N, M, I, 0>
+  {
+    public:
+      VULKAN_HPP_CONSTEXPR_14 static void copy( T * dst, T const* src ) VULKAN_HPP_NOEXCEPT
+      {
+        PrivateConstExpression2DArrayCopy<T, N, M, I - 1, M>::copy( dst, src );
+      }
+  };
+
+  template <typename T, size_t N, size_t M>
+  class PrivateConstExpression2DArrayCopy<T, N, M, 0, M>
+  {
+    public:
+      VULKAN_HPP_CONSTEXPR_14 static void copy( T * /*dst*/, T const* /*src*/ ) VULKAN_HPP_NOEXCEPT
+      {}
+  };
+
+  template <typename T, size_t N, size_t M>
+  class ConstExpression2DArrayCopy
+  {
+    public:
+      VULKAN_HPP_CONSTEXPR_14 static void copy( T dst[N][M], const T src[N][M] ) VULKAN_HPP_NOEXCEPT
+      {
+        PrivateConstExpression2DArrayCopy<T, N, M, N, M>::copy( &dst[0][0], &src[0][0] );
+      }
+
+      VULKAN_HPP_CONSTEXPR_14 static void copy( T dst[N][M], std::array<std::array<T, M>, N> const& src ) VULKAN_HPP_NOEXCEPT
+      {
+        PrivateConstExpression2DArrayCopy<T, N, M, N, M>::copy( &dst[0][0], src.data()->data() );
+      }
+  };
+
+  using Bool32 = uint32_t;
+  using DeviceAddress = uint64_t;
+  using DeviceSize = uint64_t;
+  using SampleMask = uint32_t;
+} // namespace VULKAN_HPP_NAMESPACE
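// How these deleters are used (illustrative sketch, assuming the standard
// Vulkan-Hpp smart-handle machinery): UniqueHandle<Type, Dispatch> inherits the
// deleter selected by UniqueHandleTraits<Type, Dispatch> and invokes its
// destroy()/free() from the destructor. UniqueInstance, for example, pairs
// Instance with ObjectDestroy<NoParent, Dispatch>:
//
//     {
//       vk::UniqueInstance instance = vk::createInstanceUnique( createInfo );
//     } // ~UniqueHandle -> ObjectDestroy<NoParent>::destroy -> instance.destroy( allocator, dispatch )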
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkAcceleration.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkAcceleration.hpp
new file mode 100644
index 00000000..f1ec9d21
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkAcceleration.hpp
@@ -0,0 +1,789 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcceleration.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct GeometryTrianglesNV
+  {
+    VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {},
+                                              VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {},
+                                              uint32_t vertexCount_ = {},
+                                              VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {},
+                                              VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                              VULKAN_HPP_NAMESPACE::Buffer indexData_ = {},
+                                              VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {},
+                                              uint32_t indexCount_ = {},
+                                              VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16,
+                                              VULKAN_HPP_NAMESPACE::Buffer transformData_ = {},
+                                              VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT
+      : vertexData( vertexData_ )
+      , vertexOffset( vertexOffset_ )
+      , vertexCount( vertexCount_ )
+      , vertexStride( vertexStride_ )
+      , vertexFormat( vertexFormat_ )
+      , indexData( indexData_ )
+      , indexOffset( indexOffset_ )
+      , indexCount( indexCount_ )
+      , indexType( indexType_ )
+      , transformData( transformData_ )
+      , transformOffset( transformOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( GeometryTrianglesNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , vertexData( rhs.vertexData )
+      , vertexOffset( rhs.vertexOffset )
+      , vertexCount( rhs.vertexCount )
+      , vertexStride( rhs.vertexStride )
+      , vertexFormat( rhs.vertexFormat )
+      , indexData( rhs.indexData )
+      , indexOffset( rhs.indexOffset )
+      , indexCount( rhs.indexCount )
+      , indexType( rhs.indexType )
+      , transformData( rhs.transformData )
+      , transformOffset( rhs.transformOffset )
+    {}
+
+    GeometryTrianglesNV & operator=( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( GeometryTrianglesNV ) - offsetof( GeometryTrianglesNV, pNext ) );
+      return *this;
+    }
+
+    GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    GeometryTrianglesNV& operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const *>(&rhs);
+      return *this;
+    }
+
+    GeometryTrianglesNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setVertexData( VULKAN_HPP_NAMESPACE::Buffer vertexData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexData = vertexData_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setVertexOffset( VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexOffset = vertexOffset_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexCount = vertexCount_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexStride = vertexStride_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexFormat = vertexFormat_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setIndexData( VULKAN_HPP_NAMESPACE::Buffer indexData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexData = indexData_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexOffset = indexOffset_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexCount = indexCount_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexType = indexType_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setTransformData( VULKAN_HPP_NAMESPACE::Buffer transformData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transformData = transformData_;
+      return *this;
+    }
+
+    GeometryTrianglesNV & setTransformOffset( VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      transformOffset = transformOffset_;
+      return *this;
+    }
+
+    operator VkGeometryTrianglesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGeometryTrianglesNV*>( this );
+    }
+
+    operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGeometryTrianglesNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( GeometryTrianglesNV const& ) const = default;
+#else
+    bool operator==( GeometryTrianglesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( vertexData == rhs.vertexData )
+          && ( vertexOffset == rhs.vertexOffset )
+          && ( vertexCount == rhs.vertexCount )
+          && ( vertexStride == rhs.vertexStride )
+          && ( vertexFormat == rhs.vertexFormat )
+          && ( indexData == rhs.indexData )
+          && ( indexOffset == rhs.indexOffset )
+          && ( indexCount == rhs.indexCount )
+          && ( indexType == rhs.indexType )
+          && ( transformData == rhs.transformData )
+          && ( transformOffset == rhs.transformOffset );
+    }
+
+    bool operator!=( GeometryTrianglesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryTrianglesNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer vertexData = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset = {};
+    uint32_t vertexCount = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {};
+    VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::Buffer indexData = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize indexOffset = {};
+    uint32_t indexCount = {};
+    VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
+    VULKAN_HPP_NAMESPACE::Buffer transformData = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize transformOffset = {};
+  };
+  static_assert( sizeof( GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GeometryTrianglesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct GeometryAABBNV
+  {
+    VULKAN_HPP_CONSTEXPR GeometryAABBNV( VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {},
+                                         uint32_t numAABBs_ = {},
+                                         uint32_t stride_ = {},
+                                         VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT
+      : aabbData( aabbData_ )
+      , numAABBs( numAABBs_ )
+      , stride( stride_ )
+      , offset( offset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR GeometryAABBNV( GeometryAABBNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , aabbData( rhs.aabbData )
+      , numAABBs( rhs.numAABBs )
+      , stride( rhs.stride )
+      , offset( rhs.offset )
+    {}
+
+    GeometryAABBNV & operator=( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( GeometryAABBNV ) - offsetof( GeometryAABBNV, pNext ) );
+      return *this;
+    }
+
+    GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    GeometryAABBNV& operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryAABBNV const *>(&rhs);
+      return *this;
+    }
+
+    GeometryAABBNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    GeometryAABBNV & setAabbData( VULKAN_HPP_NAMESPACE::Buffer aabbData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aabbData = aabbData_;
+      return *this;
+    }
+
+    GeometryAABBNV & setNumAABBs( uint32_t numAABBs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      numAABBs = numAABBs_;
+      return *this;
+    }
+
+    GeometryAABBNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+
+    GeometryAABBNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    operator VkGeometryAABBNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGeometryAABBNV*>( this );
+    }
+
+    operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGeometryAABBNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( GeometryAABBNV const& ) const = default;
+#else
+    bool operator==( GeometryAABBNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( aabbData == rhs.aabbData )
+          && ( numAABBs == rhs.numAABBs )
+          && ( stride == rhs.stride )
+          && ( offset == rhs.offset );
+    }
+
+    bool operator!=( GeometryAABBNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryAabbNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer aabbData = {};
+    uint32_t numAABBs = {};
+    uint32_t stride = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+  };
+  static_assert( sizeof( GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GeometryAABBNV>::value, "struct wrapper is not a standard layout!" );
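// The setters above return *this, so a triangle geometry can be described
// fluently (illustrative sketch; `vertexBuffer`, `indexBuffer` and the counts
// are hypothetical):
//
//     vk::GeometryTrianglesNV triangles = vk::GeometryTrianglesNV()
//         .setVertexData( vertexBuffer ).setVertexCount( vertexCount )
//         .setVertexStride( sizeof( float ) * 3 ).setVertexFormat( vk::Format::eR32G32B32Sfloat )
//         .setIndexData( indexBuffer ).setIndexCount( indexCount ).setIndexType( vk::IndexType::eUint32 );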
+
+  struct GeometryDataNV
+  {
+    VULKAN_HPP_CONSTEXPR GeometryDataNV( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {},
+                                         VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {} ) VULKAN_HPP_NOEXCEPT
+      : triangles( triangles_ )
+      , aabbs( aabbs_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR GeometryDataNV( GeometryDataNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : triangles( rhs.triangles )
+      , aabbs( rhs.aabbs )
+    {}
+
+    GeometryDataNV & operator=( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( GeometryDataNV ) );
+      return *this;
+    }
+
+    GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    GeometryDataNV& operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryDataNV const *>(&rhs);
+      return *this;
+    }
+
+    GeometryDataNV & setTriangles( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      triangles = triangles_;
+      return *this;
+    }
+
+    GeometryDataNV & setAabbs( VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aabbs = aabbs_;
+      return *this;
+    }
+
+    operator VkGeometryDataNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGeometryDataNV*>( this );
+    }
+
+    operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGeometryDataNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( GeometryDataNV const& ) const = default;
+#else
+    bool operator==( GeometryDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( triangles == rhs.triangles )
+          && ( aabbs == rhs.aabbs );
+    }
+
+    bool operator!=( GeometryDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles = {};
+    VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs = {};
+  };
+  static_assert( sizeof( GeometryDataNV ) == sizeof( VkGeometryDataNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GeometryDataNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct GeometryNV
+  {
+    VULKAN_HPP_CONSTEXPR GeometryNV( VULKAN_HPP_NAMESPACE::GeometryTypeNV geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeNV::eTriangles,
+                                     VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {},
+                                     VULKAN_HPP_NAMESPACE::GeometryFlagsNV flags_ = {} ) VULKAN_HPP_NOEXCEPT
+      : geometryType( geometryType_ )
+      , geometry( geometry_ )
+      , flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR GeometryNV( GeometryNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , geometryType( rhs.geometryType )
+      , geometry( rhs.geometry )
+      , flags( rhs.flags )
+    {}
+
+    GeometryNV & operator=( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( GeometryNV ) - offsetof( GeometryNV, pNext ) );
+      return *this;
+    }
+
+    GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    GeometryNV& operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryNV const *>(&rhs);
+      return *this;
+    }
+
+    GeometryNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    GeometryNV & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeNV geometryType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryType = geometryType_;
+      return *this;
+    }
+
+    GeometryNV & setGeometry( VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometry = geometry_;
+      return *this;
+    }
+
+    GeometryNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator VkGeometryNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGeometryNV*>( this );
+    }
+
+    operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGeometryNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( GeometryNV const& ) const = default;
+#else
+    bool operator==( GeometryNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( geometryType == rhs.geometryType )
+          && ( geometry == rhs.geometry )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( GeometryNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::GeometryTypeNV geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeNV::eTriangles;
+    VULKAN_HPP_NAMESPACE::GeometryDataNV geometry = {};
+    VULKAN_HPP_NAMESPACE::GeometryFlagsNV flags = {};
+  };
+  static_assert( sizeof( GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GeometryNV>::value, "struct wrapper is not a standard layout!" );
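// GeometryDataNV and GeometryNV merely aggregate the structs above; only the
// member matching geometryType is consumed (illustrative sketch, reusing
// `triangles` from the previous note):
//
//     vk::GeometryNV geometry( vk::GeometryTypeNV::eTriangles,
//                              vk::GeometryDataNV( triangles ),
//                              vk::GeometryFlagBitsNV::eOpaque );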
+
+  struct AccelerationStructureInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV::eTopLevel,
+                                                      VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {},
+                                                      uint32_t instanceCount_ = {},
+                                                      uint32_t geometryCount_ = {},
+                                                      const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries_ = {} ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+      , flags( flags_ )
+      , instanceCount( instanceCount_ )
+      , geometryCount( geometryCount_ )
+      , pGeometries( pGeometries_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( AccelerationStructureInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , type( rhs.type )
+      , flags( rhs.flags )
+      , instanceCount( rhs.instanceCount )
+      , geometryCount( rhs.geometryCount )
+      , pGeometries( rhs.pGeometries )
+    {}
+
+    AccelerationStructureInfoNV & operator=( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureInfoNV ) - offsetof( AccelerationStructureInfoNV, pNext ) );
+      return *this;
+    }
+
+    AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AccelerationStructureInfoNV& operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    AccelerationStructureInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AccelerationStructureInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    AccelerationStructureInfoNV & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    AccelerationStructureInfoNV & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      instanceCount = instanceCount_;
+      return *this;
+    }
+
+    AccelerationStructureInfoNV & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryCount = geometryCount_;
+      return *this;
+    }
+
+    AccelerationStructureInfoNV & setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pGeometries = pGeometries_;
+      return *this;
+    }
+
+    operator VkAccelerationStructureInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureInfoNV*>( this );
+    }
+
+    operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureInfoNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AccelerationStructureInfoNV const& ) const = default;
+#else
+    bool operator==( AccelerationStructureInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type )
+          && ( flags == rhs.flags )
+          && ( instanceCount == rhs.instanceCount )
+          && ( geometryCount == rhs.geometryCount )
+          && ( pGeometries == rhs.pGeometries );
+    }
+
+    bool operator!=( AccelerationStructureInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV::eTopLevel;
+    VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags = {};
+    uint32_t instanceCount = {};
+    uint32_t geometryCount = {};
+    const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries = {};
+  };
+  static_assert( sizeof( AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct AccelerationStructureCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {},
+                                                            VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {} ) VULKAN_HPP_NOEXCEPT
+      : compactedSize( compactedSize_ )
+      , info( info_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( AccelerationStructureCreateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , compactedSize( rhs.compactedSize )
+      , info( rhs.info )
+    {}
+
+    AccelerationStructureCreateInfoNV & operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureCreateInfoNV ) - offsetof( AccelerationStructureCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AccelerationStructureCreateInfoNV& operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoNV & setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compactedSize = compactedSize_;
+      return *this;
+    }
+
+    AccelerationStructureCreateInfoNV & setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ ) VULKAN_HPP_NOEXCEPT
+    {
+      info = info_;
+      return *this;
+    }
+
+    operator VkAccelerationStructureCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( this );
+    }
+
+    operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureCreateInfoNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AccelerationStructureCreateInfoNV const& ) const = default;
+#else
+    bool operator==( AccelerationStructureCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( compactedSize == rhs.compactedSize )
+          && ( info == rhs.info );
+    }
+
+    bool operator!=( AccelerationStructureCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info = {};
+  };
+  static_assert( sizeof( AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
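// Combining the two structs above for a bottom-level acceleration structure
// (illustrative sketch; `geometry` as in the previous note, `device` is a
// hypothetical vk::Device):
//
//     vk::AccelerationStructureInfoNV info( vk::AccelerationStructureTypeNV::eBottomLevel,
//                                           {}, 0, 1, &geometry );
//     auto blas = device.createAccelerationStructureNVUnique(
//         vk::AccelerationStructureCreateInfoNV( 0, info ) );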
+
+  struct AccelerationStructureMemoryRequirementsInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject,
+                                                                        VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {} ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+      , accelerationStructure( accelerationStructure_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , type( rhs.type )
+      , accelerationStructure( rhs.accelerationStructure )
+    {}
+
+    AccelerationStructureMemoryRequirementsInfoNV & operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( AccelerationStructureMemoryRequirementsInfoNV ) - offsetof( AccelerationStructureMemoryRequirementsInfoNV, pNext ) );
+      return *this;
+    }
+
+    AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AccelerationStructureMemoryRequirementsInfoNV& operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AccelerationStructureMemoryRequirementsInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructure = accelerationStructure_;
+      return *this;
+    }
+
+    operator VkAccelerationStructureMemoryRequirementsInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( this );
+    }
+
+    operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AccelerationStructureMemoryRequirementsInfoNV const& ) const = default;
+#else
+    bool operator==( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type )
+          && ( accelerationStructure == rhs.accelerationStructure );
+    }
+
+    bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject;
+    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
+  };
+  static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoNV ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AccelerationStructureMemoryRequirementsInfoNV>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkAcquire.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkAcquire.hpp
new file mode 100644
index 00000000..a4f9c762
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkAcquire.hpp
@@ -0,0 +1,158 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcceleration.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct AcquireNextImageInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {},
+                                                  uint64_t timeout_ = {},
+                                                  VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {},
+                                                  VULKAN_HPP_NAMESPACE::Fence fence_ = {},
+                                                  uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT
+      : swapchain( swapchain_ )
+      , timeout( timeout_ )
+      , semaphore( semaphore_ )
+      , fence( fence_ )
+      , deviceMask( deviceMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( AcquireNextImageInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , swapchain( rhs.swapchain )
+      , timeout( rhs.timeout )
+      , semaphore( rhs.semaphore )
+      , fence( rhs.fence )
+      , deviceMask( rhs.deviceMask )
+    {}
+
+    AcquireNextImageInfoKHR & operator=( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( AcquireNextImageInfoKHR ) - offsetof( AcquireNextImageInfoKHR, pNext ) );
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AcquireNextImageInfoKHR& operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchain = swapchain_;
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      timeout = timeout_;
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
+    {
+      semaphore = semaphore_;
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    AcquireNextImageInfoKHR & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+
+    operator VkAcquireNextImageInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAcquireNextImageInfoKHR*>( this );
+    }
+
+    operator VkAcquireNextImageInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAcquireNextImageInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AcquireNextImageInfoKHR const& ) const = default;
+#else
+    bool operator==( AcquireNextImageInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchain == rhs.swapchain )
+          && ( timeout == rhs.timeout )
+          && ( semaphore == rhs.semaphore )
+          && ( fence == rhs.fence )
+          && ( deviceMask == rhs.deviceMask );
+    }
+
+    bool operator!=( AcquireNextImageInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
+    uint64_t timeout = {};
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
+    VULKAN_HPP_NAMESPACE::Fence fence = {};
+    uint32_t deviceMask = {};
+  };
+  static_assert( sizeof( AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AcquireNextImageInfoKHR>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
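// AcquireNextImageInfoKHR is the parameter block of Device::acquireNextImage2KHR
// (declared in the VkDevice command header); deviceMask restricts which devices
// of a device group may access the image (illustrative sketch; `device`,
// `swapchain` and `semaphore` are hypothetical handles):
//
//     vk::AcquireNextImageInfoKHR acquireInfo( swapchain, UINT64_MAX, semaphore, vk::Fence(), 1 );
//     uint32_t imageIndex = device.acquireNextImage2KHR( acquireInfo ).value;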
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkAllocation.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkAllocation.hpp
new file mode 100644
index 00000000..2cc098fc
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkAllocation.hpp
@@ -0,0 +1,159 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct AllocationCallbacks
+  {
+    VULKAN_HPP_CONSTEXPR AllocationCallbacks( void* pUserData_ = {},
+                                              PFN_vkAllocationFunction pfnAllocation_ = {},
+                                              PFN_vkReallocationFunction pfnReallocation_ = {},
+                                              PFN_vkFreeFunction pfnFree_ = {},
+                                              PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {},
+                                              PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pUserData( pUserData_ )
+      , pfnAllocation( pfnAllocation_ )
+      , pfnReallocation( pfnReallocation_ )
+      , pfnFree( pfnFree_ )
+      , pfnInternalAllocation( pfnInternalAllocation_ )
+      , pfnInternalFree( pfnInternalFree_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AllocationCallbacks( AllocationCallbacks const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pUserData( rhs.pUserData )
+      , pfnAllocation( rhs.pfnAllocation )
+      , pfnReallocation( rhs.pfnReallocation )
+      , pfnFree( rhs.pfnFree )
+      , pfnInternalAllocation( rhs.pfnInternalAllocation )
+      , pfnInternalFree( rhs.pfnInternalFree )
+    {}
+
+    AllocationCallbacks & operator=( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( AllocationCallbacks ) );
+      return *this;
+    }
+
+    AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AllocationCallbacks& operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AllocationCallbacks const *>(&rhs);
+      return *this;
+    }
+
+    AllocationCallbacks & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+
+    AllocationCallbacks & setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnAllocation = pfnAllocation_;
+      return *this;
+    }
+
+    AllocationCallbacks & setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnReallocation = pfnReallocation_;
+      return *this;
+    }
+
+    AllocationCallbacks & setPfnFree( PFN_vkFreeFunction pfnFree_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnFree = pfnFree_;
+      return *this;
+    }
+
+    AllocationCallbacks & setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnInternalAllocation = pfnInternalAllocation_;
+      return *this;
+    }
+
+    AllocationCallbacks & setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnInternalFree = pfnInternalFree_;
+      return *this;
+    }
+
+    operator VkAllocationCallbacks const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAllocationCallbacks*>( this );
+    }
+
+    operator VkAllocationCallbacks &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAllocationCallbacks*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AllocationCallbacks const& ) const = default;
+#else
+    bool operator==( AllocationCallbacks const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( pUserData == rhs.pUserData )
+          && ( pfnAllocation == rhs.pfnAllocation )
+          && ( pfnReallocation == rhs.pfnReallocation )
+          && ( pfnFree == rhs.pfnFree )
+          && ( pfnInternalAllocation == rhs.pfnInternalAllocation )
+          && ( pfnInternalFree == rhs.pfnInternalFree );
+    }
+
+    bool operator!=( AllocationCallbacks const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    void* pUserData = {};
+    PFN_vkAllocationFunction pfnAllocation = {};
+    PFN_vkReallocationFunction pfnReallocation = {};
+    PFN_vkFreeFunction pfnFree = {};
+    PFN_vkInternalAllocationNotification pfnInternalAllocation = {};
+    PFN_vkInternalFreeNotification pfnInternalFree = {};
+  };
+  static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AllocationCallbacks>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
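// AllocationCallbacks routes host allocations through user callbacks with the C
// PFN signatures (illustrative sketch; `myAlloc`, `myRealloc` and `myFree` are
// hypothetical functions matching PFN_vkAllocationFunction and friends, and
// `tracker` is a hypothetical user-data object):
//
//     vk::AllocationCallbacks callbacks( &tracker, &myAlloc, &myRealloc, &myFree );
//     vk::UniqueInstance instance = vk::createInstanceUnique( createInfo, callbacks );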
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkAndroid.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkAndroid.hpp
new file mode 100644
index 00000000..37322bba
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkAndroid.hpp
@@ -0,0 +1,458 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkAllocation.hpp"
+#include "VkAndroid.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct ComponentMapping
+  {
+    VULKAN_HPP_CONSTEXPR ComponentMapping( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
+                                           VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
+                                           VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity,
+                                           VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity ) VULKAN_HPP_NOEXCEPT
+      : r( r_ )
+      , g( g_ )
+      , b( b_ )
+      , a( a_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentMapping const& rhs ) VULKAN_HPP_NOEXCEPT
+      : r( rhs.r )
+      , g( rhs.g )
+      , b( rhs.b )
+      , a( rhs.a )
+    {}
+
+    ComponentMapping & operator=( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( ComponentMapping ) );
+      return *this;
+    }
+
+    ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ComponentMapping& operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComponentMapping const *>(&rhs);
+      return *this;
+    }
+
+    ComponentMapping & setR( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ ) VULKAN_HPP_NOEXCEPT
+    {
+      r = r_;
+      return *this;
+    }
+
+    ComponentMapping & setG( VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ ) VULKAN_HPP_NOEXCEPT
+    {
+      g = g_;
+      return *this;
+    }
+
+    ComponentMapping & setB( VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ ) VULKAN_HPP_NOEXCEPT
+    {
+      b = b_;
+      return *this;
+    }
+
+    ComponentMapping & setA( VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ ) VULKAN_HPP_NOEXCEPT
+    {
+      a = a_;
+      return *this;
+    }
+
+    operator VkComponentMapping const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkComponentMapping*>( this );
+    }
+
+    operator VkComponentMapping &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkComponentMapping*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ComponentMapping const& ) const = default;
+#else
+    bool operator==( ComponentMapping const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( r == rhs.r )
+          && ( g == rhs.g )
+          && ( b == rhs.b )
+          && ( a == rhs.a );
+    }
+
+    bool operator!=( ComponentMapping const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ComponentSwizzle r = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
+    VULKAN_HPP_NAMESPACE::ComponentSwizzle g = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
+    VULKAN_HPP_NAMESPACE::ComponentSwizzle b = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
+    VULKAN_HPP_NAMESPACE::ComponentSwizzle a = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
+  };
+  static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ComponentMapping>::value, "struct wrapper is not a standard layout!" );
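// ComponentMapping describes a per-channel swizzle (consumed e.g. by image view
// creation); a default-constructed value is the identity mapping (sketch):
//
//     vk::ComponentMapping swapRB( vk::ComponentSwizzle::eB, vk::ComponentSwizzle::eG,
//                                  vk::ComponentSwizzle::eR, vk::ComponentSwizzle::eA );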
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  struct AndroidHardwareBufferFormatPropertiesANDROID
+  {
+    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                                                       uint64_t externalFormat_ = {},
+                                                                       VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {},
+                                                                       VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {},
+                                                                       VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
+                                                                       VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
+                                                                       VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+                                                                       VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven ) VULKAN_HPP_NOEXCEPT
+      : format( format_ )
+      , externalFormat( externalFormat_ )
+      , formatFeatures( formatFeatures_ )
+      , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ )
+      , suggestedYcbcrModel( suggestedYcbcrModel_ )
+      , suggestedYcbcrRange( suggestedYcbcrRange_ )
+      , suggestedXChromaOffset( suggestedXChromaOffset_ )
+      , suggestedYChromaOffset( suggestedYChromaOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , format( rhs.format )
+      , externalFormat( rhs.externalFormat )
+      , formatFeatures( rhs.formatFeatures )
+      , samplerYcbcrConversionComponents( rhs.samplerYcbcrConversionComponents )
+      , suggestedYcbcrModel( rhs.suggestedYcbcrModel )
+      , suggestedYcbcrRange( rhs.suggestedYcbcrRange )
+      , suggestedXChromaOffset( rhs.suggestedXChromaOffset )
+      , suggestedYChromaOffset( rhs.suggestedYChromaOffset )
+    {}
+
+    AndroidHardwareBufferFormatPropertiesANDROID & operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) - offsetof( AndroidHardwareBufferFormatPropertiesANDROID, pNext ) );
+      return *this;
+    }
+
+    AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AndroidHardwareBufferFormatPropertiesANDROID& operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const *>(&rhs);
+      return *this;
+    }
+
+    operator VkAndroidHardwareBufferFormatPropertiesANDROID const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAndroidHardwareBufferFormatPropertiesANDROID*>( this );
+    }
+
+    operator VkAndroidHardwareBufferFormatPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AndroidHardwareBufferFormatPropertiesANDROID const& ) const = default;
+#else
+    bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( format == rhs.format )
+          && ( externalFormat == rhs.externalFormat )
+          && ( formatFeatures == rhs.formatFeatures )
+          && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents )
+          && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel )
+          && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange )
+          && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset )
+          && (
suggestedYChromaOffset == rhs.suggestedYChromaOffset ); + } + + bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + uint64_t externalFormat = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {}; + VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity; + VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven; + }; + static_assert( sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + struct AndroidHardwareBufferPropertiesANDROID + { + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, + uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT + : allocationSize( allocationSize_ ) + , memoryTypeBits( memoryTypeBits_ ) + {} + + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( AndroidHardwareBufferPropertiesANDROID const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , allocationSize( rhs.allocationSize ) + , memoryTypeBits( rhs.memoryTypeBits ) + {} + + AndroidHardwareBufferPropertiesANDROID & operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( AndroidHardwareBufferPropertiesANDROID ) - offsetof( AndroidHardwareBufferPropertiesANDROID, pNext ) ); + return *this; + } + + AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + AndroidHardwareBufferPropertiesANDROID& operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkAndroidHardwareBufferPropertiesANDROID const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferPropertiesANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AndroidHardwareBufferPropertiesANDROID const& ) const = default; +#else + bool operator==( AndroidHardwareBufferPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( allocationSize == rhs.allocationSize ) + && ( memoryTypeBits == rhs.memoryTypeBits ); + } + + bool operator!=( AndroidHardwareBufferPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID; + 
void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; + uint32_t memoryTypeBits = {}; + }; + static_assert( sizeof( AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + struct AndroidHardwareBufferUsageANDROID + { + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = {} ) VULKAN_HPP_NOEXCEPT + : androidHardwareBufferUsage( androidHardwareBufferUsage_ ) + {} + + VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( AndroidHardwareBufferUsageANDROID const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , androidHardwareBufferUsage( rhs.androidHardwareBufferUsage ) + {} + + AndroidHardwareBufferUsageANDROID & operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( AndroidHardwareBufferUsageANDROID ) - offsetof( AndroidHardwareBufferUsageANDROID, pNext ) ); + return *this; + } + + AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + AndroidHardwareBufferUsageANDROID& operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkAndroidHardwareBufferUsageANDROID const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidHardwareBufferUsageANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AndroidHardwareBufferUsageANDROID const& ) const = default; +#else + bool operator==( AndroidHardwareBufferUsageANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage ); + } + + bool operator!=( AndroidHardwareBufferUsageANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID; + void* pNext = {}; + uint64_t androidHardwareBufferUsage = {}; + }; + static_assert( sizeof( AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + struct AndroidSurfaceCreateInfoKHR + { + VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {}, + struct ANativeWindow* window_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , window( window_ ) + {} + + VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , flags( rhs.flags ) + , window( rhs.window ) + {} + + AndroidSurfaceCreateInfoKHR & operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( AndroidSurfaceCreateInfoKHR ) - offsetof( AndroidSurfaceCreateInfoKHR, pNext ) ); + return *this; + } + + AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + AndroidSurfaceCreateInfoKHR& operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + AndroidSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AndroidSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + AndroidSurfaceCreateInfoKHR & setWindow( struct ANativeWindow* window_ ) VULKAN_HPP_NOEXCEPT + { + window = window_; + return *this; + } + + operator VkAndroidSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAndroidSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AndroidSurfaceCreateInfoKHR const& ) const = default; +#else + bool operator==( AndroidSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( window == rhs.window ); + } + + bool operator!=( AndroidSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags = {}; + struct ANativeWindow* window = {}; + }; + static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkApplication.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkApplication.hpp new file mode 100644 index 00000000..e1fb1e4e --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkApplication.hpp @@ -0,0 +1,161 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkAllocation.hpp" +#include "VkAndroid.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct ApplicationInfo + { + VULKAN_HPP_CONSTEXPR ApplicationInfo( const char* pApplicationName_ = {}, + uint32_t applicationVersion_ = {}, + const char* pEngineName_ = {}, + uint32_t engineVersion_ = {}, + uint32_t apiVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : pApplicationName( pApplicationName_ ) + , applicationVersion( applicationVersion_ ) + , pEngineName( pEngineName_ ) + , engineVersion( engineVersion_ ) + , apiVersion( apiVersion_ ) + {} + + VULKAN_HPP_CONSTEXPR ApplicationInfo( ApplicationInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , pApplicationName( rhs.pApplicationName ) + , applicationVersion( rhs.applicationVersion ) + , pEngineName( rhs.pEngineName ) + , engineVersion( rhs.engineVersion ) + , apiVersion( rhs.apiVersion ) + {} + + ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ApplicationInfo ) - offsetof( ApplicationInfo, pNext ) ); + return *this; + } + + ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ApplicationInfo& operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ApplicationInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ApplicationInfo & setPApplicationName( const char* pApplicationName_ ) VULKAN_HPP_NOEXCEPT + { + pApplicationName = pApplicationName_; + return *this; + } + + ApplicationInfo & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT + { + applicationVersion = applicationVersion_; + return *this; + } + + ApplicationInfo & setPEngineName( const char* pEngineName_ ) VULKAN_HPP_NOEXCEPT + { + pEngineName = pEngineName_; + return *this; + } + + ApplicationInfo & setEngineVersion( uint32_t engineVersion_ ) VULKAN_HPP_NOEXCEPT + { + 
engineVersion = engineVersion_;
+      return *this;
+    }
+
+    ApplicationInfo & setApiVersion( uint32_t apiVersion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      apiVersion = apiVersion_;
+      return *this;
+    }
+
+    operator VkApplicationInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkApplicationInfo*>( this );
+    }
+
+    operator VkApplicationInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkApplicationInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ApplicationInfo const& ) const = default;
+#else
+    bool operator==( ApplicationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pApplicationName == rhs.pApplicationName )
+          && ( applicationVersion == rhs.applicationVersion )
+          && ( pEngineName == rhs.pEngineName )
+          && ( engineVersion == rhs.engineVersion )
+          && ( apiVersion == rhs.apiVersion );
+    }
+
+    bool operator!=( ApplicationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo;
+    const void* pNext = {};
+    const char* pApplicationName = {};
+    uint32_t applicationVersion = {};
+    const char* pEngineName = {};
+    uint32_t engineVersion = {};
+    uint32_t apiVersion = {};
+  };
+  static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ApplicationInfo>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkAttachment.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkAttachment.hpp
new file mode 100644
index 00000000..2c637a44
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkAttachment.hpp
@@ -0,0 +1,857 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
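+
+// Editorial note: an illustrative sketch, not part of the generated headers.
+// ApplicationInfo (from VkApplication.hpp above) is usually filled through its
+// chained setters and handed to InstanceCreateInfo; the application and engine
+// names below are placeholders:
+//
+//   vk::ApplicationInfo appInfo;
+//   appInfo.setPApplicationName( "MyApp" )
+//          .setApplicationVersion( VK_MAKE_VERSION( 1, 0, 0 ) )
+//          .setPEngineName( "MyEngine" )
+//          .setEngineVersion( VK_MAKE_VERSION( 1, 0, 0 ) )
+//          .setApiVersion( VK_API_VERSION_1_1 );
+//
+//   vk::InstanceCreateInfo createInfo;
+//   createInfo.setPApplicationInfo( &appInfo );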
+ +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkAllocation.hpp" +#include "VkAndroid.hpp" +#include "VkAttachment.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct AttachmentDescription + { + VULKAN_HPP_CONSTEXPR AttachmentDescription( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , format( format_ ) + , samples( samples_ ) + , loadOp( loadOp_ ) + , storeOp( storeOp_ ) + , stencilLoadOp( stencilLoadOp_ ) + , stencilStoreOp( stencilStoreOp_ ) + , initialLayout( initialLayout_ ) + , finalLayout( finalLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR AttachmentDescription( AttachmentDescription const& rhs ) VULKAN_HPP_NOEXCEPT + : flags( rhs.flags ) + , format( rhs.format ) + , samples( rhs.samples ) + , loadOp( rhs.loadOp ) + , storeOp( rhs.storeOp ) + , stencilLoadOp( rhs.stencilLoadOp ) + , stencilStoreOp( rhs.stencilStoreOp ) + , initialLayout( rhs.initialLayout ) + , finalLayout( rhs.finalLayout ) + {} + + AttachmentDescription & operator=( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( AttachmentDescription ) ); + return *this; + } + + AttachmentDescription( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + AttachmentDescription& operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + AttachmentDescription & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + AttachmentDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + AttachmentDescription & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + { + samples = samples_; + return *this; + } + + AttachmentDescription & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT + { + loadOp = loadOp_; + return *this; + } + + AttachmentDescription & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT + { + storeOp = storeOp_; + return *this; + } + + AttachmentDescription & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT + { + stencilLoadOp = stencilLoadOp_; + return *this; + } + + AttachmentDescription & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT + { + stencilStoreOp = stencilStoreOp_; + return *this; + } + + AttachmentDescription & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) 
VULKAN_HPP_NOEXCEPT
+    {
+      initialLayout = initialLayout_;
+      return *this;
+    }
+
+    AttachmentDescription & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      finalLayout = finalLayout_;
+      return *this;
+    }
+
+    operator VkAttachmentDescription const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentDescription*>( this );
+    }
+
+    operator VkAttachmentDescription &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentDescription*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AttachmentDescription const& ) const = default;
+#else
+    bool operator==( AttachmentDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( flags == rhs.flags )
+          && ( format == rhs.format )
+          && ( samples == rhs.samples )
+          && ( loadOp == rhs.loadOp )
+          && ( storeOp == rhs.storeOp )
+          && ( stencilLoadOp == rhs.stencilLoadOp )
+          && ( stencilStoreOp == rhs.stencilStoreOp )
+          && ( initialLayout == rhs.initialLayout )
+          && ( finalLayout == rhs.finalLayout );
+    }
+
+    bool operator!=( AttachmentDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
+    VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
+    VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
+    VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
+    VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+  };
+  static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AttachmentDescription>::value, "struct wrapper is not a standard layout!"
); + + struct AttachmentDescription2KHR + { + VULKAN_HPP_CONSTEXPR AttachmentDescription2KHR( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , format( format_ ) + , samples( samples_ ) + , loadOp( loadOp_ ) + , storeOp( storeOp_ ) + , stencilLoadOp( stencilLoadOp_ ) + , stencilStoreOp( stencilStoreOp_ ) + , initialLayout( initialLayout_ ) + , finalLayout( finalLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR AttachmentDescription2KHR( AttachmentDescription2KHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , flags( rhs.flags ) + , format( rhs.format ) + , samples( rhs.samples ) + , loadOp( rhs.loadOp ) + , storeOp( rhs.storeOp ) + , stencilLoadOp( rhs.stencilLoadOp ) + , stencilStoreOp( rhs.stencilStoreOp ) + , initialLayout( rhs.initialLayout ) + , finalLayout( rhs.finalLayout ) + {} + + AttachmentDescription2KHR & operator=( AttachmentDescription2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( AttachmentDescription2KHR ) - offsetof( AttachmentDescription2KHR, pNext ) ); + return *this; + } + + AttachmentDescription2KHR( VkAttachmentDescription2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + AttachmentDescription2KHR& operator=( VkAttachmentDescription2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + AttachmentDescription2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + AttachmentDescription2KHR & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + AttachmentDescription2KHR & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + AttachmentDescription2KHR & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + { + samples = samples_; + return *this; + } + + AttachmentDescription2KHR & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT + { + loadOp = loadOp_; + return *this; + } + + AttachmentDescription2KHR & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT + { + storeOp = storeOp_; + return *this; + } + + AttachmentDescription2KHR & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT + { + stencilLoadOp = stencilLoadOp_; + return *this; + } + + AttachmentDescription2KHR & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT + { + stencilStoreOp = stencilStoreOp_; + return *this; + } + + AttachmentDescription2KHR & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout 
initialLayout_ ) VULKAN_HPP_NOEXCEPT + { + initialLayout = initialLayout_; + return *this; + } + + AttachmentDescription2KHR & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT + { + finalLayout = finalLayout_; + return *this; + } + + operator VkAttachmentDescription2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentDescription2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AttachmentDescription2KHR const& ) const = default; +#else + bool operator==( AttachmentDescription2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( format == rhs.format ) + && ( samples == rhs.samples ) + && ( loadOp == rhs.loadOp ) + && ( storeOp == rhs.storeOp ) + && ( stencilLoadOp == rhs.stencilLoadOp ) + && ( stencilStoreOp == rhs.stencilStoreOp ) + && ( initialLayout == rhs.initialLayout ) + && ( finalLayout == rhs.finalLayout ); + } + + bool operator!=( AttachmentDescription2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescription2KHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad; + VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore; + VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + }; + static_assert( sizeof( AttachmentDescription2KHR ) == sizeof( VkAttachmentDescription2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  struct AttachmentReference
+  {
+    VULKAN_HPP_CONSTEXPR AttachmentReference( uint32_t attachment_ = {},
+                                              VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+      : attachment( attachment_ )
+      , layout( layout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AttachmentReference( AttachmentReference const& rhs ) VULKAN_HPP_NOEXCEPT
+      : attachment( rhs.attachment )
+      , layout( rhs.layout )
+    {}
+
+    AttachmentReference & operator=( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( AttachmentReference ) );
+      return *this;
+    }
+
+    AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AttachmentReference& operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference const *>(&rhs);
+      return *this;
+    }
+
+    AttachmentReference & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachment = attachment_;
+      return *this;
+    }
+
+    AttachmentReference & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    operator VkAttachmentReference const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentReference*>( this );
+    }
+
+    operator VkAttachmentReference &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentReference*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AttachmentReference const& ) const = default;
+#else
+    bool operator==( AttachmentReference const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( attachment == rhs.attachment )
+          && ( layout == rhs.layout );
+    }
+
+    bool operator!=( AttachmentReference const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t attachment = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+  };
+  static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AttachmentReference>::value, "struct wrapper is not a standard layout!" );
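+
+  // Editorial note: an illustrative sketch, not part of the generated header.
+  // A render pass subpass typically points at its colour target through an
+  // AttachmentReference; the hypothetical helper below builds one for a given
+  // attachment index:
+  inline VULKAN_HPP_CONSTEXPR AttachmentReference makeColorAttachmentReference( uint32_t attachmentIndex ) VULKAN_HPP_NOEXCEPT
+  {
+    return AttachmentReference( attachmentIndex, ImageLayout::eColorAttachmentOptimal );
+  }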
+
+  struct AttachmentReference2KHR
+  {
+    VULKAN_HPP_CONSTEXPR AttachmentReference2KHR( uint32_t attachment_ = {},
+                                                  VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                                                  VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT
+      : attachment( attachment_ )
+      , layout( layout_ )
+      , aspectMask( aspectMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR AttachmentReference2KHR( AttachmentReference2KHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , attachment( rhs.attachment )
+      , layout( rhs.layout )
+      , aspectMask( rhs.aspectMask )
+    {}
+
+    AttachmentReference2KHR & operator=( AttachmentReference2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( AttachmentReference2KHR ) - offsetof( AttachmentReference2KHR, pNext ) );
+      return *this;
+    }
+
+    AttachmentReference2KHR( VkAttachmentReference2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    AttachmentReference2KHR& operator=( VkAttachmentReference2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference2KHR const *>(&rhs);
+      return *this;
+    }
+
+    AttachmentReference2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    AttachmentReference2KHR & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachment = attachment_;
+      return *this;
+    }
+
+    AttachmentReference2KHR & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    AttachmentReference2KHR & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    operator VkAttachmentReference2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkAttachmentReference2KHR*>( this );
+    }
+
+    operator VkAttachmentReference2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkAttachmentReference2KHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( AttachmentReference2KHR const& ) const = default;
+#else
+    bool operator==( AttachmentReference2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( attachment == rhs.attachment )
+          && ( layout == rhs.layout )
+          && ( aspectMask == rhs.aspectMask );
+    }
+
+    bool operator!=( AttachmentReference2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReference2KHR;
+    const void* pNext = {};
+    uint32_t attachment = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+  };
+  static_assert( sizeof( AttachmentReference2KHR ) == sizeof( VkAttachmentReference2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<AttachmentReference2KHR>::value, "struct wrapper is not a standard layout!"
); + + struct Extent2D + { + VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = {}, + uint32_t height_ = {} ) VULKAN_HPP_NOEXCEPT + : width( width_ ) + , height( height_ ) + {} + + VULKAN_HPP_CONSTEXPR Extent2D( Extent2D const& rhs ) VULKAN_HPP_NOEXCEPT + : width( rhs.width ) + , height( rhs.height ) + {} + + Extent2D & operator=( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( Extent2D ) ); + return *this; + } + + Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + Extent2D& operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + Extent2D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + Extent2D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + operator VkExtent2D const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExtent2D &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Extent2D const& ) const = default; +#else + bool operator==( Extent2D const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( width == rhs.width ) + && ( height == rhs.height ); + } + + bool operator!=( Extent2D const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t width = {}; + uint32_t height = {}; + }; + static_assert( sizeof( Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct SampleLocationEXT + { + VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = {}, + float y_ = {} ) VULKAN_HPP_NOEXCEPT + : x( x_ ) + , y( y_ ) + {} + + VULKAN_HPP_CONSTEXPR SampleLocationEXT( SampleLocationEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : x( rhs.x ) + , y( rhs.y ) + {} + + SampleLocationEXT & operator=( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( SampleLocationEXT ) ); + return *this; + } + + SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SampleLocationEXT& operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + SampleLocationEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } + + SampleLocationEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT + { + y = y_; + return *this; + } + + operator VkSampleLocationEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSampleLocationEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SampleLocationEXT const& ) const = default; +#else + bool operator==( SampleLocationEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( x == rhs.x ) + && ( y == rhs.y ); + } + + bool operator!=( SampleLocationEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float x = {}; + float y = {}; + }; + static_assert( sizeof( SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct SampleLocationsInfoEXT + { + VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {}, + uint32_t sampleLocationsCount_ = {}, + const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT + : sampleLocationsPerPixel( sampleLocationsPerPixel_ ) + , sampleLocationGridSize( sampleLocationGridSize_ ) + , sampleLocationsCount( sampleLocationsCount_ ) + , pSampleLocations( pSampleLocations_ ) + {} + + VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( SampleLocationsInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , sampleLocationsPerPixel( rhs.sampleLocationsPerPixel ) + , sampleLocationGridSize( rhs.sampleLocationGridSize ) + , sampleLocationsCount( rhs.sampleLocationsCount ) + , pSampleLocations( rhs.pSampleLocations ) + {} + + SampleLocationsInfoEXT & operator=( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( SampleLocationsInfoEXT ) - offsetof( SampleLocationsInfoEXT, pNext ) ); + return *this; + } + + SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SampleLocationsInfoEXT& operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + SampleLocationsInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SampleLocationsInfoEXT & setSampleLocationsPerPixel( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsPerPixel = sampleLocationsPerPixel_; + return *this; + } + + SampleLocationsInfoEXT & setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationGridSize = sampleLocationGridSize_; + return *this; + } + + SampleLocationsInfoEXT & setSampleLocationsCount( uint32_t sampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsCount = sampleLocationsCount_; + return *this; + } + + SampleLocationsInfoEXT & setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations_ ) VULKAN_HPP_NOEXCEPT + { + pSampleLocations = pSampleLocations_; + return *this; + } + + operator VkSampleLocationsInfoEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSampleLocationsInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SampleLocationsInfoEXT const& ) const = default; +#else + bool operator==( SampleLocationsInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel ) + && ( sampleLocationGridSize == rhs.sampleLocationGridSize ) + && ( sampleLocationsCount == rhs.sampleLocationsCount ) + && ( pSampleLocations == rhs.pSampleLocations ); + } + + bool operator!=( SampleLocationsInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = 
{}; + uint32_t sampleLocationsCount = {}; + const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations = {}; + }; + static_assert( sizeof( SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct AttachmentSampleLocationsEXT + { + VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = {}, + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : attachmentIndex( attachmentIndex_ ) + , sampleLocationsInfo( sampleLocationsInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( AttachmentSampleLocationsEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : attachmentIndex( rhs.attachmentIndex ) + , sampleLocationsInfo( rhs.sampleLocationsInfo ) + {} + + AttachmentSampleLocationsEXT & operator=( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( AttachmentSampleLocationsEXT ) ); + return *this; + } + + AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + AttachmentSampleLocationsEXT& operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + AttachmentSampleLocationsEXT & setAttachmentIndex( uint32_t attachmentIndex_ ) VULKAN_HPP_NOEXCEPT + { + attachmentIndex = attachmentIndex_; + return *this; + } + + AttachmentSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsInfo = sampleLocationsInfo_; + return *this; + } + + operator VkAttachmentSampleLocationsEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAttachmentSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( AttachmentSampleLocationsEXT const& ) const = default; +#else + bool operator==( AttachmentSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( attachmentIndex == rhs.attachmentIndex ) + && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); + } + + bool operator!=( AttachmentSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t attachmentIndex = {}; + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; + }; + static_assert( sizeof( AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkBase.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkBase.hpp new file mode 100644 index 00000000..60910eaa --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkBase.hpp @@ -0,0 +1,174 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkAllocation.hpp" +#include "VkAndroid.hpp" +#include "VkAttachment.hpp" +#include "VkBase.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct BaseInStructure + { + BaseInStructure() VULKAN_HPP_NOEXCEPT + {} + + BaseInStructure( BaseInStructure const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + {} + + BaseInStructure & operator=( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( BaseInStructure ) ); + return *this; + } + + BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + BaseInStructure& operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + BaseInStructure & setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + operator VkBaseInStructure const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBaseInStructure &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BaseInStructure const& ) const = default; +#else + bool operator==( BaseInStructure const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ); + } + + bool operator!=( BaseInStructure const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = {}; + const struct VULKAN_HPP_NAMESPACE::BaseInStructure* pNext = {}; + }; + static_assert( sizeof( BaseInStructure ) == sizeof( VkBaseInStructure ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct BaseOutStructure + { + BaseOutStructure() VULKAN_HPP_NOEXCEPT + {} + + BaseOutStructure( BaseOutStructure const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + {} + + BaseOutStructure & operator=( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( BaseOutStructure ) ); + return *this; + } + + BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + BaseOutStructure& operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + BaseOutStructure & setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + operator VkBaseOutStructure const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBaseOutStructure &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BaseOutStructure const& ) const = default; +#else + bool operator==( BaseOutStructure const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ); + } + + bool operator!=( BaseOutStructure const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = {}; + struct VULKAN_HPP_NAMESPACE::BaseOutStructure* pNext = {}; + }; + static_assert( sizeof( BaseOutStructure ) == sizeof( VkBaseOutStructure ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkBind.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkBind.hpp new file mode 100644 index 00000000..bc079950 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkBind.hpp @@ -0,0 +1,1778 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. 
+// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkAllocation.hpp" +#include "VkAndroid.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBind.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct BindAccelerationStructureMemoryInfoNV + { + VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + uint32_t deviceIndexCount_ = {}, + const uint32_t* pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT + : accelerationStructure( accelerationStructure_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , deviceIndexCount( deviceIndexCount_ ) + , pDeviceIndices( pDeviceIndices_ ) + {} + + VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( BindAccelerationStructureMemoryInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , accelerationStructure( rhs.accelerationStructure ) + , memory( rhs.memory ) + , memoryOffset( rhs.memoryOffset ) + , deviceIndexCount( rhs.deviceIndexCount ) + , pDeviceIndices( rhs.pDeviceIndices ) + {} + + BindAccelerationStructureMemoryInfoNV & operator=( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( BindAccelerationStructureMemoryInfoNV ) - offsetof( BindAccelerationStructureMemoryInfoNV, pNext ) ); + return *this; + } + + BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + BindAccelerationStructureMemoryInfoNV& operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + BindAccelerationStructureMemoryInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BindAccelerationStructureMemoryInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT + { + accelerationStructure = accelerationStructure_; + return *this; + } + + BindAccelerationStructureMemoryInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + BindAccelerationStructureMemoryInfoNV & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + BindAccelerationStructureMemoryInfoNV & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = deviceIndexCount_; + return *this; + } + + BindAccelerationStructureMemoryInfoNV & setPDeviceIndices( const uint32_t* pDeviceIndices_ 
+
+  struct BindBufferMemoryDeviceGroupInfo
+  {
+    VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {},
+                                                          const uint32_t* pDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT
+      : deviceIndexCount( deviceIndexCount_ )
+      , pDeviceIndices( pDeviceIndices_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( BindBufferMemoryDeviceGroupInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , deviceIndexCount( rhs.deviceIndexCount )
+      , pDeviceIndices( rhs.pDeviceIndices )
+    {}
+
+    BindBufferMemoryDeviceGroupInfo & operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( BindBufferMemoryDeviceGroupInfo ) - offsetof( BindBufferMemoryDeviceGroupInfo, pNext ) );
+      return *this;
+    }
+
+    BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BindBufferMemoryDeviceGroupInfo& operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const *>(&rhs);
+      return *this;
+    }
+
+    BindBufferMemoryDeviceGroupInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BindBufferMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceIndexCount = deviceIndexCount_;
+      return *this;
+    }
+
+    BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDeviceIndices = pDeviceIndices_;
+      return *this;
+    }
+
+    operator VkBindBufferMemoryDeviceGroupInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindBufferMemoryDeviceGroupInfo*>( this );
+    }
+
+    operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BindBufferMemoryDeviceGroupInfo const& ) const = default;
+#else
+    bool operator==( BindBufferMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceIndexCount == rhs.deviceIndexCount )
+          && ( pDeviceIndices == rhs.pDeviceIndices );
+    }
+
+    bool operator!=( BindBufferMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo;
+    const void* pNext = {};
+    uint32_t deviceIndexCount = {};
+    const uint32_t* pDeviceIndices = {};
+  };
+  static_assert( sizeof( BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindBufferMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );
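+
+  // Illustrative sketch: BindBufferMemoryDeviceGroupInfo is consumed via the pNext
+  // chain of a BindBufferMemoryInfo when binding across a device group. The index
+  // array and the 'buffer'/'memory' handles below are assumptions for the example:
+  //
+  //   uint32_t deviceIndices[2] = { 0, 1 };
+  //   auto groupInfo = VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo{}
+  //                        .setDeviceIndexCount( 2 )
+  //                        .setPDeviceIndices( deviceIndices );
+  //   auto bindInfo = VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo{ buffer, memory, 0 }
+  //                       .setPNext( &groupInfo );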
+
+  struct BindBufferMemoryInfo
+  {
+    VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+                                               VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {},
+                                               VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT
+      : buffer( buffer_ )
+      , memory( memory_ )
+      , memoryOffset( memoryOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( BindBufferMemoryInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , buffer( rhs.buffer )
+      , memory( rhs.memory )
+      , memoryOffset( rhs.memoryOffset )
+    {}
+
+    BindBufferMemoryInfo & operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( BindBufferMemoryInfo ) - offsetof( BindBufferMemoryInfo, pNext ) );
+      return *this;
+    }
+
+    BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BindBufferMemoryInfo& operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const *>(&rhs);
+      return *this;
+    }
+
+    BindBufferMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BindBufferMemoryInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    BindBufferMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memory = memory_;
+      return *this;
+    }
+
+    BindBufferMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryOffset = memoryOffset_;
+      return *this;
+    }
+
+    operator VkBindBufferMemoryInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindBufferMemoryInfo*>( this );
+    }
+
+    operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindBufferMemoryInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BindBufferMemoryInfo const& ) const = default;
+#else
+    bool operator==( BindBufferMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer )
+          && ( memory == rhs.memory )
+          && ( memoryOffset == rhs.memoryOffset );
+    }
+
+    bool operator!=( BindBufferMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
+  };
+  static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindBufferMemoryInfo>::value, "struct wrapper is not a standard layout!" );
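+
+  // Illustrative usage, assuming a vk::Device 'device' plus 'buffer' and 'memory'
+  // created elsewhere; Device::bindBufferMemory2 accepts one or more of these infos:
+  //
+  //   auto info = VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo{}
+  //                   .setBuffer( buffer )
+  //                   .setMemory( memory )
+  //                   .setMemoryOffset( 0 );
+  //   device.bindBufferMemory2( info );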
+
+  struct Offset2D
+  {
+    VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = {},
+                                   int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const& rhs ) VULKAN_HPP_NOEXCEPT
+      : x( rhs.x )
+      , y( rhs.y )
+    {}
+
+    Offset2D & operator=( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( Offset2D ) );
+      return *this;
+    }
+
+    Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    Offset2D& operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset2D const *>(&rhs);
+      return *this;
+    }
+
+    Offset2D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    Offset2D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    operator VkOffset2D const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkOffset2D*>( this );
+    }
+
+    operator VkOffset2D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkOffset2D*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Offset2D const& ) const = default;
+#else
+    bool operator==( Offset2D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y );
+    }
+
+    bool operator!=( Offset2D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    int32_t x = {};
+    int32_t y = {};
+  };
+  static_assert( sizeof( Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Offset2D>::value, "struct wrapper is not a standard layout!" );
+
+  struct Rect2D
+  {
+    VULKAN_HPP_CONSTEXPR Rect2D( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {},
+                                 VULKAN_HPP_NAMESPACE::Extent2D extent_ = {} ) VULKAN_HPP_NOEXCEPT
+      : offset( offset_ )
+      , extent( extent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const& rhs ) VULKAN_HPP_NOEXCEPT
+      : offset( rhs.offset )
+      , extent( rhs.extent )
+    {}
+
+    Rect2D & operator=( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( Rect2D ) );
+      return *this;
+    }
+
+    Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    Rect2D& operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Rect2D const *>(&rhs);
+      return *this;
+    }
+
+    Rect2D & setOffset( VULKAN_HPP_NAMESPACE::Offset2D offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    Rect2D & setExtent( VULKAN_HPP_NAMESPACE::Extent2D extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    operator VkRect2D const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRect2D*>( this );
+    }
+
+    operator VkRect2D &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRect2D*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Rect2D const& ) const = default;
+#else
+    bool operator==( Rect2D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( offset == rhs.offset )
+          && ( extent == rhs.extent );
+    }
+
+    bool operator!=( Rect2D const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Offset2D offset = {};
+    VULKAN_HPP_NAMESPACE::Extent2D extent = {};
+  };
+  static_assert( sizeof( Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!"
); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct BindImageMemoryDeviceGroupInfo + { + VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {}, + const uint32_t* pDeviceIndices_ = {}, + uint32_t splitInstanceBindRegionCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceIndexCount( deviceIndexCount_ ) + , pDeviceIndices( pDeviceIndices_ ) + , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ ) + , pSplitInstanceBindRegions( pSplitInstanceBindRegions_ ) + {} + + VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( BindImageMemoryDeviceGroupInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , deviceIndexCount( rhs.deviceIndexCount ) + , pDeviceIndices( rhs.pDeviceIndices ) + , splitInstanceBindRegionCount( rhs.splitInstanceBindRegionCount ) + , pSplitInstanceBindRegions( rhs.pSplitInstanceBindRegions ) + {} + + BindImageMemoryDeviceGroupInfo & operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( BindImageMemoryDeviceGroupInfo ) - offsetof( BindImageMemoryDeviceGroupInfo, pNext ) ); + return *this; + } + + BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + BindImageMemoryDeviceGroupInfo& operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + BindImageMemoryDeviceGroupInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BindImageMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + deviceIndexCount = deviceIndexCount_; + return *this; + } + + BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceIndices = pDeviceIndices_; + return *this; + } + + BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT + { + splitInstanceBindRegionCount = splitInstanceBindRegionCount_; + return *this; + } + + BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions( const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT + { + pSplitInstanceBindRegions = pSplitInstanceBindRegions_; + return *this; + } + + operator VkBindImageMemoryDeviceGroupInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BindImageMemoryDeviceGroupInfo const& ) const = default; +#else + bool operator==( BindImageMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( deviceIndexCount == rhs.deviceIndexCount ) + && ( pDeviceIndices == rhs.pDeviceIndices ) + && ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount ) + && ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions ); + } + + bool operator!=( BindImageMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo; + const void* pNext = {}; + uint32_t deviceIndexCount = {}; + 
const uint32_t* pDeviceIndices = {}; + uint32_t splitInstanceBindRegionCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions = {}; + }; + static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct BindImageMemoryInfo + { + VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : image( image_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( BindImageMemoryInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , image( rhs.image ) + , memory( rhs.memory ) + , memoryOffset( rhs.memoryOffset ) + {} + + BindImageMemoryInfo & operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( BindImageMemoryInfo ) - offsetof( BindImageMemoryInfo, pNext ) ); + return *this; + } + + BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + BindImageMemoryInfo& operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + BindImageMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BindImageMemoryInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + BindImageMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + BindImageMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + operator VkBindImageMemoryInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BindImageMemoryInfo const& ) const = default; +#else + bool operator==( BindImageMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( image == rhs.image ) + && ( memory == rhs.memory ) + && ( memoryOffset == rhs.memoryOffset ); + } + + bool operator!=( BindImageMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + }; + static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct BindImageMemorySwapchainInfoKHR + { + VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, + uint32_t imageIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : swapchain( swapchain_ ) + , imageIndex( imageIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( BindImageMemorySwapchainInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , swapchain( rhs.swapchain ) + , imageIndex( rhs.imageIndex ) + {} + + BindImageMemorySwapchainInfoKHR & operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( BindImageMemorySwapchainInfoKHR ) - offsetof( BindImageMemorySwapchainInfoKHR, pNext ) ); + return *this; + } + + BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + BindImageMemorySwapchainInfoKHR& operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + BindImageMemorySwapchainInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BindImageMemorySwapchainInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT + { + swapchain = swapchain_; + return *this; + } + + BindImageMemorySwapchainInfoKHR & setImageIndex( uint32_t imageIndex_ ) VULKAN_HPP_NOEXCEPT + { + imageIndex = imageIndex_; + return *this; + } + + operator VkBindImageMemorySwapchainInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BindImageMemorySwapchainInfoKHR const& ) const = default; +#else + bool operator==( BindImageMemorySwapchainInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( swapchain == rhs.swapchain ) + && ( imageIndex == rhs.imageIndex ); + } + + bool operator!=( BindImageMemorySwapchainInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {}; + uint32_t imageIndex = {}; + }; + static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct BindImagePlaneMemoryInfo + { + VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT + : planeAspect( planeAspect_ ) + {} + + VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( BindImagePlaneMemoryInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , planeAspect( rhs.planeAspect ) + {} + + BindImagePlaneMemoryInfo & operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( BindImagePlaneMemoryInfo ) - offsetof( BindImagePlaneMemoryInfo, pNext ) ); + return *this; + } + + BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + BindImagePlaneMemoryInfo& operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + BindImagePlaneMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BindImagePlaneMemoryInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT + { + planeAspect = planeAspect_; + return *this; + } + + operator VkBindImagePlaneMemoryInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BindImagePlaneMemoryInfo const& ) const = default; +#else + bool operator==( BindImagePlaneMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( planeAspect == rhs.planeAspect ); + } + + bool operator!=( BindImagePlaneMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImagePlaneMemoryInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor; + }; + static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct SparseMemoryBind + { + VULKAN_HPP_CONSTEXPR SparseMemoryBind( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : resourceOffset( resourceOffset_ ) + , size( size_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR SparseMemoryBind( SparseMemoryBind const& rhs ) VULKAN_HPP_NOEXCEPT + : resourceOffset( rhs.resourceOffset ) + , size( rhs.size ) + , memory( rhs.memory ) + , memoryOffset( rhs.memoryOffset ) + , flags( rhs.flags ) + {} + + SparseMemoryBind & operator=( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( SparseMemoryBind ) ); + return *this; + } + + SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SparseMemoryBind& operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + SparseMemoryBind & setResourceOffset( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ ) VULKAN_HPP_NOEXCEPT + { + resourceOffset = resourceOffset_; + return *this; + } + + SparseMemoryBind & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + SparseMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + SparseMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + SparseMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkSparseMemoryBind const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseMemoryBind &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SparseMemoryBind const& ) const = default; +#else + bool operator==( SparseMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( resourceOffset == rhs.resourceOffset ) + && ( size == rhs.size ) + && ( memory == rhs.memory ) + && ( memoryOffset == rhs.memoryOffset ) + && ( flags == rhs.flags ); + } + + bool operator!=( SparseMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; + }; + static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct SparseBufferMemoryBindInfo + { + VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + uint32_t bindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + , bindCount( bindCount_ ) + , pBinds( pBinds_ ) + {} + + VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( SparseBufferMemoryBindInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : buffer( rhs.buffer ) + , bindCount( rhs.bindCount ) + , pBinds( rhs.pBinds ) + {} + + SparseBufferMemoryBindInfo & operator=( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( SparseBufferMemoryBindInfo ) ); + return *this; + } + + SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SparseBufferMemoryBindInfo& operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + SparseBufferMemoryBindInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + SparseBufferMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = bindCount_; + return *this; + } + + SparseBufferMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT + { + pBinds = pBinds_; + return *this; + } + + operator VkSparseBufferMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseBufferMemoryBindInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SparseBufferMemoryBindInfo const& ) const = default; +#else + bool operator==( SparseBufferMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( buffer == rhs.buffer ) + && ( bindCount == rhs.bindCount ) + && ( pBinds == rhs.pBinds ); + } + + bool operator!=( SparseBufferMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds = {}; + }; + static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct SparseImageOpaqueMemoryBindInfo + { + VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + uint32_t bindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT + : image( image_ ) + , bindCount( bindCount_ ) + , pBinds( pBinds_ ) + {} + + VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( SparseImageOpaqueMemoryBindInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : image( rhs.image ) + , bindCount( rhs.bindCount ) + , pBinds( rhs.pBinds ) + {} + + SparseImageOpaqueMemoryBindInfo & operator=( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( SparseImageOpaqueMemoryBindInfo ) ); + return *this; + } + + SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SparseImageOpaqueMemoryBindInfo& operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + SparseImageOpaqueMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + SparseImageOpaqueMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = bindCount_; + return *this; + } + + SparseImageOpaqueMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT + { + pBinds = pBinds_; + return *this; + } + + operator VkSparseImageOpaqueMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageOpaqueMemoryBindInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SparseImageOpaqueMemoryBindInfo const& ) const = default; +#else + bool operator==( SparseImageOpaqueMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( image == rhs.image ) + && ( bindCount == rhs.bindCount ) + && ( pBinds == rhs.pBinds ); + } + + bool operator!=( SparseImageOpaqueMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Image image = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds = {}; + }; + static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct ImageSubresource + { + VULKAN_HPP_CONSTEXPR ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + uint32_t mipLevel_ = {}, + uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ) + , mipLevel( mipLevel_ ) + , arrayLayer( arrayLayer_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageSubresource( ImageSubresource const& rhs ) VULKAN_HPP_NOEXCEPT + : aspectMask( rhs.aspectMask ) + , mipLevel( rhs.mipLevel ) + , arrayLayer( rhs.arrayLayer ) + {} + + ImageSubresource & operator=( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( ImageSubresource ) ); + return *this; + } + + ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ImageSubresource& operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ImageSubresource & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT + { + aspectMask = aspectMask_; + return *this; + } + + ImageSubresource & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT + { + mipLevel = mipLevel_; + return *this; + } + + ImageSubresource & setArrayLayer( uint32_t arrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + arrayLayer = arrayLayer_; + return *this; + } + + operator VkImageSubresource const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageSubresource &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageSubresource const& ) const = default; +#else + bool operator==( ImageSubresource const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( aspectMask == rhs.aspectMask ) + && ( mipLevel == rhs.mipLevel ) + && ( arrayLayer == rhs.arrayLayer ); + } + + bool operator!=( ImageSubresource const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + uint32_t mipLevel = {}; + uint32_t arrayLayer = {}; + }; + static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct Offset3D + { + VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = {}, + int32_t y_ = {}, + int32_t z_ = {} ) VULKAN_HPP_NOEXCEPT + : x( x_ ) + , y( y_ ) + , z( z_ ) + {} + + VULKAN_HPP_CONSTEXPR Offset3D( Offset3D const& rhs ) VULKAN_HPP_NOEXCEPT + : x( rhs.x ) + , y( rhs.y ) + , z( rhs.z ) + {} + + explicit Offset3D( Offset2D const& offset2D, + int32_t z_ = {} ) + : x( offset2D.x ) + , y( offset2D.y ) + , z( z_ ) + {} + + Offset3D & operator=( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( Offset3D ) ); + return *this; + } + + Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + Offset3D& operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + Offset3D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT + { + x = x_; + return *this; + } + + Offset3D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT + { + y = y_; + return *this; + } + + Offset3D & setZ( int32_t z_ ) VULKAN_HPP_NOEXCEPT + { + z = z_; + return *this; + } + + operator VkOffset3D const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkOffset3D &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Offset3D const& ) const = default; +#else + bool operator==( Offset3D const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( x == rhs.x ) + && ( y == rhs.y ) + && ( z == rhs.z ); + } + + bool operator!=( Offset3D const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + int32_t x = {}; + int32_t y = {}; + int32_t z = {}; + }; + static_assert( sizeof( Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct Extent3D + { + VULKAN_HPP_CONSTEXPR Extent3D( uint32_t width_ = {}, + uint32_t height_ = {}, + uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT + : width( width_ ) + , height( height_ ) + , depth( depth_ ) + {} + + VULKAN_HPP_CONSTEXPR Extent3D( Extent3D const& rhs ) VULKAN_HPP_NOEXCEPT + : width( rhs.width ) + , height( rhs.height ) + , depth( rhs.depth ) + {} + + explicit Extent3D( Extent2D const& extent2D, + uint32_t depth_ = {} ) + : width( extent2D.width ) + , height( extent2D.height ) + , depth( depth_ ) + {} + + Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( Extent3D ) ); + return *this; + } + + Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + Extent3D& operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + Extent3D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT + { + width = width_; + return *this; + } + + Extent3D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT + { + height = height_; + return *this; + } + + Extent3D & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT + { + depth = depth_; + return *this; + } + + operator VkExtent3D const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExtent3D &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( Extent3D const& ) const = default; +#else + bool operator==( Extent3D const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( width == rhs.width ) + && ( height == rhs.height ) + && ( depth == rhs.depth ); + } + + bool operator!=( Extent3D const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t width = {}; + uint32_t height = {}; + uint32_t depth = {}; + }; + static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct SparseImageMemoryBind + { + VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D offset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : subresource( subresource_ ) + , offset( offset_ ) + , extent( extent_ ) + , memory( memory_ ) + , memoryOffset( memoryOffset_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( SparseImageMemoryBind const& rhs ) VULKAN_HPP_NOEXCEPT + : subresource( rhs.subresource ) + , offset( rhs.offset ) + , extent( rhs.extent ) + , memory( rhs.memory ) + , memoryOffset( rhs.memoryOffset ) + , flags( rhs.flags ) + {} + + SparseImageMemoryBind & operator=( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( SparseImageMemoryBind ) ); + return *this; + } + + SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SparseImageMemoryBind& operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + SparseImageMemoryBind & setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ ) VULKAN_HPP_NOEXCEPT + { + subresource = subresource_; + return *this; + } + + SparseImageMemoryBind & setOffset( VULKAN_HPP_NAMESPACE::Offset3D offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + SparseImageMemoryBind & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT + { + extent = extent_; + return *this; + } + + SparseImageMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + SparseImageMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT + { + memoryOffset = memoryOffset_; + return *this; + } + + SparseImageMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkSparseImageMemoryBind const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageMemoryBind &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SparseImageMemoryBind const& ) const = default; +#else + bool operator==( SparseImageMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( subresource == rhs.subresource ) + && ( offset == rhs.offset ) + && ( extent == rhs.extent ) + && ( memory == rhs.memory ) + && ( memoryOffset == rhs.memoryOffset ) + && ( flags == rhs.flags ); + } + + bool operator!=( SparseImageMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageSubresource subresource = {}; + VULKAN_HPP_NAMESPACE::Offset3D offset = {}; + VULKAN_HPP_NAMESPACE::Extent3D extent = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {}; + VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {}; + }; + static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct SparseImageMemoryBindInfo + { + VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + uint32_t bindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds_ = {} ) VULKAN_HPP_NOEXCEPT + : image( image_ ) + , bindCount( bindCount_ ) + , pBinds( pBinds_ ) + {} + + VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( SparseImageMemoryBindInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : image( rhs.image ) + , bindCount( rhs.bindCount ) + , pBinds( rhs.pBinds ) + {} + + SparseImageMemoryBindInfo & operator=( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( SparseImageMemoryBindInfo ) ); + return *this; + } + + SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SparseImageMemoryBindInfo& operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + SparseImageMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + SparseImageMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT + { + bindCount = bindCount_; + return *this; + } + + SparseImageMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT + { + pBinds = pBinds_; + return *this; + } + + operator VkSparseImageMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSparseImageMemoryBindInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SparseImageMemoryBindInfo const& ) const = default; +#else + bool operator==( SparseImageMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( image == rhs.image ) + && ( bindCount == rhs.bindCount ) + && ( pBinds == rhs.pBinds ); + } + + bool operator!=( SparseImageMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Image image = {}; + uint32_t bindCount = {}; + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds = {}; + }; + static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct BindSparseInfo + { + VULKAN_HPP_CONSTEXPR BindSparseInfo( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, + uint32_t bufferBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds_ = {}, + uint32_t imageOpaqueBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = {}, + uint32_t imageBindCount_ = {}, + const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT + : waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphores( pWaitSemaphores_ ) + , bufferBindCount( bufferBindCount_ ) + , pBufferBinds( pBufferBinds_ ) + , imageOpaqueBindCount( imageOpaqueBindCount_ ) + , pImageOpaqueBinds( pImageOpaqueBinds_ ) + , imageBindCount( imageBindCount_ ) + , pImageBinds( pImageBinds_ ) + , signalSemaphoreCount( signalSemaphoreCount_ ) + , pSignalSemaphores( pSignalSemaphores_ ) + {} + + VULKAN_HPP_CONSTEXPR BindSparseInfo( BindSparseInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , waitSemaphoreCount( rhs.waitSemaphoreCount ) + , pWaitSemaphores( rhs.pWaitSemaphores ) + , bufferBindCount( rhs.bufferBindCount ) + , pBufferBinds( rhs.pBufferBinds ) + , imageOpaqueBindCount( rhs.imageOpaqueBindCount ) + , pImageOpaqueBinds( rhs.pImageOpaqueBinds ) + , imageBindCount( rhs.imageBindCount ) + , pImageBinds( rhs.pImageBinds ) + , signalSemaphoreCount( rhs.signalSemaphoreCount ) + , pSignalSemaphores( rhs.pSignalSemaphores ) + {} + + BindSparseInfo & operator=( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( BindSparseInfo ) - offsetof( BindSparseInfo, pNext ) ); + return *this; + } + + BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + BindSparseInfo& operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + BindSparseInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BindSparseInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + BindSparseInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + + BindSparseInfo & setBufferBindCount( uint32_t bufferBindCount_ ) VULKAN_HPP_NOEXCEPT + { + bufferBindCount = bufferBindCount_; + return *this; + } + + BindSparseInfo & setPBufferBinds( const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds_ ) VULKAN_HPP_NOEXCEPT + { + pBufferBinds = pBufferBinds_; + return *this; + } + + BindSparseInfo & setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) VULKAN_HPP_NOEXCEPT + { + imageOpaqueBindCount = imageOpaqueBindCount_; + return *this; + } + + BindSparseInfo & setPImageOpaqueBinds( const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT + { + pImageOpaqueBinds = pImageOpaqueBinds_; + return *this; + } + + BindSparseInfo & setImageBindCount( uint32_t imageBindCount_ ) VULKAN_HPP_NOEXCEPT + { + imageBindCount = imageBindCount_; + return *this; + } + + BindSparseInfo & setPImageBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds_ ) VULKAN_HPP_NOEXCEPT + { + pImageBinds = pImageBinds_; + 
return *this;
+    }
+
+    BindSparseInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreCount = signalSemaphoreCount_;
+      return *this;
+    }
+
+    BindSparseInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphores = pSignalSemaphores_;
+      return *this;
+    }
+
+    operator VkBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBindSparseInfo*>( this );
+    }
+
+    operator VkBindSparseInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBindSparseInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BindSparseInfo const& ) const = default;
+#else
+    bool operator==( BindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphores == rhs.pWaitSemaphores )
+          && ( bufferBindCount == rhs.bufferBindCount )
+          && ( pBufferBinds == rhs.pBufferBinds )
+          && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount )
+          && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds )
+          && ( imageBindCount == rhs.imageBindCount )
+          && ( pImageBinds == rhs.pImageBinds )
+          && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+          && ( pSignalSemaphores == rhs.pSignalSemaphores );
+    }
+
+    bool operator!=( BindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo;
+    const void* pNext = {};
+    uint32_t waitSemaphoreCount = {};
+    const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {};
+    uint32_t bufferBindCount = {};
+    const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds = {};
+    uint32_t imageOpaqueBindCount = {};
+    const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds = {};
+    uint32_t imageBindCount = {};
+    const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds = {};
+    uint32_t signalSemaphoreCount = {};
+    const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores = {};
+  };
+  static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BindSparseInfo>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkBuffer.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkBuffer.hpp
new file mode 100644
index 00000000..d6da77da
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkBuffer.hpp
@@ -0,0 +1,956 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkAndroid.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct BufferCopy
+  {
+    VULKAN_HPP_CONSTEXPR BufferCopy( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {},
+                                     VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {},
+                                     VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT
+      : srcOffset( srcOffset_ )
+      , dstOffset( dstOffset_ )
+      , size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferCopy( BufferCopy const& rhs ) VULKAN_HPP_NOEXCEPT
+      : srcOffset( rhs.srcOffset )
+      , dstOffset( rhs.dstOffset )
+      , size( rhs.size )
+    {}
+
+    BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( BufferCopy ) );
+      return *this;
+    }
+
+    BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BufferCopy& operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy const *>(&rhs);
+      return *this;
+    }
+
+    BufferCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    BufferCopy & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    BufferCopy & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    operator VkBufferCopy const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferCopy*>( this );
+    }
+
+    operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferCopy*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferCopy const& ) const = default;
+#else
+    bool operator==( BufferCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( srcOffset == rhs.srcOffset )
+          && ( dstOffset == rhs.dstOffset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( BufferCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+  };
+  static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!"
); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct BufferCreateInfo + { + VULKAN_HPP_CONSTEXPR BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, + uint32_t queueFamilyIndexCount_ = {}, + const uint32_t* pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , size( size_ ) + , usage( usage_ ) + , sharingMode( sharingMode_ ) + , queueFamilyIndexCount( queueFamilyIndexCount_ ) + , pQueueFamilyIndices( pQueueFamilyIndices_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , flags( rhs.flags ) + , size( rhs.size ) + , usage( rhs.usage ) + , sharingMode( rhs.sharingMode ) + , queueFamilyIndexCount( rhs.queueFamilyIndexCount ) + , pQueueFamilyIndices( rhs.pQueueFamilyIndices ) + {} + + BufferCreateInfo & operator=( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( BufferCreateInfo ) - offsetof( BufferCreateInfo, pNext ) ); + return *this; + } + + BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + BufferCreateInfo& operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + BufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + BufferCreateInfo & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + BufferCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + BufferCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT + { + sharingMode = sharingMode_; + return *this; + } + + BufferCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT + { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; + } + + BufferCreateInfo & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT + { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; + } + + operator VkBufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BufferCreateInfo const& ) const = default; +#else + bool operator==( BufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( size == rhs.size ) + && ( usage == rhs.usage ) + && ( sharingMode == rhs.sharingMode ) + && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount ) + && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices ); + } + + bool operator!=( BufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::BufferCreateFlags 
+
+  struct BufferDeviceAddressCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} ) VULKAN_HPP_NOEXCEPT
+      : deviceAddress( deviceAddress_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( BufferDeviceAddressCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , deviceAddress( rhs.deviceAddress )
+    {}
+
+    BufferDeviceAddressCreateInfoEXT & operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( BufferDeviceAddressCreateInfoEXT ) - offsetof( BufferDeviceAddressCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BufferDeviceAddressCreateInfoEXT& operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const*>(&rhs);
+      return *this;
+    }
+
+    BufferDeviceAddressCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferDeviceAddressCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceAddress = deviceAddress_;
+      return *this;
+    }
+
+    operator VkBufferDeviceAddressCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferDeviceAddressCreateInfoEXT*>( this );
+    }
+
+    operator VkBufferDeviceAddressCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferDeviceAddressCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( BufferDeviceAddressCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceAddress == rhs.deviceAddress );
+    }
+
+    bool operator!=( BufferDeviceAddressCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
+  };
+  static_assert( sizeof( BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferDeviceAddressCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct BufferDeviceAddressInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfoEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
+      : buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfoEXT( BufferDeviceAddressInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , buffer( rhs.buffer )
+    {}
+
+    BufferDeviceAddressInfoEXT & operator=( BufferDeviceAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( BufferDeviceAddressInfoEXT ) - offsetof( BufferDeviceAddressInfoEXT, pNext ) );
+      return *this;
+    }
+
+    BufferDeviceAddressInfoEXT( VkBufferDeviceAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BufferDeviceAddressInfoEXT& operator=( VkBufferDeviceAddressInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoEXT const*>(&rhs);
+      return *this;
+    }
+
+    BufferDeviceAddressInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferDeviceAddressInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    operator VkBufferDeviceAddressInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferDeviceAddressInfoEXT*>( this );
+    }
+
+    operator VkBufferDeviceAddressInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferDeviceAddressInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferDeviceAddressInfoEXT const& ) const = default;
+#else
+    bool operator==( BufferDeviceAddressInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer );
+    }
+
+    bool operator!=( BufferDeviceAddressInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+  };
+  static_assert( sizeof( BufferDeviceAddressInfoEXT ) == sizeof( VkBufferDeviceAddressInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferDeviceAddressInfoEXT>::value, "struct wrapper is not a standard layout!" );
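Because the static_asserts above guarantee layout compatibility, a wrapper can be handed straight to the C API. A sketch, assuming a VkDevice `device` and a vk::Buffer `buffer` created elsewhere, with VK_EXT_buffer_device_address enabled and its entry point loaded:

    // Query a buffer's device address via the raw C entry point, relying on
    // the implicit conversion to VkBufferDeviceAddressInfoEXT defined above.
    vk::BufferDeviceAddressInfoEXT info{ buffer };
    VkBufferDeviceAddressInfoEXT cInfo = info;   // conversion operator above
    VkDeviceAddress address = vkGetBufferDeviceAddressEXT( device, &cInfo );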
+
+  struct ImageSubresourceLayers
+  {
+    VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
+                                                 uint32_t mipLevel_ = {},
+                                                 uint32_t baseArrayLayer_ = {},
+                                                 uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
+      : aspectMask( aspectMask_ )
+      , mipLevel( mipLevel_ )
+      , baseArrayLayer( baseArrayLayer_ )
+      , layerCount( layerCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageSubresourceLayers const& rhs ) VULKAN_HPP_NOEXCEPT
+      : aspectMask( rhs.aspectMask )
+      , mipLevel( rhs.mipLevel )
+      , baseArrayLayer( rhs.baseArrayLayer )
+      , layerCount( rhs.layerCount )
+    {}
+
+    ImageSubresourceLayers & operator=( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( ImageSubresourceLayers ) );
+      return *this;
+    }
+
+    ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageSubresourceLayers& operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const*>(&rhs);
+      return *this;
+    }
+
+    ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipLevel = mipLevel_;
+      return *this;
+    }
+
+    ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseArrayLayer = baseArrayLayer_;
+      return *this;
+    }
+
+    ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+
+    operator VkImageSubresourceLayers const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSubresourceLayers*>( this );
+    }
+
+    operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSubresourceLayers*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageSubresourceLayers const& ) const = default;
+#else
+    bool operator==( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( aspectMask == rhs.aspectMask )
+          && ( mipLevel == rhs.mipLevel )
+          && ( baseArrayLayer == rhs.baseArrayLayer )
+          && ( layerCount == rhs.layerCount );
+    }
+
+    bool operator!=( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+    uint32_t mipLevel = {};
+    uint32_t baseArrayLayer = {};
+    uint32_t layerCount = {};
+  };
+  static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageSubresourceLayers>::value, "struct wrapper is not a standard layout!" );
+
+  struct BufferImageCopy
+  {
+    VULKAN_HPP_CONSTEXPR BufferImageCopy( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {},
+                                          uint32_t bufferRowLength_ = {},
+                                          uint32_t bufferImageHeight_ = {},
+                                          VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {},
+                                          VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {},
+                                          VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT
+      : bufferOffset( bufferOffset_ )
+      , bufferRowLength( bufferRowLength_ )
+      , bufferImageHeight( bufferImageHeight_ )
+      , imageSubresource( imageSubresource_ )
+      , imageOffset( imageOffset_ )
+      , imageExtent( imageExtent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferImageCopy( BufferImageCopy const& rhs ) VULKAN_HPP_NOEXCEPT
+      : bufferOffset( rhs.bufferOffset )
+      , bufferRowLength( rhs.bufferRowLength )
+      , bufferImageHeight( rhs.bufferImageHeight )
+      , imageSubresource( rhs.imageSubresource )
+      , imageOffset( rhs.imageOffset )
+      , imageExtent( rhs.imageExtent )
+    {}
+
+    BufferImageCopy & operator=( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( BufferImageCopy ) );
+      return *this;
+    }
+
+    BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BufferImageCopy& operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy const*>(&rhs);
+      return *this;
+    }
+
+    BufferImageCopy & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferOffset = bufferOffset_;
+      return *this;
+    }
+
+    BufferImageCopy & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferRowLength = bufferRowLength_;
+      return *this;
+    }
+
+    BufferImageCopy & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferImageHeight = bufferImageHeight_;
+      return *this;
+    }
+
+    BufferImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageSubresource = imageSubresource_;
+      return *this;
+    }
+
+    BufferImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageOffset = imageOffset_;
+      return *this;
+    }
+
+    BufferImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+
+    operator VkBufferImageCopy const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferImageCopy*>( this );
+    }
+
+    operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferImageCopy*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferImageCopy const& ) const = default;
+#else
+    bool operator==( BufferImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( bufferOffset == rhs.bufferOffset )
+          && ( bufferRowLength == rhs.bufferRowLength )
+          && ( bufferImageHeight == rhs.bufferImageHeight )
+          && ( imageSubresource == rhs.imageSubresource )
+          && ( imageOffset == rhs.imageOffset )
+          && ( imageExtent == rhs.imageExtent );
+    }
+
+    bool operator!=( BufferImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {};
+    uint32_t bufferRowLength = {};
+    uint32_t bufferImageHeight = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
+  };
+  static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferImageCopy>::value, "struct wrapper is not a standard layout!" );
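A sketch of a typical fill for the two structures above, describing a tightly packed upload of a 512x512 color image; vk::Offset3D and vk::Extent3D come from the other split headers, and the dimensions are illustrative:

    // bufferRowLength/bufferImageHeight of zero mean "tightly packed",
    // so the image extent alone defines the buffer addressing.
    vk::BufferImageCopy region = vk::BufferImageCopy{}
        .setBufferOffset( 0 )
        .setImageSubresource( vk::ImageSubresourceLayers{}
            .setAspectMask( vk::ImageAspectFlagBits::eColor )
            .setLayerCount( 1 ) )
        .setImageOffset( vk::Offset3D{ 0, 0, 0 } )
        .setImageExtent( vk::Extent3D{ 512, 512, 1 } );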
"struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct BufferMemoryBarrier + { + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + uint32_t srcQueueFamilyIndex_ = {}, + uint32_t dstQueueFamilyIndex_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) + , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) + , buffer( buffer_ ) + , offset( offset_ ) + , size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( BufferMemoryBarrier const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , srcAccessMask( rhs.srcAccessMask ) + , dstAccessMask( rhs.dstAccessMask ) + , srcQueueFamilyIndex( rhs.srcQueueFamilyIndex ) + , dstQueueFamilyIndex( rhs.dstQueueFamilyIndex ) + , buffer( rhs.buffer ) + , offset( rhs.offset ) + , size( rhs.size ) + {} + + BufferMemoryBarrier & operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( BufferMemoryBarrier ) - offsetof( BufferMemoryBarrier, pNext ) ); + return *this; + } + + BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + BufferMemoryBarrier& operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + BufferMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + BufferMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + BufferMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + BufferMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + srcQueueFamilyIndex = srcQueueFamilyIndex_; + return *this; + } + + BufferMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT + { + dstQueueFamilyIndex = dstQueueFamilyIndex_; + return *this; + } + + BufferMemoryBarrier & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + BufferMemoryBarrier & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + BufferMemoryBarrier & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + operator VkBufferMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( BufferMemoryBarrier const& ) const = default; +#else + bool operator==( BufferMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcAccessMask == rhs.srcAccessMask ) + && ( dstAccessMask == rhs.dstAccessMask ) + && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex ) + && ( dstQueueFamilyIndex == 
+
+  struct BufferMemoryRequirementsInfo2
+  {
+    VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
+      : buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( BufferMemoryRequirementsInfo2 const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , buffer( rhs.buffer )
+    {}
+
+    BufferMemoryRequirementsInfo2 & operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( BufferMemoryRequirementsInfo2 ) - offsetof( BufferMemoryRequirementsInfo2, pNext ) );
+      return *this;
+    }
+
+    BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BufferMemoryRequirementsInfo2& operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const*>(&rhs);
+      return *this;
+    }
+
+    BufferMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferMemoryRequirementsInfo2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    operator VkBufferMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( this );
+    }
+
+    operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferMemoryRequirementsInfo2*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferMemoryRequirementsInfo2 const& ) const = default;
+#else
+    bool operator==( BufferMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer );
+    }
+
+    bool operator!=( BufferMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+  };
+  static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+
+  struct BufferViewCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {},
+                                               VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+                                               VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                               VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+                                               VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , buffer( buffer_ )
+      , format( format_ )
+      , offset( offset_ )
+      , range( range_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , buffer( rhs.buffer )
+      , format( rhs.format )
+      , offset( rhs.offset )
+      , range( rhs.range )
+    {}
+
+    BufferViewCreateInfo & operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( BufferViewCreateInfo ) - offsetof( BufferViewCreateInfo, pNext ) );
+      return *this;
+    }
+
+    BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    BufferViewCreateInfo& operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const*>(&rhs);
+      return *this;
+    }
+
+    BufferViewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    BufferViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    BufferViewCreateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    BufferViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    BufferViewCreateInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    BufferViewCreateInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
+    {
+      range = range_;
+      return *this;
+    }
+
+    operator VkBufferViewCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkBufferViewCreateInfo*>( this );
+    }
+
+    operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkBufferViewCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( BufferViewCreateInfo const& ) const = default;
+#else
+    bool operator==( BufferViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( buffer == rhs.buffer )
+          && ( format == rhs.format )
+          && ( offset == rhs.offset )
+          && ( range == rhs.range );
+    }
+
+    bool operator!=( BufferViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize range = {};
+  };
+  static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<BufferViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkCalibrated.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkCalibrated.hpp
new file mode 100644
index 00000000..ac1540fc
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkCalibrated.hpp
@@ -0,0 +1,122 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkAndroid.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct CalibratedTimestampInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice ) VULKAN_HPP_NOEXCEPT
+      : timeDomain( timeDomain_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( CalibratedTimestampInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , timeDomain( rhs.timeDomain )
+    {}
+
+    CalibratedTimestampInfoEXT & operator=( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( CalibratedTimestampInfoEXT ) - offsetof( CalibratedTimestampInfoEXT, pNext ) );
+      return *this;
+    }
+
+    CalibratedTimestampInfoEXT( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CalibratedTimestampInfoEXT& operator=( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT const*>(&rhs);
+      return *this;
+    }
+
+    CalibratedTimestampInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CalibratedTimestampInfoEXT & setTimeDomain( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      timeDomain = timeDomain_;
+      return *this;
+    }
+
+    operator VkCalibratedTimestampInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCalibratedTimestampInfoEXT*>( this );
+    }
+
+    operator VkCalibratedTimestampInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCalibratedTimestampInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CalibratedTimestampInfoEXT const& ) const = default;
+#else
+    bool operator==( CalibratedTimestampInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( timeDomain == rhs.timeDomain );
+    }
+
+    bool operator!=( CalibratedTimestampInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice;
+  };
+  static_assert( sizeof( CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CalibratedTimestampInfoEXT>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
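A sketch of how this structure feeds vkGetCalibratedTimestampsEXT from VK_EXT_calibrated_timestamps, assuming a VkDevice `device` with the extension enabled and its entry point loaded:

    // Request one device-domain timestamp together with its maximum deviation.
    vk::CalibratedTimestampInfoEXT info{ vk::TimeDomainEXT::eDevice };
    VkCalibratedTimestampInfoEXT cInfo = info;   // implicit conversion above
    uint64_t timestamp = 0, maxDeviation = 0;
    VkResult result = vkGetCalibratedTimestampsEXT( device, 1, &cInfo, &timestamp, &maxDeviation );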
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkCheckpoint.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkCheckpoint.hpp
new file mode 100644
index 00000000..aa4fa401
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkCheckpoint.hpp
@@ -0,0 +1,116 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkAndroid.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct CheckpointDataNV
+  {
+    VULKAN_HPP_CONSTEXPR CheckpointDataNV( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe,
+                                           void* pCheckpointMarker_ = {} ) VULKAN_HPP_NOEXCEPT
+      : stage( stage_ )
+      , pCheckpointMarker( pCheckpointMarker_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CheckpointDataNV( CheckpointDataNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , stage( rhs.stage )
+      , pCheckpointMarker( rhs.pCheckpointMarker )
+    {}
+
+    CheckpointDataNV & operator=( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( CheckpointDataNV ) - offsetof( CheckpointDataNV, pNext ) );
+      return *this;
+    }
+
+    CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CheckpointDataNV& operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CheckpointDataNV const*>(&rhs);
+      return *this;
+    }
+
+    operator VkCheckpointDataNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCheckpointDataNV*>( this );
+    }
+
+    operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCheckpointDataNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CheckpointDataNV const& ) const = default;
+#else
+    bool operator==( CheckpointDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stage == rhs.stage )
+          && ( pCheckpointMarker == rhs.pCheckpointMarker );
+    }
+
+    bool operator!=( CheckpointDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe;
+    void* pCheckpointMarker = {};
+  };
+  static_assert( sizeof( CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CheckpointDataNV>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
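CheckpointDataNV is an output-only structure, which is why its pNext is a mutable void* and no setters are generated for it. A retrieval sketch using the VK_NV_device_diagnostic_checkpoints entry point, assuming a VkQueue `queue` on a device with the extension enabled and the function pointer loaded:

    // After a VK_ERROR_DEVICE_LOST, read back the checkpoints the queue reached.
    uint32_t count = 0;
    vkGetQueueCheckpointDataNV( queue, &count, nullptr );   // query the count
    std::vector<VkCheckpointDataNV> checkpoints( count,
        { VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV } );         // sType must be set
    vkGetQueueCheckpointDataNV( queue, &count, checkpoints.data() );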
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkClear.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkClear.hpp
new file mode 100644
index 00000000..14d323ce
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkClear.hpp
@@ -0,0 +1,394 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkAndroid.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkClear.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  union ClearColorValue
+  {
+    ClearColorValue( VULKAN_HPP_NAMESPACE::ClearColorValue const& rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) );
+    }
+
+    ClearColorValue( const std::array<float, 4>& float32_ = {} )
+    {
+      memcpy( float32, float32_.data(), 4 * sizeof( float ) );
+    }
+
+    ClearColorValue( const std::array<int32_t, 4>& int32_ )
+    {
+      memcpy( int32, int32_.data(), 4 * sizeof( int32_t ) );
+    }
+
+    ClearColorValue( const std::array<uint32_t, 4>& uint32_ )
+    {
+      memcpy( uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
+    }
+
+    ClearColorValue & setFloat32( std::array<float, 4> float32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( float32, float32_.data(), 4 * sizeof( float ) );
+      return *this;
+    }
+
+    ClearColorValue & setInt32( std::array<int32_t, 4> int32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( int32, int32_.data(), 4 * sizeof( int32_t ) );
+      return *this;
+    }
+
+    ClearColorValue & setUint32( std::array<uint32_t, 4> uint32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( uint32, uint32_.data(), 4 * sizeof( uint32_t ) );
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::ClearColorValue & operator=( VULKAN_HPP_NAMESPACE::ClearColorValue const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) );
+      return *this;
+    }
+
+    operator VkClearColorValue const&() const
+    {
+      return *reinterpret_cast<const VkClearColorValue*>(this);
+    }
+
+    operator VkClearColorValue &()
+    {
+      return *reinterpret_cast<VkClearColorValue*>(this);
+    }
+
+    float float32[4];
+    int32_t int32[4];
+    uint32_t uint32[4];
+  };
+
+  struct ClearDepthStencilValue
+  {
+    VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {},
+                                                 uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT
+      : depth( depth_ )
+      , stencil( stencil_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( ClearDepthStencilValue const& rhs ) VULKAN_HPP_NOEXCEPT
+      : depth( rhs.depth )
+      , stencil( rhs.stencil )
+    {}
+
+    ClearDepthStencilValue & operator=( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( ClearDepthStencilValue ) );
+      return *this;
+    }
+
+    ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ClearDepthStencilValue& operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const*>(&rhs);
+      return *this;
+    }
+
+    ClearDepthStencilValue & setDepth( float depth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depth = depth_;
+      return *this;
+    }
+
+    ClearDepthStencilValue & setStencil( uint32_t stencil_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencil = stencil_;
+      return *this;
+    }
+
+    operator VkClearDepthStencilValue const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkClearDepthStencilValue*>( this );
+    }
+
+    operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkClearDepthStencilValue*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ClearDepthStencilValue const& ) const = default;
+#else
+    bool operator==( ClearDepthStencilValue const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( depth == rhs.depth )
+          && ( stencil == rhs.stencil );
+    }
+
+    bool operator!=( ClearDepthStencilValue const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    float depth = {};
+    uint32_t stencil = {};
+  };
+  static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ClearDepthStencilValue>::value, "struct wrapper is not a standard layout!" );
+
+  union ClearValue
+  {
+    ClearValue( VULKAN_HPP_NAMESPACE::ClearValue const& rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) );
+    }
+
+    ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} )
+    {
+      color = color_;
+    }
+
+    ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ )
+    {
+      depthStencil = depthStencil_;
+    }
+
+    ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue color_ ) VULKAN_HPP_NOEXCEPT
+    {
+      color = color_;
+      return *this;
+    }
+
+    ClearValue & setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthStencil = depthStencil_;
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::ClearValue & operator=( VULKAN_HPP_NAMESPACE::ClearValue const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) );
+      return *this;
+    }
+
+    operator VkClearValue const&() const
+    {
+      return *reinterpret_cast<const VkClearValue*>(this);
+    }
+
+    operator VkClearValue &()
+    {
+      return *reinterpret_cast<VkClearValue*>(this);
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    VULKAN_HPP_NAMESPACE::ClearColorValue color;
+    VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil;
+#else
+    VkClearColorValue color;
+    VkClearDepthStencilValue depthStencil;
+#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+  };
+
+  struct ClearAttachment
+  {
+    ClearAttachment( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
+                     uint32_t colorAttachment_ = {},
+                     VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT
+      : aspectMask( aspectMask_ )
+      , colorAttachment( colorAttachment_ )
+      , clearValue( clearValue_ )
+    {}
+
+    ClearAttachment( ClearAttachment const& rhs ) VULKAN_HPP_NOEXCEPT
+      : aspectMask( rhs.aspectMask )
+      , colorAttachment( rhs.colorAttachment )
+      , clearValue( rhs.clearValue )
+    {}
+
+    ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( ClearAttachment ) );
+      return *this;
+    }
+
+    ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ClearAttachment& operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearAttachment const*>(&rhs);
+      return *this;
+    }
+
+    ClearAttachment & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    ClearAttachment & setColorAttachment( uint32_t colorAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachment = colorAttachment_;
+      return *this;
+    }
+
+    ClearAttachment & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue clearValue_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clearValue = clearValue_;
+      return *this;
+    }
+
+    operator VkClearAttachment const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkClearAttachment*>( this );
+    }
+
+    operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkClearAttachment*>( this );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+    uint32_t colorAttachment = {};
+    VULKAN_HPP_NAMESPACE::ClearValue clearValue = {};
+  };
+  static_assert( sizeof( ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ClearAttachment>::value, "struct wrapper is not a standard layout!" );
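The converting constructors above keep clear-value lists terse; ClearValue deliberately has no comparison operators, since the union cannot know which member is active. A sketch of the usual pattern when beginning a render pass (the attachment order must match the render pass; the values are illustrative):

    // One color attachment cleared to opaque black and one depth/stencil
    // attachment cleared to depth 1.0, stencil 0.
    std::array<vk::ClearValue, 2> clearValues = {
        vk::ClearValue{ vk::ClearColorValue{ std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } } },
        vk::ClearValue{ vk::ClearDepthStencilValue{ 1.0f, 0 } }
    };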
"struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct ClearRect + { + VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, + uint32_t baseArrayLayer_ = {}, + uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT + : rect( rect_ ) + , baseArrayLayer( baseArrayLayer_ ) + , layerCount( layerCount_ ) + {} + + VULKAN_HPP_CONSTEXPR ClearRect( ClearRect const& rhs ) VULKAN_HPP_NOEXCEPT + : rect( rhs.rect ) + , baseArrayLayer( rhs.baseArrayLayer ) + , layerCount( rhs.layerCount ) + {} + + ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( ClearRect ) ); + return *this; + } + + ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ClearRect& operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ClearRect & setRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ ) VULKAN_HPP_NOEXCEPT + { + rect = rect_; + return *this; + } + + ClearRect & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT + { + baseArrayLayer = baseArrayLayer_; + return *this; + } + + ClearRect & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT + { + layerCount = layerCount_; + return *this; + } + + operator VkClearRect const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkClearRect &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ClearRect const& ) const = default; +#else + bool operator==( ClearRect const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( rect == rhs.rect ) + && ( baseArrayLayer == rhs.baseArrayLayer ) + && ( layerCount == rhs.layerCount ); + } + + bool operator!=( ClearRect const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Rect2D rect = {}; + uint32_t baseArrayLayer = {}; + uint32_t layerCount = {}; + }; + static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkCmd.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkCmd.hpp new file mode 100644 index 00000000..f88a9a8a --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkCmd.hpp @@ -0,0 +1,406 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkAndroid.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct IndirectCommandsTokenNVX
+  {
+    VULKAN_HPP_CONSTEXPR IndirectCommandsTokenNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline,
+                                                   VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+                                                   VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT
+      : tokenType( tokenType_ )
+      , buffer( buffer_ )
+      , offset( offset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR IndirectCommandsTokenNVX( IndirectCommandsTokenNVX const& rhs ) VULKAN_HPP_NOEXCEPT
+      : tokenType( rhs.tokenType )
+      , buffer( rhs.buffer )
+      , offset( rhs.offset )
+    {}
+
+    IndirectCommandsTokenNVX & operator=( IndirectCommandsTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( IndirectCommandsTokenNVX ) );
+      return *this;
+    }
+
+    IndirectCommandsTokenNVX( VkIndirectCommandsTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    IndirectCommandsTokenNVX& operator=( VkIndirectCommandsTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX const*>(&rhs);
+      return *this;
+    }
+
+    IndirectCommandsTokenNVX & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tokenType = tokenType_;
+      return *this;
+    }
+
+    IndirectCommandsTokenNVX & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    IndirectCommandsTokenNVX & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    operator VkIndirectCommandsTokenNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkIndirectCommandsTokenNVX*>( this );
+    }
+
+    operator VkIndirectCommandsTokenNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkIndirectCommandsTokenNVX*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( IndirectCommandsTokenNVX const& ) const = default;
+#else
+    bool operator==( IndirectCommandsTokenNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( tokenType == rhs.tokenType )
+          && ( buffer == rhs.buffer )
+          && ( offset == rhs.offset );
+    }
+
+    bool operator!=( IndirectCommandsTokenNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline;
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+  };
+  static_assert( sizeof( IndirectCommandsTokenNVX ) == sizeof( VkIndirectCommandsTokenNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<IndirectCommandsTokenNVX>::value, "struct wrapper is not a standard layout!" );
+
+  struct CmdProcessCommandsInfoNVX
+  {
+    VULKAN_HPP_CONSTEXPR CmdProcessCommandsInfoNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ = {},
+                                                    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ = {},
+                                                    uint32_t indirectCommandsTokenCount_ = {},
+                                                    const VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX* pIndirectCommandsTokens_ = {},
+                                                    uint32_t maxSequencesCount_ = {},
+                                                    VULKAN_HPP_NAMESPACE::CommandBuffer targetCommandBuffer_ = {},
+                                                    VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {},
+                                                    VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {},
+                                                    VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {},
+                                                    VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} ) VULKAN_HPP_NOEXCEPT
+      : objectTable( objectTable_ )
+      , indirectCommandsLayout( indirectCommandsLayout_ )
+      , indirectCommandsTokenCount( indirectCommandsTokenCount_ )
+      , pIndirectCommandsTokens( pIndirectCommandsTokens_ )
+      , maxSequencesCount( maxSequencesCount_ )
+      , targetCommandBuffer( targetCommandBuffer_ )
+      , sequencesCountBuffer( sequencesCountBuffer_ )
+      , sequencesCountOffset( sequencesCountOffset_ )
+      , sequencesIndexBuffer( sequencesIndexBuffer_ )
+      , sequencesIndexOffset( sequencesIndexOffset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CmdProcessCommandsInfoNVX( CmdProcessCommandsInfoNVX const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , objectTable( rhs.objectTable )
+      , indirectCommandsLayout( rhs.indirectCommandsLayout )
+      , indirectCommandsTokenCount( rhs.indirectCommandsTokenCount )
+      , pIndirectCommandsTokens( rhs.pIndirectCommandsTokens )
+      , maxSequencesCount( rhs.maxSequencesCount )
+      , targetCommandBuffer( rhs.targetCommandBuffer )
+      , sequencesCountBuffer( rhs.sequencesCountBuffer )
+      , sequencesCountOffset( rhs.sequencesCountOffset )
+      , sequencesIndexBuffer( rhs.sequencesIndexBuffer )
+      , sequencesIndexOffset( rhs.sequencesIndexOffset )
+    {}
+
+    CmdProcessCommandsInfoNVX & operator=( CmdProcessCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( CmdProcessCommandsInfoNVX ) - offsetof( CmdProcessCommandsInfoNVX, pNext ) );
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX( VkCmdProcessCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CmdProcessCommandsInfoNVX& operator=( VkCmdProcessCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CmdProcessCommandsInfoNVX const*>(&rhs);
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setObjectTable( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectTable = objectTable_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indirectCommandsLayout = indirectCommandsLayout_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setIndirectCommandsTokenCount( uint32_t indirectCommandsTokenCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indirectCommandsTokenCount = indirectCommandsTokenCount_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setPIndirectCommandsTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX* pIndirectCommandsTokens_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pIndirectCommandsTokens = pIndirectCommandsTokens_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxSequencesCount = maxSequencesCount_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setTargetCommandBuffer( VULKAN_HPP_NAMESPACE::CommandBuffer targetCommandBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      targetCommandBuffer = targetCommandBuffer_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sequencesCountBuffer = sequencesCountBuffer_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sequencesCountOffset = sequencesCountOffset_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sequencesIndexBuffer = sequencesIndexBuffer_;
+      return *this;
+    }
+
+    CmdProcessCommandsInfoNVX & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sequencesIndexOffset = sequencesIndexOffset_;
+      return *this;
+    }
+
+    operator VkCmdProcessCommandsInfoNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCmdProcessCommandsInfoNVX*>( this );
+    }
+
+    operator VkCmdProcessCommandsInfoNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCmdProcessCommandsInfoNVX*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CmdProcessCommandsInfoNVX const& ) const = default;
+#else
+    bool operator==( CmdProcessCommandsInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectTable == rhs.objectTable )
+          && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
+          && ( indirectCommandsTokenCount == rhs.indirectCommandsTokenCount )
+          && ( pIndirectCommandsTokens == rhs.pIndirectCommandsTokens )
+          && ( maxSequencesCount == rhs.maxSequencesCount )
+          && ( targetCommandBuffer == rhs.targetCommandBuffer )
+          && ( sequencesCountBuffer == rhs.sequencesCountBuffer )
+          && ( sequencesCountOffset == rhs.sequencesCountOffset )
+          && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
+          && ( sequencesIndexOffset == rhs.sequencesIndexOffset );
+    }
+
+    bool operator!=( CmdProcessCommandsInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCmdProcessCommandsInfoNVX;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable = {};
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout = {};
+    uint32_t indirectCommandsTokenCount = {};
+    const VULKAN_HPP_NAMESPACE::IndirectCommandsTokenNVX* pIndirectCommandsTokens = {};
+    uint32_t maxSequencesCount = {};
+    VULKAN_HPP_NAMESPACE::CommandBuffer targetCommandBuffer = {};
+    VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {};
+    VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {};
+  };
+  static_assert( sizeof( CmdProcessCommandsInfoNVX ) == sizeof( VkCmdProcessCommandsInfoNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CmdProcessCommandsInfoNVX>::value, "struct wrapper is not a standard layout!" );
+
+  struct CmdReserveSpaceForCommandsInfoNVX
+  {
+    VULKAN_HPP_CONSTEXPR CmdReserveSpaceForCommandsInfoNVX( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ = {},
+                                                            VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ = {},
+                                                            uint32_t maxSequencesCount_ = {} ) VULKAN_HPP_NOEXCEPT
+      : objectTable( objectTable_ )
+      , indirectCommandsLayout( indirectCommandsLayout_ )
+      , maxSequencesCount( maxSequencesCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CmdReserveSpaceForCommandsInfoNVX( CmdReserveSpaceForCommandsInfoNVX const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , objectTable( rhs.objectTable )
+      , indirectCommandsLayout( rhs.indirectCommandsLayout )
+      , maxSequencesCount( rhs.maxSequencesCount )
+    {}
+
+    CmdReserveSpaceForCommandsInfoNVX & operator=( CmdReserveSpaceForCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( CmdReserveSpaceForCommandsInfoNVX ) - offsetof( CmdReserveSpaceForCommandsInfoNVX, pNext ) );
+      return *this;
+    }
+
+    CmdReserveSpaceForCommandsInfoNVX( VkCmdReserveSpaceForCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CmdReserveSpaceForCommandsInfoNVX& operator=( VkCmdReserveSpaceForCommandsInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CmdReserveSpaceForCommandsInfoNVX const*>(&rhs);
+      return *this;
+    }
+
+    CmdReserveSpaceForCommandsInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CmdReserveSpaceForCommandsInfoNVX & setObjectTable( VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectTable = objectTable_;
+      return *this;
+    }
+
+    CmdReserveSpaceForCommandsInfoNVX & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indirectCommandsLayout = indirectCommandsLayout_;
+      return *this;
+    }
+
+    CmdReserveSpaceForCommandsInfoNVX & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxSequencesCount = maxSequencesCount_;
+      return *this;
+    }
+
+    operator VkCmdReserveSpaceForCommandsInfoNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCmdReserveSpaceForCommandsInfoNVX*>( this );
+    }
+
+    operator VkCmdReserveSpaceForCommandsInfoNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCmdReserveSpaceForCommandsInfoNVX*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CmdReserveSpaceForCommandsInfoNVX const& ) const = default;
+#else
+    bool operator==( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectTable == rhs.objectTable )
+          && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
+          && ( maxSequencesCount == rhs.maxSequencesCount );
+    }
+
+    bool operator!=( CmdReserveSpaceForCommandsInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCmdReserveSpaceForCommandsInfoNVX;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ObjectTableNVX objectTable = {};
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNVX indirectCommandsLayout = {};
+    uint32_t maxSequencesCount = {};
+  };
+  static_assert( sizeof( CmdReserveSpaceForCommandsInfoNVX ) == sizeof( VkCmdReserveSpaceForCommandsInfoNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CmdReserveSpaceForCommandsInfoNVX>::value, "struct wrapper is not a standard layout!" );
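These two structures come from the since-superseded VK_NVX_device_generated_commands extension and are used as a pair: space is reserved in a secondary command buffer first, and generation later targets the same object table and layout. A construction sketch, with the handles `objectTable`, `commandsLayout` and the token `token` all assumed to exist:

    // Reserve space for up to maxSequences generated sequences, then describe
    // the generation pass over a single input token.
    uint32_t maxSequences = 32;
    vk::CmdReserveSpaceForCommandsInfoNVX reserveInfo{ objectTable, commandsLayout, maxSequences };
    vk::CmdProcessCommandsInfoNVX processInfo = vk::CmdProcessCommandsInfoNVX{}
        .setObjectTable( objectTable )
        .setIndirectCommandsLayout( commandsLayout )
        .setIndirectCommandsTokenCount( 1 )
        .setPIndirectCommandsTokens( &token )   // a vk::IndirectCommandsTokenNVX
        .setMaxSequencesCount( maxSequences );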
CmdReserveSpaceForCommandsInfoNVX ) == sizeof( VkCmdReserveSpaceForCommandsInfoNVX ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkCoarse.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkCoarse.hpp new file mode 100644 index 00000000..f98bce6a --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkCoarse.hpp @@ -0,0 +1,234 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
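The two NVX structures above follow the wrapper conventions used throughout these headers: chainable setters that return *this, implicit conversions to the underlying C struct, and memberwise comparison. As a minimal sketch (not part of the patch), assuming the default vk namespace and hypothetical objectTable, layout, and cmdBuf handles created via VK_NVX_device_generated_commands, reserving command space might look like:

    // Sketch only: chain the setters, then pass the wrapper wherever a
    // VkCmdReserveSpaceForCommandsInfoNVX is expected.
    vk::CmdReserveSpaceForCommandsInfoNVX reserveInfo = vk::CmdReserveSpaceForCommandsInfoNVX{}
        .setObjectTable( objectTable )
        .setIndirectCommandsLayout( layout )
        .setMaxSequencesCount( 64 );
    cmdBuf.reserveSpaceForCommandsNVX( reserveInfo );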
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkAndroid.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct CoarseSampleLocationNV
+  {
+    VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( uint32_t pixelX_ = {},
+                                                 uint32_t pixelY_ = {},
+                                                 uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pixelX( pixelX_ )
+      , pixelY( pixelY_ )
+      , sample( sample_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( CoarseSampleLocationNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pixelX( rhs.pixelX )
+      , pixelY( rhs.pixelY )
+      , sample( rhs.sample )
+    {}
+
+    CoarseSampleLocationNV & operator=( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( CoarseSampleLocationNV ) );
+      return *this;
+    }
+
+    CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CoarseSampleLocationNV& operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const *>(&rhs);
+      return *this;
+    }
+
+    CoarseSampleLocationNV & setPixelX( uint32_t pixelX_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pixelX = pixelX_;
+      return *this;
+    }
+
+    CoarseSampleLocationNV & setPixelY( uint32_t pixelY_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pixelY = pixelY_;
+      return *this;
+    }
+
+    CoarseSampleLocationNV & setSample( uint32_t sample_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sample = sample_;
+      return *this;
+    }
+
+    operator VkCoarseSampleLocationNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCoarseSampleLocationNV*>( this );
+    }
+
+    operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCoarseSampleLocationNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CoarseSampleLocationNV const& ) const = default;
+#else
+    bool operator==( CoarseSampleLocationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( pixelX == rhs.pixelX )
+          && ( pixelY == rhs.pixelY )
+          && ( sample == rhs.sample );
+    }
+
+    bool operator!=( CoarseSampleLocationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t pixelX = {};
+    uint32_t pixelY = {};
+    uint32_t sample = {};
+  };
+  static_assert( sizeof( CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CoarseSampleLocationNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct CoarseSampleOrderCustomNV
+  {
+    VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations,
+                                                    uint32_t sampleCount_ = {},
+                                                    uint32_t sampleLocationCount_ = {},
+                                                    const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
+      : shadingRate( shadingRate_ )
+      , sampleCount( sampleCount_ )
+      , sampleLocationCount( sampleLocationCount_ )
+      , pSampleLocations( pSampleLocations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( CoarseSampleOrderCustomNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : shadingRate( rhs.shadingRate )
+      , sampleCount( rhs.sampleCount )
+      , sampleLocationCount( rhs.sampleLocationCount )
+      , pSampleLocations( rhs.pSampleLocations )
+    {}
+
+    CoarseSampleOrderCustomNV & operator=( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( CoarseSampleOrderCustomNV ) );
+      return *this;
+    }
+
+    CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CoarseSampleOrderCustomNV& operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV const *>(&rhs);
+      return *this;
+    }
+
+    CoarseSampleOrderCustomNV & setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRate = shadingRate_;
+      return *this;
+    }
+
+    CoarseSampleOrderCustomNV & setSampleCount( uint32_t sampleCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleCount = sampleCount_;
+      return *this;
+    }
+
+    CoarseSampleOrderCustomNV & setSampleLocationCount( uint32_t sampleLocationCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationCount = sampleLocationCount_;
+      return *this;
+    }
+
+    CoarseSampleOrderCustomNV & setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSampleLocations = pSampleLocations_;
+      return *this;
+    }
+
+    operator VkCoarseSampleOrderCustomNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( this );
+    }
+
+    operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCoarseSampleOrderCustomNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CoarseSampleOrderCustomNV const& ) const = default;
+#else
+    bool operator==( CoarseSampleOrderCustomNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( shadingRate == rhs.shadingRate )
+          && ( sampleCount == rhs.sampleCount )
+          && ( sampleLocationCount == rhs.sampleLocationCount )
+          && ( pSampleLocations == rhs.pSampleLocations );
+    }
+
+    bool operator!=( CoarseSampleOrderCustomNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations;
+    uint32_t sampleCount = {};
+    uint32_t sampleLocationCount = {};
+    const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations = {};
+  };
+  static_assert( sizeof( CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CoarseSampleOrderCustomNV>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkCommand.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkCommand.hpp
new file mode 100644
index 00000000..60236000
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkCommand.hpp
@@ -0,0 +1,523 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
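CoarseSampleOrderCustomNV pairs a shading-rate palette entry with an explicit sample ordering; sampleLocationCount must cover every (pixel, sample) pair of the coarse fragment. A minimal sketch (not part of the patch), assuming the default vk namespace, for one invocation per 1x2 pixels at two samples (2 x 1 x 2 = 4 locations):

    // Sketch only: each location is { pixelX, pixelY, sample }.
    std::array<vk::CoarseSampleLocationNV, 4> locations{ {
        { 0, 0, 0 }, { 0, 0, 1 }, { 0, 1, 0 }, { 0, 1, 1 }
    } };
    vk::CoarseSampleOrderCustomNV order = vk::CoarseSampleOrderCustomNV{}
        .setShadingRate( vk::ShadingRatePaletteEntryNV::e1InvocationPer1X2Pixels )
        .setSampleCount( 2 )
        .setSampleLocationCount( static_cast<uint32_t>( locations.size() ) )
        .setPSampleLocations( locations.data() );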
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkAndroid.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct CommandBufferAllocateInfo
+  {
+    VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {},
+                                                    VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary,
+                                                    uint32_t commandBufferCount_ = {} ) VULKAN_HPP_NOEXCEPT
+      : commandPool( commandPool_ )
+      , level( level_ )
+      , commandBufferCount( commandBufferCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( CommandBufferAllocateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , commandPool( rhs.commandPool )
+      , level( rhs.level )
+      , commandBufferCount( rhs.commandBufferCount )
+    {}
+
+    CommandBufferAllocateInfo & operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( CommandBufferAllocateInfo ) - offsetof( CommandBufferAllocateInfo, pNext ) );
+      return *this;
+    }
+
+    CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CommandBufferAllocateInfo& operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const *>(&rhs);
+      return *this;
+    }
+
+    CommandBufferAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandBufferAllocateInfo & setCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandPool = commandPool_;
+      return *this;
+    }
+
+    CommandBufferAllocateInfo & setLevel( VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT
+    {
+      level = level_;
+      return *this;
+    }
+
+    CommandBufferAllocateInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferCount = commandBufferCount_;
+      return *this;
+    }
+
+    operator VkCommandBufferAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferAllocateInfo*>( this );
+    }
+
+    operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferAllocateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CommandBufferAllocateInfo const& ) const = default;
+#else
+    bool operator==( CommandBufferAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( commandPool == rhs.commandPool )
+          && ( level == rhs.level )
+          && ( commandBufferCount == rhs.commandBufferCount );
+    }
+
+    bool operator!=( CommandBufferAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::CommandPool commandPool = {};
+    VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary;
+    uint32_t commandBufferCount = {};
+  };
+  static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandBufferAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct CommandBufferInheritanceInfo
+  {
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
+                                                       uint32_t subpass_ = {},
+                                                       VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {},
+                                                       VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {},
+                                                       VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {},
+                                                       VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT
+      : renderPass( renderPass_ )
+      , subpass( subpass_ )
+      , framebuffer( framebuffer_ )
+      , occlusionQueryEnable( occlusionQueryEnable_ )
+      , queryFlags( queryFlags_ )
+      , pipelineStatistics( pipelineStatistics_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , renderPass( rhs.renderPass )
+      , subpass( rhs.subpass )
+      , framebuffer( rhs.framebuffer )
+      , occlusionQueryEnable( rhs.occlusionQueryEnable )
+      , queryFlags( rhs.queryFlags )
+      , pipelineStatistics( rhs.pipelineStatistics )
+    {}
+
+    CommandBufferInheritanceInfo & operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( CommandBufferInheritanceInfo ) - offsetof( CommandBufferInheritanceInfo, pNext ) );
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CommandBufferInheritanceInfo& operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const *>(&rhs);
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpass = subpass_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      framebuffer = framebuffer_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      occlusionQueryEnable = occlusionQueryEnable_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queryFlags = queryFlags_;
+      return *this;
+    }
+
+    CommandBufferInheritanceInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineStatistics = pipelineStatistics_;
+      return *this;
+    }
+
+    operator VkCommandBufferInheritanceInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferInheritanceInfo*>( this );
+    }
+
+    operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferInheritanceInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CommandBufferInheritanceInfo const& ) const = default;
+#else
+    bool operator==( CommandBufferInheritanceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( renderPass == rhs.renderPass )
+          && ( subpass == rhs.subpass )
+          && ( framebuffer == rhs.framebuffer )
+          && ( occlusionQueryEnable == rhs.occlusionQueryEnable )
+          && ( queryFlags == rhs.queryFlags )
+          && ( pipelineStatistics == rhs.pipelineStatistics );
+    }
+
+    bool operator!=( CommandBufferInheritanceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+    uint32_t subpass = {};
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable = {};
+    VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags = {};
+    VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
+  };
+  static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandBufferInheritanceInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct CommandBufferBeginInfo
+  {
+    VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {},
+                                                 const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , pInheritanceInfo( pInheritanceInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferBeginInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , pInheritanceInfo( rhs.pInheritanceInfo )
+    {}
+
+    CommandBufferBeginInfo & operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( CommandBufferBeginInfo ) - offsetof( CommandBufferBeginInfo, pNext ) );
+      return *this;
+    }
+
+    CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CommandBufferBeginInfo& operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const *>(&rhs);
+      return *this;
+    }
+
+    CommandBufferBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandBufferBeginInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    CommandBufferBeginInfo & setPInheritanceInfo( const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInheritanceInfo = pInheritanceInfo_;
+      return *this;
+    }
+
+    operator VkCommandBufferBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferBeginInfo*>( this );
+    }
+
+    operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferBeginInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CommandBufferBeginInfo const& ) const = default;
+#else
+    bool operator==( CommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pInheritanceInfo == rhs.pInheritanceInfo );
+    }
+
+    bool operator!=( CommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {};
+    const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo = {};
+  };
+  static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct CommandBufferInheritanceConditionalRenderingInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {} ) VULKAN_HPP_NOEXCEPT
+      : conditionalRenderingEnable( conditionalRenderingEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , conditionalRenderingEnable( rhs.conditionalRenderingEnable )
+    {}
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) - offsetof( CommandBufferInheritanceConditionalRenderingInfoEXT, pNext ) );
+      return *this;
+    }
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT& operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandBufferInheritanceConditionalRenderingInfoEXT & setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      conditionalRenderingEnable = conditionalRenderingEnable_;
+      return *this;
+    }
+
+    operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandBufferInheritanceConditionalRenderingInfoEXT*>( this );
+    }
+
+    operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const& ) const = default;
+#else
+    bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( conditionalRenderingEnable == rhs.conditionalRenderingEnable );
+    }
+
+    bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable = {};
+  };
+  static_assert( sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) == sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandBufferInheritanceConditionalRenderingInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct CommandPoolCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {},
+                                                uint32_t queueFamilyIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , queueFamilyIndex( queueFamilyIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , queueFamilyIndex( rhs.queueFamilyIndex )
+    {}
+
+    CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( CommandPoolCreateInfo ) - offsetof( CommandPoolCreateInfo, pNext ) );
+      return *this;
+    }
+
+    CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CommandPoolCreateInfo& operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    CommandPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CommandPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
+    operator VkCommandPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCommandPoolCreateInfo*>( this );
+    }
+
+    operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCommandPoolCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CommandPoolCreateInfo const& ) const = default;
+#else
+    bool operator==( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueFamilyIndex == rhs.queueFamilyIndex );
+    }
+
+    bool operator!=( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {};
+    uint32_t queueFamilyIndex = {};
+  };
+  static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CommandPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkComponent.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkComponent.hpp
new file mode 100644
index 00000000..2b7a2703
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkComponent.hpp
@@ -0,0 +1,54 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkAndroid.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkCompute.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkCompute.hpp
new file mode 100644
index 00000000..4f17a6f3
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkCompute.hpp
@@ -0,0 +1,472 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
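The command-buffer structs above are the ones most client code touches first. A minimal sketch (not part of the patch), assuming the default vk namespace and hypothetical device and pool handles, of allocating one primary command buffer and beginning one-time recording:

    vk::CommandBufferAllocateInfo allocInfo{ pool, vk::CommandBufferLevel::ePrimary, 1 };
    vk::CommandBuffer cmd = device.allocateCommandBuffers( allocInfo ).front();
    cmd.begin( vk::CommandBufferBeginInfo{}
                 .setFlags( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) );
    // ... record commands ...
    cmd.end();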
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkAndroid.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkComponent.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct SpecializationMapEntry
+  {
+    VULKAN_HPP_CONSTEXPR SpecializationMapEntry( uint32_t constantID_ = {},
+                                                 uint32_t offset_ = {},
+                                                 size_t size_ = {} ) VULKAN_HPP_NOEXCEPT
+      : constantID( constantID_ )
+      , offset( offset_ )
+      , size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const& rhs ) VULKAN_HPP_NOEXCEPT
+      : constantID( rhs.constantID )
+      , offset( rhs.offset )
+      , size( rhs.size )
+    {}
+
+    SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( SpecializationMapEntry ) );
+      return *this;
+    }
+
+    SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SpecializationMapEntry& operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationMapEntry const *>(&rhs);
+      return *this;
+    }
+
+    SpecializationMapEntry & setConstantID( uint32_t constantID_ ) VULKAN_HPP_NOEXCEPT
+    {
+      constantID = constantID_;
+      return *this;
+    }
+
+    SpecializationMapEntry & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    SpecializationMapEntry & setSize( size_t size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    operator VkSpecializationMapEntry const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSpecializationMapEntry*>( this );
+    }
+
+    operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSpecializationMapEntry*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SpecializationMapEntry const& ) const = default;
+#else
+    bool operator==( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( constantID == rhs.constantID )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t constantID = {};
+    uint32_t offset = {};
+    size_t size = {};
+  };
+  static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SpecializationMapEntry>::value, "struct wrapper is not a standard layout!" );
+
+  struct SpecializationInfo
+  {
+    VULKAN_HPP_CONSTEXPR SpecializationInfo( uint32_t mapEntryCount_ = {},
+                                             const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ = {},
+                                             size_t dataSize_ = {},
+                                             const void* pData_ = {} ) VULKAN_HPP_NOEXCEPT
+      : mapEntryCount( mapEntryCount_ )
+      , pMapEntries( pMapEntries_ )
+      , dataSize( dataSize_ )
+      , pData( pData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : mapEntryCount( rhs.mapEntryCount )
+      , pMapEntries( rhs.pMapEntries )
+      , dataSize( rhs.dataSize )
+      , pData( rhs.pData )
+    {}
+
+    SpecializationInfo & operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( SpecializationInfo ) );
+      return *this;
+    }
+
+    SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SpecializationInfo& operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationInfo const *>(&rhs);
+      return *this;
+    }
+
+    SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mapEntryCount = mapEntryCount_;
+      return *this;
+    }
+
+    SpecializationInfo & setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMapEntries = pMapEntries_;
+      return *this;
+    }
+
+    SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataSize = dataSize_;
+      return *this;
+    }
+
+    SpecializationInfo & setPData( const void* pData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pData = pData_;
+      return *this;
+    }
+
+    operator VkSpecializationInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSpecializationInfo*>( this );
+    }
+
+    operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSpecializationInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SpecializationInfo const& ) const = default;
+#else
+    bool operator==( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( mapEntryCount == rhs.mapEntryCount )
+          && ( pMapEntries == rhs.pMapEntries )
+          && ( dataSize == rhs.dataSize )
+          && ( pData == rhs.pData );
+    }
+
+    bool operator!=( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t mapEntryCount = {};
+    const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries = {};
+    size_t dataSize = {};
+    const void* pData = {};
+  };
+  static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SpecializationInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineShaderStageCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {},
+                                                        VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex,
+                                                        VULKAN_HPP_NAMESPACE::ShaderModule module_ = {},
+                                                        const char* pName_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , stage( stage_ )
+      , module( module_ )
+      , pName( pName_ )
+      , pSpecializationInfo( pSpecializationInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , stage( rhs.stage )
+      , module( rhs.module )
+      , pName( rhs.pName )
+      , pSpecializationInfo( rhs.pSpecializationInfo )
+    {}
+
+    PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineShaderStageCreateInfo ) - offsetof( PipelineShaderStageCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineShaderStageCreateInfo& operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stage = stage_;
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo & setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT
+    {
+      module = module_;
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo & setPName( const char* pName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pName = pName_;
+      return *this;
+    }
+
+    PipelineShaderStageCreateInfo & setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSpecializationInfo = pSpecializationInfo_;
+      return *this;
+    }
+
+    operator VkPipelineShaderStageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineShaderStageCreateInfo*>( this );
+    }
+
+    operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineShaderStageCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineShaderStageCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stage == rhs.stage )
+          && ( module == rhs.module )
+          && ( pName == rhs.pName )
+          && ( pSpecializationInfo == rhs.pSpecializationInfo );
+    }
+
+    bool operator!=( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex;
+    VULKAN_HPP_NAMESPACE::ShaderModule module = {};
+    const char* pName = {};
+    const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo = {};
+  };
+  static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineShaderStageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct ComputePipelineCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
+                                                    VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {},
+                                                    VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
+                                                    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
+                                                    int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , stage( stage_ )
+      , layout( layout_ )
+      , basePipelineHandle( basePipelineHandle_ )
+      , basePipelineIndex( basePipelineIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( ComputePipelineCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , stage( rhs.stage )
+      , layout( rhs.layout )
+      , basePipelineHandle( rhs.basePipelineHandle )
+      , basePipelineIndex( rhs.basePipelineIndex )
+    {}
+
+    ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ComputePipelineCreateInfo ) - offsetof( ComputePipelineCreateInfo, pNext ) );
+      return *this;
+    }
+
+    ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ComputePipelineCreateInfo& operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    ComputePipelineCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stage = stage_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineHandle = basePipelineHandle_;
+      return *this;
+    }
+
+    ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineIndex = basePipelineIndex_;
+      return *this;
+    }
+
+    operator VkComputePipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkComputePipelineCreateInfo*>( this );
+    }
+
+    operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkComputePipelineCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ComputePipelineCreateInfo const& ) const = default;
+#else
+    bool operator==( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stage == rhs.stage )
+          && ( layout == rhs.layout )
+          && ( basePipelineHandle == rhs.basePipelineHandle )
+          && ( basePipelineIndex == rhs.basePipelineIndex );
+    }
+
+    bool operator!=( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+    int32_t basePipelineIndex = {};
+  };
+  static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ComputePipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkConditional.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkConditional.hpp
new file mode 100644
index 00000000..a439e118
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkConditional.hpp
@@ -0,0 +1,152 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
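SpecializationInfo and PipelineShaderStageCreateInfo combine naturally when a compute shader's workgroup size is specialized at pipeline-creation time. A minimal sketch (not part of the patch), assuming the default vk namespace and hypothetical shaderModule and pipelineLayout handles; the resulting info is what Device::createComputePipeline consumes:

    uint32_t workgroupSize = 64;  // value for constant_id = 0 in the shader
    vk::SpecializationMapEntry entry{ 0, 0, sizeof( uint32_t ) };
    vk::SpecializationInfo specInfo{ 1, &entry, sizeof( workgroupSize ), &workgroupSize };
    vk::PipelineShaderStageCreateInfo stage{ {}, vk::ShaderStageFlagBits::eCompute,
                                             shaderModule, "main", &specInfo };
    vk::ComputePipelineCreateInfo createInfo{ {}, stage, pipelineLayout };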
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkAndroid.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkComponent.hpp"
+#include "VkCompute.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct ConditionalRenderingBeginInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+                                                           VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+                                                           VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {} ) VULKAN_HPP_NOEXCEPT
+      : buffer( buffer_ )
+      , offset( offset_ )
+      , flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , buffer( rhs.buffer )
+      , offset( rhs.offset )
+      , flags( rhs.flags )
+    {}
+
+    ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ConditionalRenderingBeginInfoEXT ) - offsetof( ConditionalRenderingBeginInfoEXT, pNext ) );
+      return *this;
+    }
+
+    ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ConditionalRenderingBeginInfoEXT& operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    ConditionalRenderingBeginInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    ConditionalRenderingBeginInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator VkConditionalRenderingBeginInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( this );
+    }
+
+    operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkConditionalRenderingBeginInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ConditionalRenderingBeginInfoEXT const& ) const = default;
+#else
+    bool operator==( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( buffer == rhs.buffer )
+          && ( offset == rhs.offset )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {};
+  };
+  static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ConditionalRenderingBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkConformance.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkConformance.hpp
new file mode 100644
index 00000000..07942456
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkConformance.hpp
@@ -0,0 +1,153 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
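ConditionalRenderingBeginInfoEXT names the buffer location whose 32-bit value gates execution; per the extension, the offset must be a multiple of 4. A minimal sketch (not part of the patch), assuming the default vk namespace and hypothetical cmd and predicateBuffer handles:

    vk::ConditionalRenderingBeginInfoEXT condInfo = vk::ConditionalRenderingBeginInfoEXT{}
        .setBuffer( predicateBuffer )
        .setOffset( 0 );
    cmd.beginConditionalRenderingEXT( condInfo );
    // draws recorded here execute only if the predicate is non-zero
    cmd.endConditionalRenderingEXT();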
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkAndroid.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkComponent.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct ConformanceVersionKHR
+  {
+    VULKAN_HPP_CONSTEXPR ConformanceVersionKHR( uint8_t major_ = {},
+                                                uint8_t minor_ = {},
+                                                uint8_t subminor_ = {},
+                                                uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT
+      : major( major_ )
+      , minor( minor_ )
+      , subminor( subminor_ )
+      , patch( patch_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ConformanceVersionKHR( ConformanceVersionKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : major( rhs.major )
+      , minor( rhs.minor )
+      , subminor( rhs.subminor )
+      , patch( rhs.patch )
+    {}
+
+    ConformanceVersionKHR & operator=( ConformanceVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>( this ), &rhs, sizeof( ConformanceVersionKHR ) );
+      return *this;
+    }
+
+    ConformanceVersionKHR( VkConformanceVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ConformanceVersionKHR& operator=( VkConformanceVersionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConformanceVersionKHR const *>( &rhs );
+      return *this;
+    }
+
+    ConformanceVersionKHR & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT
+    {
+      major = major_;
+      return *this;
+    }
+
+    ConformanceVersionKHR & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minor = minor_;
+      return *this;
+    }
+
+    ConformanceVersionKHR & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subminor = subminor_;
+      return *this;
+    }
+
+    ConformanceVersionKHR & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT
+    {
+      patch = patch_;
+      return *this;
+    }
+
+    operator VkConformanceVersionKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkConformanceVersionKHR*>( this );
+    }
+
+    operator VkConformanceVersionKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkConformanceVersionKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ConformanceVersionKHR const& ) const = default;
+#else
+    bool operator==( ConformanceVersionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( major == rhs.major )
+          && ( minor == rhs.minor )
+          && ( subminor == rhs.subminor )
+          && ( patch == rhs.patch );
+    }
+
+    bool operator!=( ConformanceVersionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint8_t major = {};
+    uint8_t minor = {};
+    uint8_t subminor = {};
+    uint8_t patch = {};
+  };
+  static_assert( sizeof( ConformanceVersionKHR ) == sizeof( VkConformanceVersionKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ConformanceVersionKHR>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkCooperative.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkCooperative.hpp
new file mode 100644
index 00000000..134b90e4
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkCooperative.hpp
@@ -0,0 +1,209 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("Combined Software") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkAndroid.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkComponent.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct CooperativeMatrixPropertiesNV
+  {
+    VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( uint32_t MSize_ = {},
+                                                        uint32_t NSize_ = {},
+                                                        uint32_t KSize_ = {},
+                                                        VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
+                                                        VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
+                                                        VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
+                                                        VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16,
+                                                        VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice ) VULKAN_HPP_NOEXCEPT
+      : MSize( MSize_ )
+      , NSize( NSize_ )
+      , KSize( KSize_ )
+      , AType( AType_ )
+      , BType( BType_ )
+      , CType( CType_ )
+      , DType( DType_ )
+      , scope( scope_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , MSize( rhs.MSize )
+      , NSize( rhs.NSize )
+      , KSize( rhs.KSize )
+      , AType( rhs.AType )
+      , BType( rhs.BType )
+      , CType( rhs.CType )
+      , DType( rhs.DType )
+      , scope( rhs.scope )
+    {}
+
+    CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( CooperativeMatrixPropertiesNV ) - offsetof( CooperativeMatrixPropertiesNV, pNext ) );
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CooperativeMatrixPropertiesNV& operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const *>( &rhs );
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setMSize( uint32_t MSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      MSize = MSize_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setNSize( uint32_t NSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      NSize = NSize_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setKSize( uint32_t KSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      KSize = KSize_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setAType( VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      AType = AType_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setBType( VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      BType = BType_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setCType( VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      CType = CType_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setDType( VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      DType = DType_;
+      return *this;
+    }
+
+    CooperativeMatrixPropertiesNV & setScope( VULKAN_HPP_NAMESPACE::ScopeNV scope_ ) VULKAN_HPP_NOEXCEPT
+    {
+      scope = scope_;
+      return *this;
+    }
+
+    operator VkCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCooperativeMatrixPropertiesNV*>( this );
+    }
+
+    operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CooperativeMatrixPropertiesNV const& ) const = default;
+#else
+    bool operator==( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( MSize == rhs.MSize )
+          && ( NSize == rhs.NSize )
+          && ( KSize == rhs.KSize )
+          && ( AType == rhs.AType )
+          && ( BType == rhs.BType )
+          && ( CType == rhs.CType )
+          && ( DType == rhs.DType )
+          && ( scope == rhs.scope );
+    }
+
+    bool operator!=( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV;
+    void* pNext = {};
+    uint32_t MSize = {};
+    uint32_t NSize = {};
+    uint32_t KSize = {};
+    VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+    VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+    VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+    VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
+    VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice;
+  };
+  static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkCopy.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkCopy.hpp
new file mode 100644
index 00000000..d723be3e
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkCopy.hpp
@@ -0,0 +1,199 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("Combined Software") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
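+// Editorial note: a minimal usage sketch for CooperativeMatrixPropertiesNV
+// (defined in VkCooperative.hpp above), not part of the generated header.
+// It assumes a VkPhysicalDevice `physicalDevice` with VK_NV_cooperative_matrix
+// available and a loaded function pointer; the variable names are illustrative:
+//
+//   uint32_t count = 0;
+//   vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, &count, nullptr );
+//   std::vector<vk::CooperativeMatrixPropertiesNV> props( count );
+//   vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
+//       physicalDevice, &count,
+//       reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( props.data() ) );
+//
+// The reinterpret_cast is valid because the wrapper and the C struct are
+// layout-compatible, which is exactly what the static_asserts above enforce.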
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkComponent.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct CopyDescriptorSet
+  {
+    VULKAN_HPP_CONSTEXPR CopyDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {},
+                                            uint32_t srcBinding_ = {},
+                                            uint32_t srcArrayElement_ = {},
+                                            VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {},
+                                            uint32_t dstBinding_ = {},
+                                            uint32_t dstArrayElement_ = {},
+                                            uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
+      : srcSet( srcSet_ )
+      , srcBinding( srcBinding_ )
+      , srcArrayElement( srcArrayElement_ )
+      , dstSet( dstSet_ )
+      , dstBinding( dstBinding_ )
+      , dstArrayElement( dstArrayElement_ )
+      , descriptorCount( descriptorCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , srcSet( rhs.srcSet )
+      , srcBinding( rhs.srcBinding )
+      , srcArrayElement( rhs.srcArrayElement )
+      , dstSet( rhs.dstSet )
+      , dstBinding( rhs.dstBinding )
+      , dstArrayElement( rhs.dstArrayElement )
+      , descriptorCount( rhs.descriptorCount )
+    {}
+
+    CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( CopyDescriptorSet ) - offsetof( CopyDescriptorSet, pNext ) );
+      return *this;
+    }
+
+    CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    CopyDescriptorSet& operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyDescriptorSet const *>( &rhs );
+      return *this;
+    }
+
+    CopyDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSet = srcSet_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcBinding = srcBinding_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcArrayElement = srcArrayElement_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSet = dstSet_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstBinding = dstBinding_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstArrayElement = dstArrayElement_;
+      return *this;
+    }
+
+    CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    operator VkCopyDescriptorSet const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkCopyDescriptorSet*>( this );
+    }
+
+    operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkCopyDescriptorSet*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( CopyDescriptorSet const& ) const = default;
+#else
+    bool operator==( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcSet == rhs.srcSet )
+          && ( srcBinding == rhs.srcBinding )
+          && ( srcArrayElement == rhs.srcArrayElement )
+          && ( dstSet == rhs.dstSet )
+          && ( dstBinding == rhs.dstBinding )
+          && ( dstArrayElement == rhs.dstArrayElement )
+          && ( descriptorCount == rhs.descriptorCount );
+    }
+
+    bool operator!=( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {};
+    uint32_t srcBinding = {};
+    uint32_t srcArrayElement = {};
+    VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
+    uint32_t dstBinding = {};
+    uint32_t dstArrayElement = {};
+    uint32_t descriptorCount = {};
+  };
+  static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<CopyDescriptorSet>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkD3D.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkD3D.hpp
new file mode 100644
index 00000000..3caaf9ba
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkD3D.hpp
@@ -0,0 +1,169 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("Combined Software") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
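+// Editorial note: a sketch of how CopyDescriptorSet (from VkCopy.hpp above)
+// is consumed, not part of the generated header. It assumes a vk::Device
+// `device` and two compatible descriptor sets `src` and `dst`; all names are
+// illustrative:
+//
+//   vk::CopyDescriptorSet copy{};
+//   copy.setSrcSet( src ).setSrcBinding( 0 )
+//       .setDstSet( dst ).setDstBinding( 0 )
+//       .setDescriptorCount( 1 );
+//   device.updateDescriptorSets( nullptr, copy );   // no writes, one copy
+//
+// The chained setters work because each setter returns *this by reference,
+// which is the pattern used throughout these generated wrappers.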
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct D3D12FenceSubmitInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( uint32_t waitSemaphoreValuesCount_ = {},
+                                                  const uint64_t* pWaitSemaphoreValues_ = {},
+                                                  uint32_t signalSemaphoreValuesCount_ = {},
+                                                  const uint64_t* pSignalSemaphoreValues_ = {} ) VULKAN_HPP_NOEXCEPT
+      : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ )
+      , pWaitSemaphoreValues( pWaitSemaphoreValues_ )
+      , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ )
+      , pSignalSemaphoreValues( pSignalSemaphoreValues_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , waitSemaphoreValuesCount( rhs.waitSemaphoreValuesCount )
+      , pWaitSemaphoreValues( rhs.pWaitSemaphoreValues )
+      , signalSemaphoreValuesCount( rhs.signalSemaphoreValuesCount )
+      , pSignalSemaphoreValues( rhs.pSignalSemaphoreValues )
+    {}
+
+    D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( D3D12FenceSubmitInfoKHR ) - offsetof( D3D12FenceSubmitInfoKHR, pNext ) );
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    D3D12FenceSubmitInfoKHR& operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreValuesCount = waitSemaphoreValuesCount_;
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphoreValues = pWaitSemaphoreValues_;
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreValuesCount = signalSemaphoreValuesCount_;
+      return *this;
+    }
+
+    D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphoreValues = pSignalSemaphoreValues_;
+      return *this;
+    }
+
+    operator VkD3D12FenceSubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkD3D12FenceSubmitInfoKHR*>( this );
+    }
+
+    operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkD3D12FenceSubmitInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( D3D12FenceSubmitInfoKHR const& ) const = default;
+#else
+    bool operator==( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount )
+          && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
+          && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount )
+          && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
+    }
+
+    bool operator!=( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR;
+    const void* pNext = {};
+    uint32_t waitSemaphoreValuesCount = {};
+    const uint64_t* pWaitSemaphoreValues = {};
+    uint32_t signalSemaphoreValuesCount = {};
+    const uint64_t* pSignalSemaphoreValues = {};
+  };
+  static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<D3D12FenceSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkDebug.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkDebug.hpp
new file mode 100644
index 00000000..f3a827b3
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkDebug.hpp
@@ -0,0 +1,1055 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("Combined Software") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
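+// Editorial note: D3D12FenceSubmitInfoKHR above is compiled only when
+// VK_USE_PLATFORM_WIN32_KHR is defined, so it is inert in this Android build;
+// it appears because the generator emits the full registry. On Windows it
+// would extend a queue submission through the pNext chain, roughly (sketch,
+// illustrative names):
+//
+//   uint64_t waitValue = 1, signalValue = 2;
+//   vk::D3D12FenceSubmitInfoKHR fenceInfo{ 1, &waitValue, 1, &signalValue };
+//   vk::SubmitInfo submit{};
+//   submit.pNext = &fenceInfo;   // chained into VkSubmitInfo via pNext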
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct DebugMarkerMarkerInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( const char* pMarkerName_ = {},
+                                                      std::array<float,4> const& color_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pMarkerName( pMarkerName_ )
+      , color{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,4,4>::copy( color, color_ );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , pMarkerName( rhs.pMarkerName )
+      , color{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,4,4>::copy( color, rhs.color );
+    }
+
+    DebugMarkerMarkerInfoEXT & operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DebugMarkerMarkerInfoEXT ) - offsetof( DebugMarkerMarkerInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DebugMarkerMarkerInfoEXT& operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DebugMarkerMarkerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugMarkerMarkerInfoEXT & setPMarkerName( const char* pMarkerName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMarkerName = pMarkerName_;
+      return *this;
+    }
+
+    DebugMarkerMarkerInfoEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( color, color_.data(), 4 * sizeof( float ) );
+      return *this;
+    }
+
+    operator VkDebugMarkerMarkerInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( this );
+    }
+
+    operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugMarkerMarkerInfoEXT const& ) const = default;
+#else
+    bool operator==( DebugMarkerMarkerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pMarkerName == rhs.pMarkerName )
+          && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 );
+    }
+
+    bool operator!=( DebugMarkerMarkerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT;
+    const void* pNext = {};
+    const char* pMarkerName = {};
+    float color[4] = {};
+  };
+  static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugMarkerMarkerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DebugMarkerObjectNameInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown,
+                                                       uint64_t object_ = {},
+                                                       const char* pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT
+      : objectType( objectType_ )
+      , object( object_ )
+      , pObjectName( pObjectName_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , objectType( rhs.objectType )
+      , object( rhs.object )
+      , pObjectName( rhs.pObjectName )
+    {}
+
+    DebugMarkerObjectNameInfoEXT & operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DebugMarkerObjectNameInfoEXT ) - offsetof( DebugMarkerObjectNameInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DebugMarkerObjectNameInfoEXT& operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
+    {
+      object = object_;
+      return *this;
+    }
+
+    DebugMarkerObjectNameInfoEXT & setPObjectName( const char* pObjectName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pObjectName = pObjectName_;
+      return *this;
+    }
+
+    operator VkDebugMarkerObjectNameInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( this );
+    }
+
+    operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugMarkerObjectNameInfoEXT const& ) const = default;
+#else
+    bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( object == rhs.object )
+          && ( pObjectName == rhs.pObjectName );
+    }
+
+    bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+    uint64_t object = {};
+    const char* pObjectName = {};
+  };
+  static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugMarkerObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
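+
+  // Editorial note: a sketch of naming an object through VK_EXT_debug_marker,
+  // not part of the generated header. It assumes a vk::Device `device` with
+  // the extension enabled, a vk::Queue `queue`, and a dispatch loader `d`;
+  // names are illustrative:
+  //
+  //   vk::DebugMarkerObjectNameInfoEXT nameInfo{
+  //       vk::DebugReportObjectTypeEXT::eQueue,
+  //       reinterpret_cast<uint64_t>( static_cast<VkQueue>( queue ) ),
+  //       "Primary graphics queue" };
+  //   device.debugMarkerSetObjectNameEXT( nameInfo, d );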
+
+  struct DebugMarkerObjectTagInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown,
+                                                      uint64_t object_ = {},
+                                                      uint64_t tagName_ = {},
+                                                      size_t tagSize_ = {},
+                                                      const void* pTag_ = {} ) VULKAN_HPP_NOEXCEPT
+      : objectType( objectType_ )
+      , object( object_ )
+      , tagName( tagName_ )
+      , tagSize( tagSize_ )
+      , pTag( pTag_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , objectType( rhs.objectType )
+      , object( rhs.object )
+      , tagName( rhs.tagName )
+      , tagSize( rhs.tagSize )
+      , pTag( rhs.pTag )
+    {}
+
+    DebugMarkerObjectTagInfoEXT & operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DebugMarkerObjectTagInfoEXT ) - offsetof( DebugMarkerObjectTagInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DebugMarkerObjectTagInfoEXT& operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
+    {
+      object = object_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagName = tagName_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagSize = tagSize_;
+      return *this;
+    }
+
+    DebugMarkerObjectTagInfoEXT & setPTag( const void* pTag_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTag = pTag_;
+      return *this;
+    }
+
+    operator VkDebugMarkerObjectTagInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( this );
+    }
+
+    operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugMarkerObjectTagInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugMarkerObjectTagInfoEXT const& ) const = default;
+#else
+    bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( object == rhs.object )
+          && ( tagName == rhs.tagName )
+          && ( tagSize == rhs.tagSize )
+          && ( pTag == rhs.pTag );
+    }
+
+    bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
+    uint64_t object = {};
+    uint64_t tagName = {};
+    size_t tagSize = {};
+    const void* pTag = {};
+  };
+  static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugMarkerObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DebugReportCallbackCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {},
+                                                           PFN_vkDebugReportCallbackEXT pfnCallback_ = {},
+                                                           void* pUserData_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , pfnCallback( pfnCallback_ )
+      , pUserData( pUserData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , pfnCallback( rhs.pfnCallback )
+      , pUserData( rhs.pUserData )
+    {}
+
+    DebugReportCallbackCreateInfoEXT & operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DebugReportCallbackCreateInfoEXT ) - offsetof( DebugReportCallbackCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DebugReportCallbackCreateInfoEXT& operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnCallback = pfnCallback_;
+      return *this;
+    }
+
+    DebugReportCallbackCreateInfoEXT & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+
+    operator VkDebugReportCallbackCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( this );
+    }
+
+    operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugReportCallbackCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugReportCallbackCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pfnCallback == rhs.pfnCallback )
+          && ( pUserData == rhs.pUserData );
+    }
+
+    bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {};
+    PFN_vkDebugReportCallbackEXT pfnCallback = {};
+    void* pUserData = {};
+  };
+  static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugReportCallbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
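+
+  // Editorial note: the pfnCallback member above must match
+  // PFN_vkDebugReportCallbackEXT. A minimal callback looks like this
+  // (sketch, not part of the generated header):
+  //
+  //   VKAPI_ATTR VkBool32 VKAPI_CALL debugReportCallback(
+  //       VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType,
+  //       uint64_t object, size_t location, int32_t messageCode,
+  //       const char* pLayerPrefix, const char* pMessage, void* pUserData )
+  //   {
+  //     // forward pMessage to the application's logger here
+  //     return VK_FALSE;   // VK_FALSE = do not abort the triggering call
+  //   }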
+
+  struct DebugUtilsLabelEXT
+  {
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( const char* pLabelName_ = {},
+                                                std::array<float,4> const& color_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pLabelName( pLabelName_ )
+      , color{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,4,4>::copy( color, color_ );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , pLabelName( rhs.pLabelName )
+      , color{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,4,4>::copy( color, rhs.color );
+    }
+
+    DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DebugUtilsLabelEXT ) - offsetof( DebugUtilsLabelEXT, pNext ) );
+      return *this;
+    }
+
+    DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DebugUtilsLabelEXT& operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const *>( &rhs );
+      return *this;
+    }
+
+    DebugUtilsLabelEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugUtilsLabelEXT & setPLabelName( const char* pLabelName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pLabelName = pLabelName_;
+      return *this;
+    }
+
+    DebugUtilsLabelEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( color, color_.data(), 4 * sizeof( float ) );
+      return *this;
+    }
+
+    operator VkDebugUtilsLabelEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsLabelEXT*>( this );
+    }
+
+    operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsLabelEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugUtilsLabelEXT const& ) const = default;
+#else
+    bool operator==( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pLabelName == rhs.pLabelName )
+          && ( memcmp( color, rhs.color, 4 * sizeof( float ) ) == 0 );
+    }
+
+    bool operator!=( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT;
+    const void* pNext = {};
+    const char* pLabelName = {};
+    float color[4] = {};
+  };
+  static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugUtilsLabelEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DebugUtilsObjectNameInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
+                                                      uint64_t objectHandle_ = {},
+                                                      const char* pObjectName_ = {} ) VULKAN_HPP_NOEXCEPT
+      : objectType( objectType_ )
+      , objectHandle( objectHandle_ )
+      , pObjectName( pObjectName_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , objectType( rhs.objectType )
+      , objectHandle( rhs.objectHandle )
+      , pObjectName( rhs.pObjectName )
+    {}
+
+    DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DebugUtilsObjectNameInfoEXT ) - offsetof( DebugUtilsObjectNameInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DebugUtilsObjectNameInfoEXT& operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DebugUtilsObjectNameInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugUtilsObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    DebugUtilsObjectNameInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectHandle = objectHandle_;
+      return *this;
+    }
+
+    DebugUtilsObjectNameInfoEXT & setPObjectName( const char* pObjectName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pObjectName = pObjectName_;
+      return *this;
+    }
+
+    operator VkDebugUtilsObjectNameInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( this );
+    }
+
+    operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsObjectNameInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugUtilsObjectNameInfoEXT const& ) const = default;
+#else
+    bool operator==( DebugUtilsObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( objectHandle == rhs.objectHandle )
+          && ( pObjectName == rhs.pObjectName );
+    }
+
+    bool operator!=( DebugUtilsObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
+    uint64_t objectHandle = {};
+    const char* pObjectName = {};
+  };
+  static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugUtilsObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
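+
+  // Editorial note: DebugUtilsLabelEXT is typically used to bracket
+  // command-buffer regions for capture tools such as RenderDoc. A sketch,
+  // assuming a vk::CommandBuffer `cmd` and dispatch loader `d` (illustrative):
+  //
+  //   cmd.beginDebugUtilsLabelEXT(
+  //       vk::DebugUtilsLabelEXT{ "Shadow pass", { { 0.1f, 0.1f, 0.4f, 1.0f } } }, d );
+  //   // ... record the pass ...
+  //   cmd.endDebugUtilsLabelEXT( d );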
+
+  struct DebugUtilsMessengerCallbackDataEXT
+  {
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {},
+                                                                const char* pMessageIdName_ = {},
+                                                                int32_t messageIdNumber_ = {},
+                                                                const char* pMessage_ = {},
+                                                                uint32_t queueLabelCount_ = {},
+                                                                const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ = {},
+                                                                uint32_t cmdBufLabelCount_ = {},
+                                                                const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ = {},
+                                                                uint32_t objectCount_ = {},
+                                                                const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , pMessageIdName( pMessageIdName_ )
+      , messageIdNumber( messageIdNumber_ )
+      , pMessage( pMessage_ )
+      , queueLabelCount( queueLabelCount_ )
+      , pQueueLabels( pQueueLabels_ )
+      , cmdBufLabelCount( cmdBufLabelCount_ )
+      , pCmdBufLabels( pCmdBufLabels_ )
+      , objectCount( objectCount_ )
+      , pObjects( pObjects_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , pMessageIdName( rhs.pMessageIdName )
+      , messageIdNumber( rhs.messageIdNumber )
+      , pMessage( rhs.pMessage )
+      , queueLabelCount( rhs.queueLabelCount )
+      , pQueueLabels( rhs.pQueueLabels )
+      , cmdBufLabelCount( rhs.cmdBufLabelCount )
+      , pCmdBufLabels( rhs.pCmdBufLabels )
+      , objectCount( rhs.objectCount )
+      , pObjects( rhs.pObjects )
+    {}
+
+    DebugUtilsMessengerCallbackDataEXT & operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DebugUtilsMessengerCallbackDataEXT ) - offsetof( DebugUtilsMessengerCallbackDataEXT, pNext ) );
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT& operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const *>( &rhs );
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char* pMessageIdName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMessageIdName = pMessageIdName_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setMessageIdNumber( int32_t messageIdNumber_ ) VULKAN_HPP_NOEXCEPT
+    {
+      messageIdNumber = messageIdNumber_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setPMessage( const char* pMessage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMessage = pMessage_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setQueueLabelCount( uint32_t queueLabelCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueLabelCount = queueLabelCount_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueLabels = pQueueLabels_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cmdBufLabelCount = cmdBufLabelCount_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCmdBufLabels = pCmdBufLabels_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectCount = objectCount_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCallbackDataEXT & setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pObjects = pObjects_;
+      return *this;
+    }
+
+    operator VkDebugUtilsMessengerCallbackDataEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( this );
+    }
+
+    operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugUtilsMessengerCallbackDataEXT const& ) const = default;
+#else
+    bool operator==( DebugUtilsMessengerCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pMessageIdName == rhs.pMessageIdName )
+          && ( messageIdNumber == rhs.messageIdNumber )
+          && ( pMessage == rhs.pMessage )
+          && ( queueLabelCount == rhs.queueLabelCount )
+          && ( pQueueLabels == rhs.pQueueLabels )
+          && ( cmdBufLabelCount == rhs.cmdBufLabelCount )
+          && ( pCmdBufLabels == rhs.pCmdBufLabels )
+          && ( objectCount == rhs.objectCount )
+          && ( pObjects == rhs.pObjects );
+    }
+
+    bool operator!=( DebugUtilsMessengerCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {};
+    const char* pMessageIdName = {};
+    int32_t messageIdNumber = {};
+    const char* pMessage = {};
+    uint32_t queueLabelCount = {};
+    const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels = {};
+    uint32_t cmdBufLabelCount = {};
+    const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels = {};
+    uint32_t objectCount = {};
+    const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects = {};
+  };
+  static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugUtilsMessengerCallbackDataEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DebugUtilsMessengerCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {},
+                                                           VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {},
+                                                           VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {},
+                                                           PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {},
+                                                           void* pUserData_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , messageSeverity( messageSeverity_ )
+      , messageType( messageType_ )
+      , pfnUserCallback( pfnUserCallback_ )
+      , pUserData( pUserData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , messageSeverity( rhs.messageSeverity )
+      , messageType( rhs.messageType )
+      , pfnUserCallback( rhs.pfnUserCallback )
+      , pUserData( rhs.pUserData )
+    {}
+
+    DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DebugUtilsMessengerCreateInfoEXT ) - offsetof( DebugUtilsMessengerCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT& operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT & setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT
+    {
+      messageSeverity = messageSeverity_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT & setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      messageType = messageType_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pfnUserCallback = pfnUserCallback_;
+      return *this;
+    }
+
+    DebugUtilsMessengerCreateInfoEXT & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pUserData = pUserData_;
+      return *this;
+    }
+
+    operator VkDebugUtilsMessengerCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( this );
+    }
+
+    operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugUtilsMessengerCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( DebugUtilsMessengerCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( messageSeverity == rhs.messageSeverity )
+          && ( messageType == rhs.messageType )
+          && ( pfnUserCallback == rhs.pfnUserCallback )
+          && ( pUserData == rhs.pUserData );
+    }
+
+    bool operator!=( DebugUtilsMessengerCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {};
+    VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {};
+    PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {};
+    void* pUserData = {};
+  };
+  static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugUtilsMessengerCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DebugUtilsObjectTagInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown,
+                                                     uint64_t objectHandle_ = {},
+                                                     uint64_t tagName_ = {},
+                                                     size_t tagSize_ = {},
+                                                     const void* pTag_ = {} ) VULKAN_HPP_NOEXCEPT
+      : objectType( objectType_ )
+      , objectHandle( objectHandle_ )
+      , tagName( tagName_ )
+      , tagSize( tagSize_ )
+      , pTag( pTag_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , objectType( rhs.objectType )
+      , objectHandle( rhs.objectHandle )
+      , tagName( rhs.tagName )
+      , tagSize( rhs.tagSize )
+      , pTag( rhs.pTag )
+    {}
+
+    DebugUtilsObjectTagInfoEXT & operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DebugUtilsObjectTagInfoEXT ) - offsetof( DebugUtilsObjectTagInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DebugUtilsObjectTagInfoEXT& operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectType = objectType_;
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      objectHandle = objectHandle_;
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagName = tagName_;
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tagSize = tagSize_;
+      return *this;
+    }
+
+    DebugUtilsObjectTagInfoEXT & setPTag( const void* pTag_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTag = pTag_;
+      return *this;
+    }
+
+    operator VkDebugUtilsObjectTagInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( this );
+    }
+
+    operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDebugUtilsObjectTagInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DebugUtilsObjectTagInfoEXT const& ) const = default;
+#else
+    bool operator==( DebugUtilsObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( objectType == rhs.objectType )
+          && ( objectHandle == rhs.objectHandle )
+          && ( tagName == rhs.tagName )
+          && ( tagSize == rhs.tagSize )
+          && ( pTag == rhs.pTag );
+    }
+
+    bool operator!=( DebugUtilsObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
+    uint64_t objectHandle = {};
+    uint64_t tagName = {};
+    size_t tagSize = {};
+    const void* pTag = {};
+  };
+  static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DebugUtilsObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkDedicated.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkDedicated.hpp
new file mode 100644
index 00000000..4bd60ec9
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkDedicated.hpp
@@ -0,0 +1,296 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("Combined Software") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
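+// Editorial note: a sketch wiring up DebugUtilsMessengerCreateInfoEXT from
+// VkDebug.hpp above, not part of the generated header. It assumes a
+// vk::Instance `instance` created with VK_EXT_debug_utils, a dispatch loader
+// `d`, and a callback matching PFN_vkDebugUtilsMessengerCallbackEXT:
+//
+//   VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback(
+//       VkDebugUtilsMessageSeverityFlagBitsEXT severity,
+//       VkDebugUtilsMessageTypeFlagsEXT type,
+//       const VkDebugUtilsMessengerCallbackDataEXT* pData, void* pUserData );
+//
+//   vk::DebugUtilsMessengerCreateInfoEXT createInfo{
+//       {},
+//       vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
+//           vk::DebugUtilsMessageSeverityFlagBitsEXT::eError,
+//       vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation,
+//       debugCallback };
+//   auto messenger = instance.createDebugUtilsMessengerEXT( createInfo, nullptr, d );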
+ +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkAllocation.hpp" +#include "VkBind.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkCommand.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkConditional.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct DedicatedAllocationBufferCreateInfoNV + { + VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT + : dedicatedAllocation( dedicatedAllocation_ ) + {} + + VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , dedicatedAllocation( rhs.dedicatedAllocation ) + {} + + DedicatedAllocationBufferCreateInfoNV & operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( DedicatedAllocationBufferCreateInfoNV ) - offsetof( DedicatedAllocationBufferCreateInfoNV, pNext ) ); + return *this; + } + + DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + DedicatedAllocationBufferCreateInfoNV& operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + DedicatedAllocationBufferCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DedicatedAllocationBufferCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT + { + dedicatedAllocation = dedicatedAllocation_; + return *this; + } + + operator VkDedicatedAllocationBufferCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DedicatedAllocationBufferCreateInfoNV const& ) const = default; +#else + bool operator==( DedicatedAllocationBufferCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( dedicatedAllocation == rhs.dedicatedAllocation ); + } + + bool operator!=( DedicatedAllocationBufferCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; + }; + static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct DedicatedAllocationImageCreateInfoNV + { + VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT + : dedicatedAllocation( dedicatedAllocation_ ) + {} + + VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , dedicatedAllocation( rhs.dedicatedAllocation ) + {} + + DedicatedAllocationImageCreateInfoNV & operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( DedicatedAllocationImageCreateInfoNV ) - offsetof( DedicatedAllocationImageCreateInfoNV, pNext ) ); + return *this; + } + + DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + DedicatedAllocationImageCreateInfoNV& operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + DedicatedAllocationImageCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DedicatedAllocationImageCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT + { + dedicatedAllocation = dedicatedAllocation_; + return *this; + } + + operator VkDedicatedAllocationImageCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DedicatedAllocationImageCreateInfoNV const& ) const = default; +#else + bool operator==( DedicatedAllocationImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( dedicatedAllocation == rhs.dedicatedAllocation ); + } + + bool operator!=( DedicatedAllocationImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {}; + }; + static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
+
+  struct DedicatedAllocationMemoryAllocateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::Image image_ = {},
+                                                                  VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT
+      : image( image_ )
+      , buffer( buffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , image( rhs.image )
+      , buffer( rhs.buffer )
+    {}
+
+    DedicatedAllocationMemoryAllocateInfoNV & operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DedicatedAllocationMemoryAllocateInfoNV ) - offsetof( DedicatedAllocationMemoryAllocateInfoNV, pNext ) );
+      return *this;
+    }
+
+    DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DedicatedAllocationMemoryAllocateInfoNV& operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DedicatedAllocationMemoryAllocateInfoNV & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    DedicatedAllocationMemoryAllocateInfoNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    operator VkDedicatedAllocationMemoryAllocateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
+    }
+
+    operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const& ) const = default;
+#else
+    bool operator==( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image )
+          && ( buffer == rhs.buffer );
+    }
+
+    bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+  };
+  static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DedicatedAllocationMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkDescriptor.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkDescriptor.hpp
new file mode 100644
index 00000000..0257a2be
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkDescriptor.hpp
@@ -0,0 +1,1351 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkDescriptor.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct DescriptorBufferInfo
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {},
+                                               VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+                                               VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT
+      : buffer( buffer_ )
+      , offset( offset_ )
+      , range( range_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : buffer( rhs.buffer )
+      , offset( rhs.offset )
+      , range( rhs.range )
+    {}
+
+    DescriptorBufferInfo & operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( DescriptorBufferInfo ) );
+      return *this;
+    }
+
+    DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorBufferInfo& operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorBufferInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      buffer = buffer_;
+      return *this;
+    }
+
+    DescriptorBufferInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    DescriptorBufferInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
+    {
+      range = range_;
+      return *this;
+    }
+
+    operator VkDescriptorBufferInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorBufferInfo*>( this );
+    }
+
+    operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorBufferInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorBufferInfo const& ) const = default;
+#else
+    bool operator==( DescriptorBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( buffer == rhs.buffer )
+          && ( offset == rhs.offset )
+          && ( range == rhs.range );
+    }
+
+    bool operator!=( DescriptorBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Buffer buffer = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize range = {};
+  };
+  static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorBufferInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorImageInfo
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorImageInfo( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {},
+                                              VULKAN_HPP_NAMESPACE::ImageView imageView_ = {},
+                                              VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+      : sampler( sampler_ )
+      , imageView( imageView_ )
+      , imageLayout( imageLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : sampler( rhs.sampler )
+      , imageView( rhs.imageView )
+      , imageLayout( rhs.imageLayout )
+    {}
+
+    DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( DescriptorImageInfo ) );
+      return *this;
+    }
+
+    DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorImageInfo& operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampler = sampler_;
+      return *this;
+    }
+
+    DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageView = imageView_;
+      return *this;
+    }
+
+    DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageLayout = imageLayout_;
+      return *this;
+    }
+
+    operator VkDescriptorImageInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorImageInfo*>( this );
+    }
+
+    operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorImageInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorImageInfo const& ) const = default;
+#else
+    bool operator==( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sampler == rhs.sampler )
+          && ( imageView == rhs.imageView )
+          && ( imageLayout == rhs.imageLayout );
+    }
+
+    bool operator!=( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Sampler sampler = {};
+    VULKAN_HPP_NAMESPACE::ImageView imageView = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+  };
+  static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorImageInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorPoolSize
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+                                             uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+      , descriptorCount( descriptorCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const& rhs ) VULKAN_HPP_NOEXCEPT
+      : type( rhs.type )
+      , descriptorCount( rhs.descriptorCount )
+    {}
+
+    DescriptorPoolSize & operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( DescriptorPoolSize ) );
+      return *this;
+    }
+
+    DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorPoolSize& operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolSize const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorPoolSize & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    DescriptorPoolSize & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    operator VkDescriptorPoolSize const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorPoolSize*>( this );
+    }
+
+    operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorPoolSize*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorPoolSize const& ) const = default;
+#else
+    bool operator==( DescriptorPoolSize const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( type == rhs.type )
+          && ( descriptorCount == rhs.descriptorCount );
+    }
+
+    bool operator!=( DescriptorPoolSize const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    uint32_t descriptorCount = {};
+  };
+  static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorPoolSize>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorPoolCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {},
+                                                   uint32_t maxSets_ = {},
+                                                   uint32_t poolSizeCount_ = {},
+                                                   const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , maxSets( maxSets_ )
+      , poolSizeCount( poolSizeCount_ )
+      , pPoolSizes( pPoolSizes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , maxSets( rhs.maxSets )
+      , poolSizeCount( rhs.poolSizeCount )
+      , pPoolSizes( rhs.pPoolSizes )
+    {}
+
+    DescriptorPoolCreateInfo & operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DescriptorPoolCreateInfo ) - offsetof( DescriptorPoolCreateInfo, pNext ) );
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorPoolCreateInfo& operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo & setMaxSets( uint32_t maxSets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxSets = maxSets_;
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo & setPoolSizeCount( uint32_t poolSizeCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      poolSizeCount = poolSizeCount_;
+      return *this;
+    }
+
+    DescriptorPoolCreateInfo & setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPoolSizes = pPoolSizes_;
+      return *this;
+    }
+
+    operator VkDescriptorPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorPoolCreateInfo*>( this );
+    }
+
+    operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorPoolCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorPoolCreateInfo const& ) const = default;
+#else
+    bool operator==( DescriptorPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( maxSets == rhs.maxSets )
+          && ( poolSizeCount == rhs.poolSizeCount )
+          && ( pPoolSizes == rhs.pPoolSizes );
+    }
+
+    bool operator!=( DescriptorPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {};
+    uint32_t maxSets = {};
+    uint32_t poolSizeCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes = {};
+  };
+  static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
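+
+  // Usage sketch (editorial illustration, not generated code): DescriptorPoolSize
+  // entries feed DescriptorPoolCreateInfo when creating a pool; `device` is an
+  // assumed, already-created Device handle.
+  //
+  //   DescriptorPoolSize poolSize( DescriptorType::eUniformBuffer, 16 );
+  //   DescriptorPoolCreateInfo poolInfo( DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 16, 1, &poolSize );
+  //   DescriptorPool pool = device.createDescriptorPool( poolInfo );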
+
+  struct DescriptorPoolInlineUniformBlockCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT( uint32_t maxInlineUniformBlockBindings_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , maxInlineUniformBlockBindings( rhs.maxInlineUniformBlockBindings )
+    {}
+
+    DescriptorPoolInlineUniformBlockCreateInfoEXT & operator=( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) - offsetof( DescriptorPoolInlineUniformBlockCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DescriptorPoolInlineUniformBlockCreateInfoEXT( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorPoolInlineUniformBlockCreateInfoEXT& operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorPoolInlineUniformBlockCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorPoolInlineUniformBlockCreateInfoEXT & setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_;
+      return *this;
+    }
+
+    operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorPoolInlineUniformBlockCreateInfoEXT*>( this );
+    }
+
+    operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings );
+    }
+
+    bool operator!=( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT;
+    const void* pNext = {};
+    uint32_t maxInlineUniformBlockBindings = {};
+  };
+  static_assert( sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorPoolInlineUniformBlockCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorSetAllocateInfo
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {},
+                                                    uint32_t descriptorSetCount_ = {},
+                                                    const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {} ) VULKAN_HPP_NOEXCEPT
+      : descriptorPool( descriptorPool_ )
+      , descriptorSetCount( descriptorSetCount_ )
+      , pSetLayouts( pSetLayouts_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , descriptorPool( rhs.descriptorPool )
+      , descriptorSetCount( rhs.descriptorSetCount )
+      , pSetLayouts( rhs.pSetLayouts )
+    {}
+
+    DescriptorSetAllocateInfo & operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DescriptorSetAllocateInfo ) - offsetof( DescriptorSetAllocateInfo, pNext ) );
+      return *this;
+    }
+
+    DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorSetAllocateInfo& operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorSetAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorSetAllocateInfo & setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorPool = descriptorPool_;
+      return *this;
+    }
+
+    DescriptorSetAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSetCount = descriptorSetCount_;
+      return *this;
+    }
+
+    DescriptorSetAllocateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSetLayouts = pSetLayouts_;
+      return *this;
+    }
+
+    operator VkDescriptorSetAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetAllocateInfo*>( this );
+    }
+
+    operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetAllocateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSetAllocateInfo const& ) const = default;
+#else
+    bool operator==( DescriptorSetAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( descriptorPool == rhs.descriptorPool )
+          && ( descriptorSetCount == rhs.descriptorSetCount )
+          && ( pSetLayouts == rhs.pSetLayouts );
+    }
+
+    bool operator!=( DescriptorSetAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {};
+    uint32_t descriptorSetCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts = {};
+  };
+  static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetAllocateInfo>::value, "struct wrapper is not a standard layout!" );
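+
+  // Usage sketch (editorial illustration, not generated code): allocating sets
+  // from a pool; `pool` and `setLayout` are assumed pre-existing handles, and
+  // `device` is an assumed Device.
+  //
+  //   DescriptorSetAllocateInfo allocInfo( pool, 1, &setLayout );
+  //   std::vector<DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );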
+
+  struct DescriptorSetLayoutBinding
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( uint32_t binding_ = {},
+                                                     VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+                                                     uint32_t descriptorCount_ = {},
+                                                     VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {},
+                                                     const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT
+      : binding( binding_ )
+      , descriptorType( descriptorType_ )
+      , descriptorCount( descriptorCount_ )
+      , stageFlags( stageFlags_ )
+      , pImmutableSamplers( pImmutableSamplers_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const& rhs ) VULKAN_HPP_NOEXCEPT
+      : binding( rhs.binding )
+      , descriptorType( rhs.descriptorType )
+      , descriptorCount( rhs.descriptorCount )
+      , stageFlags( rhs.stageFlags )
+      , pImmutableSamplers( rhs.pImmutableSamplers )
+    {}
+
+    DescriptorSetLayoutBinding & operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( DescriptorSetLayoutBinding ) );
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorSetLayoutBinding& operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageFlags = stageFlags_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBinding & setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImmutableSamplers = pImmutableSamplers_;
+      return *this;
+    }
+
+    operator VkDescriptorSetLayoutBinding const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutBinding*>( this );
+    }
+
+    operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutBinding*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSetLayoutBinding const& ) const = default;
+#else
+    bool operator==( DescriptorSetLayoutBinding const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( binding == rhs.binding )
+          && ( descriptorType == rhs.descriptorType )
+          && ( descriptorCount == rhs.descriptorCount )
+          && ( stageFlags == rhs.stageFlags )
+          && ( pImmutableSamplers == rhs.pImmutableSamplers );
+    }
+
+    bool operator!=( DescriptorSetLayoutBinding const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t binding = {};
+    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    uint32_t descriptorCount = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
+    const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers = {};
+  };
+  static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetLayoutBinding>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorSetLayoutBindingFlagsCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfoEXT( uint32_t bindingCount_ = {},
+                                                                       const VULKAN_HPP_NAMESPACE::DescriptorBindingFlagsEXT* pBindingFlags_ = {} ) VULKAN_HPP_NOEXCEPT
+      : bindingCount( bindingCount_ )
+      , pBindingFlags( pBindingFlags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfoEXT( DescriptorSetLayoutBindingFlagsCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , bindingCount( rhs.bindingCount )
+      , pBindingFlags( rhs.pBindingFlags )
+    {}
+
+    DescriptorSetLayoutBindingFlagsCreateInfoEXT & operator=( DescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DescriptorSetLayoutBindingFlagsCreateInfoEXT ) - offsetof( DescriptorSetLayoutBindingFlagsCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DescriptorSetLayoutBindingFlagsCreateInfoEXT( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorSetLayoutBindingFlagsCreateInfoEXT& operator=( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorSetLayoutBindingFlagsCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBindingFlagsCreateInfoEXT & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindingCount = bindingCount_;
+      return *this;
+    }
+
+    DescriptorSetLayoutBindingFlagsCreateInfoEXT & setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlagsEXT* pBindingFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBindingFlags = pBindingFlags_;
+      return *this;
+    }
+
+    operator VkDescriptorSetLayoutBindingFlagsCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT*>( this );
+    }
+
+    operator VkDescriptorSetLayoutBindingFlagsCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( DescriptorSetLayoutBindingFlagsCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( bindingCount == rhs.bindingCount )
+          && ( pBindingFlags == rhs.pBindingFlags );
+    }
+
+    bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfoEXT;
+    const void* pNext = {};
+    uint32_t bindingCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorBindingFlagsEXT* pBindingFlags = {};
+  };
+  static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfoEXT ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetLayoutBindingFlagsCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorSetLayoutCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {},
+                                                        uint32_t bindingCount_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , bindingCount( bindingCount_ )
+      , pBindings( pBindings_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , bindingCount( rhs.bindingCount )
+      , pBindings( rhs.pBindings )
+    {}
+
+    DescriptorSetLayoutCreateInfo & operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DescriptorSetLayoutCreateInfo ) - offsetof( DescriptorSetLayoutCreateInfo, pNext ) );
+      return *this;
+    }
+
+    DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorSetLayoutCreateInfo& operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorSetLayoutCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorSetLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DescriptorSetLayoutCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bindingCount = bindingCount_;
+      return *this;
+    }
+
+    DescriptorSetLayoutCreateInfo & setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBindings = pBindings_;
+      return *this;
+    }
+
+    operator VkDescriptorSetLayoutCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( this );
+    }
+
+    operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSetLayoutCreateInfo const& ) const = default;
+#else
+    bool operator==( DescriptorSetLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( bindingCount == rhs.bindingCount )
+          && ( pBindings == rhs.pBindings );
+    }
+
+    bool operator!=( DescriptorSetLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {};
+    uint32_t bindingCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings = {};
+  };
+  static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
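+
+  // Usage sketch (editorial illustration, not generated code): one uniform-buffer
+  // binding at binding 0, visible to the vertex stage; `device` is an assumed
+  // Device handle.
+  //
+  //   DescriptorSetLayoutBinding binding( 0, DescriptorType::eUniformBuffer, 1, ShaderStageFlagBits::eVertex );
+  //   DescriptorSetLayoutCreateInfo layoutInfo( {}, 1, &binding );
+  //   DescriptorSetLayout setLayout = device.createDescriptorSetLayout( layoutInfo );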
+
+  struct DescriptorSetLayoutSupport
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( VULKAN_HPP_NAMESPACE::Bool32 supported_ = {} ) VULKAN_HPP_NOEXCEPT
+      : supported( supported_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , supported( rhs.supported )
+    {}
+
+    DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DescriptorSetLayoutSupport ) - offsetof( DescriptorSetLayoutSupport, pNext ) );
+      return *this;
+    }
+
+    DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorSetLayoutSupport& operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const *>(&rhs);
+      return *this;
+    }
+
+    operator VkDescriptorSetLayoutSupport const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetLayoutSupport*>( this );
+    }
+
+    operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetLayoutSupport*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSetLayoutSupport const& ) const = default;
+#else
+    bool operator==( DescriptorSetLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supported == rhs.supported );
+    }
+
+    bool operator!=( DescriptorSetLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 supported = {};
+  };
+  static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetLayoutSupport>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorSetVariableDescriptorCountAllocateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfoEXT( uint32_t descriptorSetCount_ = {},
+                                                                              const uint32_t* pDescriptorCounts_ = {} ) VULKAN_HPP_NOEXCEPT
+      : descriptorSetCount( descriptorSetCount_ )
+      , pDescriptorCounts( pDescriptorCounts_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfoEXT( DescriptorSetVariableDescriptorCountAllocateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , descriptorSetCount( rhs.descriptorSetCount )
+      , pDescriptorCounts( rhs.pDescriptorCounts )
+    {}
+
+    DescriptorSetVariableDescriptorCountAllocateInfoEXT & operator=( DescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DescriptorSetVariableDescriptorCountAllocateInfoEXT ) - offsetof( DescriptorSetVariableDescriptorCountAllocateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DescriptorSetVariableDescriptorCountAllocateInfoEXT( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorSetVariableDescriptorCountAllocateInfoEXT& operator=( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorSetVariableDescriptorCountAllocateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorSetVariableDescriptorCountAllocateInfoEXT & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSetCount = descriptorSetCount_;
+      return *this;
+    }
+
+    DescriptorSetVariableDescriptorCountAllocateInfoEXT & setPDescriptorCounts( const uint32_t* pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDescriptorCounts = pDescriptorCounts_;
+      return *this;
+    }
+
+    operator VkDescriptorSetVariableDescriptorCountAllocateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT*>( this );
+    }
+
+    operator VkDescriptorSetVariableDescriptorCountAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfoEXT const& ) const = default;
+#else
+    bool operator==( DescriptorSetVariableDescriptorCountAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( descriptorSetCount == rhs.descriptorSetCount )
+          && ( pDescriptorCounts == rhs.pDescriptorCounts );
+    }
+
+    bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfoEXT;
+    const void* pNext = {};
+    uint32_t descriptorSetCount = {};
+    const uint32_t* pDescriptorCounts = {};
+  };
+  static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfoEXT ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountAllocateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorSetVariableDescriptorCountLayoutSupportEXT
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupportEXT( uint32_t maxVariableDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxVariableDescriptorCount( maxVariableDescriptorCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupportEXT( DescriptorSetVariableDescriptorCountLayoutSupportEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , maxVariableDescriptorCount( rhs.maxVariableDescriptorCount )
+    {}
+
+    DescriptorSetVariableDescriptorCountLayoutSupportEXT & operator=( DescriptorSetVariableDescriptorCountLayoutSupportEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DescriptorSetVariableDescriptorCountLayoutSupportEXT ) - offsetof( DescriptorSetVariableDescriptorCountLayoutSupportEXT, pNext ) );
+      return *this;
+    }
+
+    DescriptorSetVariableDescriptorCountLayoutSupportEXT( VkDescriptorSetVariableDescriptorCountLayoutSupportEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorSetVariableDescriptorCountLayoutSupportEXT& operator=( VkDescriptorSetVariableDescriptorCountLayoutSupportEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupportEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkDescriptorSetVariableDescriptorCountLayoutSupportEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupportEXT*>( this );
+    }
+
+    operator VkDescriptorSetVariableDescriptorCountLayoutSupportEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupportEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupportEXT const& ) const = default;
+#else
+    bool operator==( DescriptorSetVariableDescriptorCountLayoutSupportEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount );
+    }
+
+    bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupportEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupportEXT;
+    void* pNext = {};
+    uint32_t maxVariableDescriptorCount = {};
+  };
+  static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupportEXT ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupportEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountLayoutSupportEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorUpdateTemplateEntry
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( uint32_t dstBinding_ = {},
+                                                        uint32_t dstArrayElement_ = {},
+                                                        uint32_t descriptorCount_ = {},
+                                                        VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+                                                        size_t offset_ = {},
+                                                        size_t stride_ = {} ) VULKAN_HPP_NOEXCEPT
+      : dstBinding( dstBinding_ )
+      , dstArrayElement( dstArrayElement_ )
+      , descriptorCount( descriptorCount_ )
+      , descriptorType( descriptorType_ )
+      , offset( offset_ )
+      , stride( stride_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const& rhs ) VULKAN_HPP_NOEXCEPT
+      : dstBinding( rhs.dstBinding )
+      , dstArrayElement( rhs.dstArrayElement )
+      , descriptorCount( rhs.descriptorCount )
+      , descriptorType( rhs.descriptorType )
+      , offset( rhs.offset )
+      , stride( rhs.stride )
+    {}
+
+    DescriptorUpdateTemplateEntry & operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( DescriptorUpdateTemplateEntry ) );
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorUpdateTemplateEntry& operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntry & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstBinding = dstBinding_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntry & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstArrayElement = dstArrayElement_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntry & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntry & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntry & setOffset( size_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateEntry & setStride( size_t stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+
+    operator VkDescriptorUpdateTemplateEntry const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorUpdateTemplateEntry*>( this );
+    }
+
+    operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorUpdateTemplateEntry*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorUpdateTemplateEntry const& ) const = default;
+#else
+    bool operator==( DescriptorUpdateTemplateEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( dstBinding == rhs.dstBinding )
+          && ( dstArrayElement == rhs.dstArrayElement )
+          && ( descriptorCount == rhs.descriptorCount )
+          && ( descriptorType == rhs.descriptorType )
+          && ( offset == rhs.offset )
+          && ( stride == rhs.stride );
+    }
+
+    bool operator!=( DescriptorUpdateTemplateEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t dstBinding = {};
+    uint32_t dstArrayElement = {};
+    uint32_t descriptorCount = {};
+    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    size_t offset = {};
+    size_t stride = {};
+  };
+  static_assert( sizeof( DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorUpdateTemplateEntry>::value, "struct wrapper is not a standard layout!" );
+
+  struct DescriptorUpdateTemplateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {},
+                                                             uint32_t descriptorUpdateEntryCount_ = {},
+                                                             const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = {},
+                                                             VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet,
+                                                             VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {},
+                                                             VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
+                                                             VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {},
+                                                             uint32_t set_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , descriptorUpdateEntryCount( descriptorUpdateEntryCount_ )
+      , pDescriptorUpdateEntries( pDescriptorUpdateEntries_ )
+      , templateType( templateType_ )
+      , descriptorSetLayout( descriptorSetLayout_ )
+      , pipelineBindPoint( pipelineBindPoint_ )
+      , pipelineLayout( pipelineLayout_ )
+      , set( set_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , descriptorUpdateEntryCount( rhs.descriptorUpdateEntryCount )
+      , pDescriptorUpdateEntries( rhs.pDescriptorUpdateEntries )
+      , templateType( rhs.templateType )
+      , descriptorSetLayout( rhs.descriptorSetLayout )
+      , pipelineBindPoint( rhs.pipelineBindPoint )
+      , pipelineLayout( rhs.pipelineLayout )
+      , set( rhs.set )
+    {}
+
+    DescriptorUpdateTemplateCreateInfo & operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DescriptorUpdateTemplateCreateInfo ) - offsetof( DescriptorUpdateTemplateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DescriptorUpdateTemplateCreateInfo& operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorUpdateEntryCount = descriptorUpdateEntryCount_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDescriptorUpdateEntries = pDescriptorUpdateEntries_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      templateType = templateType_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorSetLayout = descriptorSetLayout_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineLayout = pipelineLayout_;
+      return *this;
+    }
+
+    DescriptorUpdateTemplateCreateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT
+    {
+      set = set_;
+      return *this;
+    }
+
+    operator VkDescriptorUpdateTemplateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( this );
+    }
+
+    operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DescriptorUpdateTemplateCreateInfo const& ) const = default;
+#else
+    bool operator==( DescriptorUpdateTemplateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount )
+          && ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries )
+          && ( templateType == rhs.templateType )
+          && ( descriptorSetLayout == rhs.descriptorSetLayout )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( pipelineLayout == rhs.pipelineLayout )
+          && ( set == rhs.set );
+    }
+
+    bool operator!=( DescriptorUpdateTemplateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {};
+    uint32_t descriptorUpdateEntryCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries = {};
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet;
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {};
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {};
+    uint32_t set = {};
+  };
+  static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DescriptorUpdateTemplateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkDevice.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkDevice.hpp
new file mode 100644
index 00000000..bcaa1020
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkDevice.hpp
@@ -0,0 +1,2200 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkDevice.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct DeviceQueueCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {},
+                                                uint32_t queueFamilyIndex_ = {},
+                                                uint32_t queueCount_ = {},
+                                                const float* pQueuePriorities_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , queueFamilyIndex( queueFamilyIndex_ )
+      , queueCount( queueCount_ )
+      , pQueuePriorities( pQueuePriorities_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , queueFamilyIndex( rhs.queueFamilyIndex )
+      , queueCount( rhs.queueCount )
+      , pQueuePriorities( rhs.pQueuePriorities )
+    {}
+
+    DeviceQueueCreateInfo & operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DeviceQueueCreateInfo ) - offsetof( DeviceQueueCreateInfo, pNext ) );
+      return *this;
+    }
+
+    DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceQueueCreateInfo& operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    DeviceQueueCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceQueueCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
+    DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueCount = queueCount_;
+      return *this;
+    }
+
+    DeviceQueueCreateInfo & setPQueuePriorities( const float* pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueuePriorities = pQueuePriorities_;
+      return *this;
+    }
+
+    operator VkDeviceQueueCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceQueueCreateInfo*>( this );
+    }
+
+    operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceQueueCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceQueueCreateInfo const& ) const = default;
+#else
+    bool operator==( DeviceQueueCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueFamilyIndex == rhs.queueFamilyIndex )
+          && ( queueCount == rhs.queueCount )
+          && ( pQueuePriorities == rhs.pQueuePriorities );
+    }
+
+    bool operator!=( DeviceQueueCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
+    uint32_t queueFamilyIndex = {};
+    uint32_t queueCount = {};
+    const float* pQueuePriorities = {};
+  };
+  static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceQueueCreateInfo>::value, "struct wrapper is not a standard layout!" );
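+
+  // Usage sketch (editorial illustration, not generated code): requesting one
+  // queue from an assumed `queueFamilyIndex` when creating a logical device;
+  // DeviceCreateInfo is presumably defined further down in this header, and
+  // `physicalDevice` is an assumed PhysicalDevice handle.
+  //
+  //   float priority = 1.0f;
+  //   DeviceQueueCreateInfo queueInfo( {}, queueFamilyIndex, 1, &priority );
+  //   DeviceCreateInfo deviceInfo( {}, 1, &queueInfo );
+  //   Device device = physicalDevice.createDevice( deviceInfo );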
+
+  struct PhysicalDeviceFeatures
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {},
shaderUniformBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT + : robustBufferAccess( robustBufferAccess_ ) + , fullDrawIndexUint32( fullDrawIndexUint32_ ) + , imageCubeArray( imageCubeArray_ ) + , independentBlend( independentBlend_ ) + , geometryShader( geometryShader_ ) + , tessellationShader( tessellationShader_ ) + , sampleRateShading( sampleRateShading_ ) + , dualSrcBlend( dualSrcBlend_ ) + , logicOp( logicOp_ ) + , multiDrawIndirect( multiDrawIndirect_ ) + , drawIndirectFirstInstance( drawIndirectFirstInstance_ ) + , depthClamp( depthClamp_ ) + , depthBiasClamp( depthBiasClamp_ ) + , fillModeNonSolid( fillModeNonSolid_ ) + , depthBounds( depthBounds_ ) + , wideLines( wideLines_ ) + , largePoints( largePoints_ ) + , alphaToOne( alphaToOne_ ) + , multiViewport( multiViewport_ ) + , samplerAnisotropy( samplerAnisotropy_ ) + , textureCompressionETC2( textureCompressionETC2_ ) + , textureCompressionASTC_LDR( textureCompressionASTC_LDR_ ) + , textureCompressionBC( textureCompressionBC_ ) + , occlusionQueryPrecise( occlusionQueryPrecise_ ) + , pipelineStatisticsQuery( pipelineStatisticsQuery_ ) + , vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ ) + , fragmentStoresAndAtomics( fragmentStoresAndAtomics_ ) + , shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ ) + , shaderImageGatherExtended( shaderImageGatherExtended_ ) + , shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ ) + , shaderStorageImageMultisample( shaderStorageImageMultisample_ ) + , shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ ) + , shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ ) + , shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ ) + , shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ ) + , shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ ) + , shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ ) + , shaderClipDistance( shaderClipDistance_ ) + , shaderCullDistance( shaderCullDistance_ ) + , shaderFloat64( shaderFloat64_ ) + , shaderInt64( shaderInt64_ ) + , shaderInt16( shaderInt16_ ) + , shaderResourceResidency( 
shaderResourceResidency_ ) + , shaderResourceMinLod( shaderResourceMinLod_ ) + , sparseBinding( sparseBinding_ ) + , sparseResidencyBuffer( sparseResidencyBuffer_ ) + , sparseResidencyImage2D( sparseResidencyImage2D_ ) + , sparseResidencyImage3D( sparseResidencyImage3D_ ) + , sparseResidency2Samples( sparseResidency2Samples_ ) + , sparseResidency4Samples( sparseResidency4Samples_ ) + , sparseResidency8Samples( sparseResidency8Samples_ ) + , sparseResidency16Samples( sparseResidency16Samples_ ) + , sparseResidencyAliased( sparseResidencyAliased_ ) + , variableMultisampleRate( variableMultisampleRate_ ) + , inheritedQueries( inheritedQueries_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const& rhs ) VULKAN_HPP_NOEXCEPT + : robustBufferAccess( rhs.robustBufferAccess ) + , fullDrawIndexUint32( rhs.fullDrawIndexUint32 ) + , imageCubeArray( rhs.imageCubeArray ) + , independentBlend( rhs.independentBlend ) + , geometryShader( rhs.geometryShader ) + , tessellationShader( rhs.tessellationShader ) + , sampleRateShading( rhs.sampleRateShading ) + , dualSrcBlend( rhs.dualSrcBlend ) + , logicOp( rhs.logicOp ) + , multiDrawIndirect( rhs.multiDrawIndirect ) + , drawIndirectFirstInstance( rhs.drawIndirectFirstInstance ) + , depthClamp( rhs.depthClamp ) + , depthBiasClamp( rhs.depthBiasClamp ) + , fillModeNonSolid( rhs.fillModeNonSolid ) + , depthBounds( rhs.depthBounds ) + , wideLines( rhs.wideLines ) + , largePoints( rhs.largePoints ) + , alphaToOne( rhs.alphaToOne ) + , multiViewport( rhs.multiViewport ) + , samplerAnisotropy( rhs.samplerAnisotropy ) + , textureCompressionETC2( rhs.textureCompressionETC2 ) + , textureCompressionASTC_LDR( rhs.textureCompressionASTC_LDR ) + , textureCompressionBC( rhs.textureCompressionBC ) + , occlusionQueryPrecise( rhs.occlusionQueryPrecise ) + , pipelineStatisticsQuery( rhs.pipelineStatisticsQuery ) + , vertexPipelineStoresAndAtomics( rhs.vertexPipelineStoresAndAtomics ) + , fragmentStoresAndAtomics( rhs.fragmentStoresAndAtomics ) + , shaderTessellationAndGeometryPointSize( rhs.shaderTessellationAndGeometryPointSize ) + , shaderImageGatherExtended( rhs.shaderImageGatherExtended ) + , shaderStorageImageExtendedFormats( rhs.shaderStorageImageExtendedFormats ) + , shaderStorageImageMultisample( rhs.shaderStorageImageMultisample ) + , shaderStorageImageReadWithoutFormat( rhs.shaderStorageImageReadWithoutFormat ) + , shaderStorageImageWriteWithoutFormat( rhs.shaderStorageImageWriteWithoutFormat ) + , shaderUniformBufferArrayDynamicIndexing( rhs.shaderUniformBufferArrayDynamicIndexing ) + , shaderSampledImageArrayDynamicIndexing( rhs.shaderSampledImageArrayDynamicIndexing ) + , shaderStorageBufferArrayDynamicIndexing( rhs.shaderStorageBufferArrayDynamicIndexing ) + , shaderStorageImageArrayDynamicIndexing( rhs.shaderStorageImageArrayDynamicIndexing ) + , shaderClipDistance( rhs.shaderClipDistance ) + , shaderCullDistance( rhs.shaderCullDistance ) + , shaderFloat64( rhs.shaderFloat64 ) + , shaderInt64( rhs.shaderInt64 ) + , shaderInt16( rhs.shaderInt16 ) + , shaderResourceResidency( rhs.shaderResourceResidency ) + , shaderResourceMinLod( rhs.shaderResourceMinLod ) + , sparseBinding( rhs.sparseBinding ) + , sparseResidencyBuffer( rhs.sparseResidencyBuffer ) + , sparseResidencyImage2D( rhs.sparseResidencyImage2D ) + , sparseResidencyImage3D( rhs.sparseResidencyImage3D ) + , sparseResidency2Samples( rhs.sparseResidency2Samples ) + , sparseResidency4Samples( rhs.sparseResidency4Samples ) + , sparseResidency8Samples( rhs.sparseResidency8Samples ) 
+      , sparseResidency16Samples( rhs.sparseResidency16Samples )
+      , sparseResidencyAliased( rhs.sparseResidencyAliased )
+      , variableMultisampleRate( rhs.variableMultisampleRate )
+      , inheritedQueries( rhs.inheritedQueries )
+    {}
+
+    PhysicalDeviceFeatures & operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( PhysicalDeviceFeatures ) );
+      return *this;
+    }
+
+    PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceFeatures& operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<PhysicalDeviceFeatures const*>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      robustBufferAccess = robustBufferAccess_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fullDrawIndexUint32 = fullDrawIndexUint32_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageCubeArray = imageCubeArray_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT
+    {
+      independentBlend = independentBlend_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      geometryShader = geometryShader_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tessellationShader = tessellationShader_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleRateShading = sampleRateShading_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dualSrcBlend = dualSrcBlend_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      logicOp = logicOp_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiDrawIndirect = multiDrawIndirect_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drawIndirectFirstInstance = drawIndirectFirstInstance_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthClamp = depthClamp_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasClamp = depthBiasClamp_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fillModeNonSolid = fillModeNonSolid_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBounds = depthBounds_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      wideLines = wideLines_;
+      return *this;
+    }
+
+
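+    // Each setter returns *this, so the Bool32 feature flags can be enabled in one fluent
+    // chain. A minimal sketch (assuming the default `vk` namespace alias; which features an
+    // application enables is illustrative here, not mandated by this header):
+    //   auto features = vk::PhysicalDeviceFeatures()
+    //                       .setGeometryShader( VK_TRUE )
+    //                       .setSamplerAnisotropy( VK_TRUE );
+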
PhysicalDeviceFeatures & setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT + { + largePoints = largePoints_; + return *this; + } + + PhysicalDeviceFeatures & setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT + { + alphaToOne = alphaToOne_; + return *this; + } + + PhysicalDeviceFeatures & setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT + { + multiViewport = multiViewport_; + return *this; + } + + PhysicalDeviceFeatures & setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT + { + samplerAnisotropy = samplerAnisotropy_; + return *this; + } + + PhysicalDeviceFeatures & setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionETC2 = textureCompressionETC2_; + return *this; + } + + PhysicalDeviceFeatures & setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionASTC_LDR = textureCompressionASTC_LDR_; + return *this; + } + + PhysicalDeviceFeatures & setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionBC = textureCompressionBC_; + return *this; + } + + PhysicalDeviceFeatures & setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT + { + occlusionQueryPrecise = occlusionQueryPrecise_; + return *this; + } + + PhysicalDeviceFeatures & setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT + { + pipelineStatisticsQuery = pipelineStatisticsQuery_; + return *this; + } + + PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT + { + vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_; + return *this; + } + + PhysicalDeviceFeatures & setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT + { + fragmentStoresAndAtomics = fragmentStoresAndAtomics_; + return *this; + } + + PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize( VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT + { + shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_; + return *this; + } + + PhysicalDeviceFeatures & setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT + { + shaderImageGatherExtended = shaderImageGatherExtended_; + return *this; + } + + PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_; + return *this; + } + + PhysicalDeviceFeatures & setShaderStorageImageMultisample( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageMultisample = shaderStorageImageMultisample_; + return *this; + } + + PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_; + return *this; + } + + PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) 
VULKAN_HPP_NOEXCEPT + { + shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_; + return *this; + } + + PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT + { + shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_; + return *this; + } + + PhysicalDeviceFeatures & setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT + { + shaderClipDistance = shaderClipDistance_; + return *this; + } + + PhysicalDeviceFeatures & setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT + { + shaderCullDistance = shaderCullDistance_; + return *this; + } + + PhysicalDeviceFeatures & setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT + { + shaderFloat64 = shaderFloat64_; + return *this; + } + + PhysicalDeviceFeatures & setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT + { + shaderInt64 = shaderInt64_; + return *this; + } + + PhysicalDeviceFeatures & setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT + { + shaderInt16 = shaderInt16_; + return *this; + } + + PhysicalDeviceFeatures & setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT + { + shaderResourceResidency = shaderResourceResidency_; + return *this; + } + + PhysicalDeviceFeatures & setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT + { + shaderResourceMinLod = shaderResourceMinLod_; + return *this; + } + + PhysicalDeviceFeatures & setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT + { + sparseBinding = sparseBinding_; + return *this; + } + + PhysicalDeviceFeatures & setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidencyBuffer = sparseResidencyBuffer_; + return *this; + } + + PhysicalDeviceFeatures & setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidencyImage2D = sparseResidencyImage2D_; + return *this; + } + + PhysicalDeviceFeatures & setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidencyImage3D = sparseResidencyImage3D_; + return *this; + } + + PhysicalDeviceFeatures & setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT + { + sparseResidency2Samples = sparseResidency2Samples_; + return *this; + } + + PhysicalDeviceFeatures & 
setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidency4Samples = sparseResidency4Samples_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidency8Samples = sparseResidency8Samples_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidency16Samples = sparseResidency16Samples_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sparseResidencyAliased = sparseResidencyAliased_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      variableMultisampleRate = variableMultisampleRate_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures & setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inheritedQueries = inheritedQueries_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFeatures*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( robustBufferAccess == rhs.robustBufferAccess )
+          && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 )
+          && ( imageCubeArray == rhs.imageCubeArray )
+          && ( independentBlend == rhs.independentBlend )
+          && ( geometryShader == rhs.geometryShader )
+          && ( tessellationShader == rhs.tessellationShader )
+          && ( sampleRateShading == rhs.sampleRateShading )
+          && ( dualSrcBlend == rhs.dualSrcBlend )
+          && ( logicOp == rhs.logicOp )
+          && ( multiDrawIndirect == rhs.multiDrawIndirect )
+          && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance )
+          && ( depthClamp == rhs.depthClamp )
+          && ( depthBiasClamp == rhs.depthBiasClamp )
+          && ( fillModeNonSolid == rhs.fillModeNonSolid )
+          && ( depthBounds == rhs.depthBounds )
+          && ( wideLines == rhs.wideLines )
+          && ( largePoints == rhs.largePoints )
+          && ( alphaToOne == rhs.alphaToOne )
+          && ( multiViewport == rhs.multiViewport )
+          && ( samplerAnisotropy == rhs.samplerAnisotropy )
+          && ( textureCompressionETC2 == rhs.textureCompressionETC2 )
+          && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR )
+          && ( textureCompressionBC == rhs.textureCompressionBC )
+          && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise )
+          && ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery )
+          && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics )
+          && ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics )
+          && ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize )
+          && ( shaderImageGatherExtended == rhs.shaderImageGatherExtended )
+          && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats )
+          && ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample )
+          && ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat )
+          && ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat )
+          && (
shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing ) + && ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing ) + && ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing ) + && ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing ) + && ( shaderClipDistance == rhs.shaderClipDistance ) + && ( shaderCullDistance == rhs.shaderCullDistance ) + && ( shaderFloat64 == rhs.shaderFloat64 ) + && ( shaderInt64 == rhs.shaderInt64 ) + && ( shaderInt16 == rhs.shaderInt16 ) + && ( shaderResourceResidency == rhs.shaderResourceResidency ) + && ( shaderResourceMinLod == rhs.shaderResourceMinLod ) + && ( sparseBinding == rhs.sparseBinding ) + && ( sparseResidencyBuffer == rhs.sparseResidencyBuffer ) + && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D ) + && ( sparseResidencyImage3D == rhs.sparseResidencyImage3D ) + && ( sparseResidency2Samples == rhs.sparseResidency2Samples ) + && ( sparseResidency4Samples == rhs.sparseResidency4Samples ) + && ( sparseResidency8Samples == rhs.sparseResidency8Samples ) + && ( sparseResidency16Samples == rhs.sparseResidency16Samples ) + && ( sparseResidencyAliased == rhs.sparseResidencyAliased ) + && ( variableMultisampleRate == rhs.variableMultisampleRate ) + && ( inheritedQueries == rhs.inheritedQueries ); + } + + bool operator!=( PhysicalDeviceFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {}; + VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {}; + VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {}; + VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {}; + VULKAN_HPP_NAMESPACE::Bool32 logicOp = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {}; + VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {}; + VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {}; + VULKAN_HPP_NAMESPACE::Bool32 wideLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 largePoints = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {}; + VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {}; + VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {}; + VULKAN_HPP_NAMESPACE::Bool32 
shaderSampledImageArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {};
+    VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {};
+    VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {};
+    VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {};
+  };
+  static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  struct DeviceCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {},
+                                           uint32_t queueCreateInfoCount_ = {},
+                                           const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ = {},
+                                           uint32_t enabledLayerCount_ = {},
+                                           const char* const* ppEnabledLayerNames_ = {},
+                                           uint32_t enabledExtensionCount_ = {},
+                                           const char* const* ppEnabledExtensionNames_ = {},
+                                           const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , queueCreateInfoCount( queueCreateInfoCount_ )
+      , pQueueCreateInfos( pQueueCreateInfos_ )
+      , enabledLayerCount( enabledLayerCount_ )
+      , ppEnabledLayerNames( ppEnabledLayerNames_ )
+      , enabledExtensionCount( enabledExtensionCount_ )
+      , ppEnabledExtensionNames( ppEnabledExtensionNames_ )
+      , pEnabledFeatures( pEnabledFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , queueCreateInfoCount( rhs.queueCreateInfoCount )
+      , pQueueCreateInfos( rhs.pQueueCreateInfos )
+      , enabledLayerCount( rhs.enabledLayerCount )
+      , ppEnabledLayerNames( rhs.ppEnabledLayerNames )
+      , enabledExtensionCount( rhs.enabledExtensionCount )
+      , ppEnabledExtensionNames( rhs.ppEnabledExtensionNames )
+      , pEnabledFeatures( rhs.pEnabledFeatures )
+    {}
+
+    DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DeviceCreateInfo ) - offsetof( DeviceCreateInfo, pNext ) );
+      return *this;
+    }
+
+    DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceCreateInfo& operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<DeviceCreateInfo const*>(&rhs);
+      return *this;
+    }
+
+    DeviceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
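+    // queueCreateInfoCount and pQueueCreateInfos describe a single array: the count must equal
+    // the number of DeviceQueueCreateInfo elements behind the pointer, one per distinct queue
+    // family. A minimal sketch (assuming the default `vk` namespace; `queueInfos` is illustrative):
+    //   std::array<vk::DeviceQueueCreateInfo, 2> queueInfos{ /* ... */ };
+    //   createInfo.setQueueCreateInfoCount( static_cast<uint32_t>( queueInfos.size() ) )
+    //             .setPQueueCreateInfos( queueInfos.data() );
+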
+    DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueCreateInfoCount = queueCreateInfoCount_;
+      return *this;
+    }
+
+    DeviceCreateInfo & setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueCreateInfos = pQueueCreateInfos_;
+      return *this;
+    }
+
+    DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledLayerCount = enabledLayerCount_;
+      return *this;
+    }
+
+    DeviceCreateInfo & setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppEnabledLayerNames = ppEnabledLayerNames_;
+      return *this;
+    }
+
+    DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledExtensionCount = enabledExtensionCount_;
+      return *this;
+    }
+
+    DeviceCreateInfo & setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ppEnabledExtensionNames = ppEnabledExtensionNames_;
+      return *this;
+    }
+
+    DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pEnabledFeatures = pEnabledFeatures_;
+      return *this;
+    }
+
+    operator VkDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceCreateInfo*>( this );
+    }
+
+    operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceCreateInfo const& ) const = default;
+#else
+    bool operator==( DeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueCreateInfoCount == rhs.queueCreateInfoCount )
+          && ( pQueueCreateInfos == rhs.pQueueCreateInfos )
+          && ( enabledLayerCount == rhs.enabledLayerCount )
+          && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
+          && ( enabledExtensionCount == rhs.enabledExtensionCount )
+          && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames )
+          && ( pEnabledFeatures == rhs.pEnabledFeatures );
+    }
+
+    bool operator!=( DeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {};
+    uint32_t queueCreateInfoCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos = {};
+    uint32_t enabledLayerCount = {};
+    const char* const* ppEnabledLayerNames = {};
+    uint32_t enabledExtensionCount = {};
+    const char* const* ppEnabledExtensionNames = {};
+    const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures = {};
+  };
+  static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
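+
+  // The three structures above are typically combined when creating a logical device:
+  // DeviceQueueCreateInfo selects the queues, PhysicalDeviceFeatures opts into optional
+  // features, and DeviceCreateInfo ties them together. A minimal, illustrative sketch
+  // (assuming the default `vk` namespace and an already-acquired physicalDevice handle):
+  //   float priority = 1.0f;
+  //   auto queueInfo = vk::DeviceQueueCreateInfo()
+  //                        .setQueueFamilyIndex( 0 )
+  //                        .setQueueCount( 1 )
+  //                        .setPQueuePriorities( &priority );
+  //   vk::PhysicalDeviceFeatures features{};
+  //   auto createInfo = vk::DeviceCreateInfo()
+  //                         .setQueueCreateInfoCount( 1 )
+  //                         .setPQueueCreateInfos( &queueInfo )
+  //                         .setPEnabledFeatures( &features );
+  //   vk::Device device = physicalDevice.createDevice( createInfo );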
+
+  struct DeviceEventInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug ) VULKAN_HPP_NOEXCEPT
+      : deviceEvent( deviceEvent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , deviceEvent( rhs.deviceEvent )
+    {}
+
+    DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DeviceEventInfoEXT ) - offsetof( DeviceEventInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceEventInfoEXT& operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<DeviceEventInfoEXT const*>(&rhs);
+      return *this;
+    }
+
+    DeviceEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceEventInfoEXT & setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceEvent = deviceEvent_;
+      return *this;
+    }
+
+    operator VkDeviceEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceEventInfoEXT*>( this );
+    }
+
+    operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceEventInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceEventInfoEXT const& ) const = default;
+#else
+    bool operator==( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceEvent == rhs.deviceEvent );
+    }
+
+    bool operator!=( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug;
+  };
+  static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceEventInfoEXT>::value, "struct wrapper is not a standard layout!"
); + + struct DeviceGeneratedCommandsFeaturesNVX + { + VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsFeaturesNVX( VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport_ = {} ) VULKAN_HPP_NOEXCEPT + : computeBindingPointSupport( computeBindingPointSupport_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsFeaturesNVX( DeviceGeneratedCommandsFeaturesNVX const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , computeBindingPointSupport( rhs.computeBindingPointSupport ) + {} + + DeviceGeneratedCommandsFeaturesNVX & operator=( DeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( DeviceGeneratedCommandsFeaturesNVX ) - offsetof( DeviceGeneratedCommandsFeaturesNVX, pNext ) ); + return *this; + } + + DeviceGeneratedCommandsFeaturesNVX( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + DeviceGeneratedCommandsFeaturesNVX& operator=( VkDeviceGeneratedCommandsFeaturesNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + DeviceGeneratedCommandsFeaturesNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGeneratedCommandsFeaturesNVX & setComputeBindingPointSupport( VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport_ ) VULKAN_HPP_NOEXCEPT + { + computeBindingPointSupport = computeBindingPointSupport_; + return *this; + } + + operator VkDeviceGeneratedCommandsFeaturesNVX const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGeneratedCommandsFeaturesNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceGeneratedCommandsFeaturesNVX const& ) const = default; +#else + bool operator==( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( computeBindingPointSupport == rhs.computeBindingPointSupport ); + } + + bool operator!=( DeviceGeneratedCommandsFeaturesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGeneratedCommandsFeaturesNVX; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeBindingPointSupport = {}; + }; + static_assert( sizeof( DeviceGeneratedCommandsFeaturesNVX ) == sizeof( VkDeviceGeneratedCommandsFeaturesNVX ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct DeviceGeneratedCommandsLimitsNVX + { + VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsLimitsNVX( uint32_t maxIndirectCommandsLayoutTokenCount_ = {}, + uint32_t maxObjectEntryCounts_ = {}, + uint32_t minSequenceCountBufferOffsetAlignment_ = {}, + uint32_t minSequenceIndexBufferOffsetAlignment_ = {}, + uint32_t minCommandsTokenBufferOffsetAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + : maxIndirectCommandsLayoutTokenCount( maxIndirectCommandsLayoutTokenCount_ ) + , maxObjectEntryCounts( maxObjectEntryCounts_ ) + , minSequenceCountBufferOffsetAlignment( minSequenceCountBufferOffsetAlignment_ ) + , minSequenceIndexBufferOffsetAlignment( minSequenceIndexBufferOffsetAlignment_ ) + , minCommandsTokenBufferOffsetAlignment( minCommandsTokenBufferOffsetAlignment_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceGeneratedCommandsLimitsNVX( DeviceGeneratedCommandsLimitsNVX const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , maxIndirectCommandsLayoutTokenCount( rhs.maxIndirectCommandsLayoutTokenCount ) + , maxObjectEntryCounts( rhs.maxObjectEntryCounts ) + , minSequenceCountBufferOffsetAlignment( rhs.minSequenceCountBufferOffsetAlignment ) + , minSequenceIndexBufferOffsetAlignment( rhs.minSequenceIndexBufferOffsetAlignment ) + , minCommandsTokenBufferOffsetAlignment( rhs.minCommandsTokenBufferOffsetAlignment ) + {} + + DeviceGeneratedCommandsLimitsNVX & operator=( DeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( DeviceGeneratedCommandsLimitsNVX ) - offsetof( DeviceGeneratedCommandsLimitsNVX, pNext ) ); + return *this; + } + + DeviceGeneratedCommandsLimitsNVX( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + DeviceGeneratedCommandsLimitsNVX& operator=( VkDeviceGeneratedCommandsLimitsNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + DeviceGeneratedCommandsLimitsNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGeneratedCommandsLimitsNVX & setMaxIndirectCommandsLayoutTokenCount( uint32_t maxIndirectCommandsLayoutTokenCount_ ) VULKAN_HPP_NOEXCEPT + { + maxIndirectCommandsLayoutTokenCount = maxIndirectCommandsLayoutTokenCount_; + return *this; + } + + DeviceGeneratedCommandsLimitsNVX & setMaxObjectEntryCounts( uint32_t maxObjectEntryCounts_ ) VULKAN_HPP_NOEXCEPT + { + maxObjectEntryCounts = maxObjectEntryCounts_; + return *this; + } + + DeviceGeneratedCommandsLimitsNVX & setMinSequenceCountBufferOffsetAlignment( uint32_t minSequenceCountBufferOffsetAlignment_ ) VULKAN_HPP_NOEXCEPT + { + minSequenceCountBufferOffsetAlignment = minSequenceCountBufferOffsetAlignment_; + return *this; + } + + DeviceGeneratedCommandsLimitsNVX & setMinSequenceIndexBufferOffsetAlignment( uint32_t minSequenceIndexBufferOffsetAlignment_ ) VULKAN_HPP_NOEXCEPT + { + minSequenceIndexBufferOffsetAlignment = minSequenceIndexBufferOffsetAlignment_; + return *this; + } + + DeviceGeneratedCommandsLimitsNVX & setMinCommandsTokenBufferOffsetAlignment( uint32_t minCommandsTokenBufferOffsetAlignment_ ) VULKAN_HPP_NOEXCEPT + { + minCommandsTokenBufferOffsetAlignment = minCommandsTokenBufferOffsetAlignment_; + return *this; + } + + operator VkDeviceGeneratedCommandsLimitsNVX const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGeneratedCommandsLimitsNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto 
operator<=>( DeviceGeneratedCommandsLimitsNVX const& ) const = default; +#else + bool operator==( DeviceGeneratedCommandsLimitsNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxIndirectCommandsLayoutTokenCount == rhs.maxIndirectCommandsLayoutTokenCount ) + && ( maxObjectEntryCounts == rhs.maxObjectEntryCounts ) + && ( minSequenceCountBufferOffsetAlignment == rhs.minSequenceCountBufferOffsetAlignment ) + && ( minSequenceIndexBufferOffsetAlignment == rhs.minSequenceIndexBufferOffsetAlignment ) + && ( minCommandsTokenBufferOffsetAlignment == rhs.minCommandsTokenBufferOffsetAlignment ); + } + + bool operator!=( DeviceGeneratedCommandsLimitsNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGeneratedCommandsLimitsNVX; + const void* pNext = {}; + uint32_t maxIndirectCommandsLayoutTokenCount = {}; + uint32_t maxObjectEntryCounts = {}; + uint32_t minSequenceCountBufferOffsetAlignment = {}; + uint32_t minSequenceIndexBufferOffsetAlignment = {}; + uint32_t minCommandsTokenBufferOffsetAlignment = {}; + }; + static_assert( sizeof( DeviceGeneratedCommandsLimitsNVX ) == sizeof( VkDeviceGeneratedCommandsLimitsNVX ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct DeviceGroupBindSparseInfo + { + VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = {}, + uint32_t memoryDeviceIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : resourceDeviceIndex( resourceDeviceIndex_ ) + , memoryDeviceIndex( memoryDeviceIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , resourceDeviceIndex( rhs.resourceDeviceIndex ) + , memoryDeviceIndex( rhs.memoryDeviceIndex ) + {} + + DeviceGroupBindSparseInfo & operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupBindSparseInfo ) - offsetof( DeviceGroupBindSparseInfo, pNext ) ); + return *this; + } + + DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + DeviceGroupBindSparseInfo& operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + DeviceGroupBindSparseInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT + { + resourceDeviceIndex = resourceDeviceIndex_; + return *this; + } + + DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT + { + memoryDeviceIndex = memoryDeviceIndex_; + return *this; + } + + operator VkDeviceGroupBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceGroupBindSparseInfo const& ) const = default; +#else + bool operator==( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( resourceDeviceIndex == rhs.resourceDeviceIndex ) + && ( memoryDeviceIndex == 
rhs.memoryDeviceIndex ); + } + + bool operator!=( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo; + const void* pNext = {}; + uint32_t resourceDeviceIndex = {}; + uint32_t memoryDeviceIndex = {}; + }; + static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct DeviceGroupCommandBufferBeginInfo + { + VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceMask( deviceMask_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , deviceMask( rhs.deviceMask ) + {} + + DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupCommandBufferBeginInfo ) - offsetof( DeviceGroupCommandBufferBeginInfo, pNext ) ); + return *this; + } + + DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + DeviceGroupCommandBufferBeginInfo& operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + DeviceGroupCommandBufferBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + deviceMask = deviceMask_; + return *this; + } + + operator VkDeviceGroupCommandBufferBeginInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceGroupCommandBufferBeginInfo const& ) const = default; +#else + bool operator==( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( deviceMask == rhs.deviceMask ); + } + + bool operator!=( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo; + const void* pNext = {}; + uint32_t deviceMask = {}; + }; + static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct DeviceGroupDeviceCreateInfo + { + VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = {}, + const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ = {} ) VULKAN_HPP_NOEXCEPT + : physicalDeviceCount( physicalDeviceCount_ ) + , pPhysicalDevices( pPhysicalDevices_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , physicalDeviceCount( rhs.physicalDeviceCount ) + , pPhysicalDevices( rhs.pPhysicalDevices ) + {} + + DeviceGroupDeviceCreateInfo & operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupDeviceCreateInfo ) - offsetof( DeviceGroupDeviceCreateInfo, pNext ) ); + return *this; + } + + DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + DeviceGroupDeviceCreateInfo& operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + DeviceGroupDeviceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT + { + physicalDeviceCount = physicalDeviceCount_; + return *this; + } + + DeviceGroupDeviceCreateInfo & setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT + { + pPhysicalDevices = pPhysicalDevices_; + return *this; + } + + operator VkDeviceGroupDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceGroupDeviceCreateInfo const& ) const = default; +#else + bool operator==( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( physicalDeviceCount == rhs.physicalDeviceCount ) + && ( pPhysicalDevices == rhs.pPhysicalDevices ); + } + + bool operator!=( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo; + const void* pNext = {}; + uint32_t physicalDeviceCount = {}; + const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices = {}; + }; + static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  struct DeviceGroupPresentCapabilitiesKHR
+  {
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( std::array<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> const& presentMask_ = {},
+                                                               VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : presentMask{}
+      , modes( modes_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t, VK_MAX_DEVICE_GROUP_SIZE>::copy( presentMask, presentMask_ );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , presentMask{}
+      , modes( rhs.modes )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t, VK_MAX_DEVICE_GROUP_SIZE>::copy( presentMask, rhs.presentMask );
+    }
+
+    DeviceGroupPresentCapabilitiesKHR & operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupPresentCapabilitiesKHR ) - offsetof( DeviceGroupPresentCapabilitiesKHR, pNext ) );
+      return *this;
+    }
+
+    DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceGroupPresentCapabilitiesKHR& operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<DeviceGroupPresentCapabilitiesKHR const*>(&rhs);
+      return *this;
+    }
+
+    operator VkDeviceGroupPresentCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR*>( this );
+    }
+
+    operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceGroupPresentCapabilitiesKHR const& ) const = default;
+#else
+    bool operator==( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memcmp( presentMask, rhs.presentMask, VK_MAX_DEVICE_GROUP_SIZE * sizeof( uint32_t ) ) == 0 )
+          && ( modes == rhs.modes );
+    }
+
+    bool operator!=( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
+    const void* pNext = {};
+    uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE] = {};
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
+  };
+  static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupPresentCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
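+
+  // presentMask holds one bitmask per physical device in the group: bit j of element i is set
+  // when device i can present swapchain images that were rendered on device j, and modes is
+  // the set of supported device-group present modes. An illustrative check:
+  //   if ( caps.presentMask[i] & ( 1u << j ) ) { /* device i can present device j's images */ }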
+
+  struct DeviceGroupPresentInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( uint32_t swapchainCount_ = {},
+                                                    const uint32_t* pDeviceMasks_ = {},
+                                                    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal ) VULKAN_HPP_NOEXCEPT
+      : swapchainCount( swapchainCount_ )
+      , pDeviceMasks( pDeviceMasks_ )
+      , mode( mode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , swapchainCount( rhs.swapchainCount )
+      , pDeviceMasks( rhs.pDeviceMasks )
+      , mode( rhs.mode )
+    {}
+
+    DeviceGroupPresentInfoKHR & operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupPresentInfoKHR ) - offsetof( DeviceGroupPresentInfoKHR, pNext ) );
+      return *this;
+    }
+
+    DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceGroupPresentInfoKHR& operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<DeviceGroupPresentInfoKHR const*>(&rhs);
+      return *this;
+    }
+
+    DeviceGroupPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t* pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDeviceMasks = pDeviceMasks_;
+      return *this;
+    }
+
+    DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+
+    operator VkDeviceGroupPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupPresentInfoKHR*>( this );
+    }
+
+    operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupPresentInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceGroupPresentInfoKHR const& ) const = default;
+#else
+    bool operator==( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pDeviceMasks == rhs.pDeviceMasks )
+          && ( mode == rhs.mode );
+    }
+
+    bool operator!=( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR;
+    const void* pNext = {};
+    uint32_t swapchainCount = {};
+    const uint32_t* pDeviceMasks = {};
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal;
+  };
+  static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupPresentInfoKHR>::value, "struct wrapper is not a standard layout!"
); + + struct DeviceGroupRenderPassBeginInfo + { + VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_ = {}, + uint32_t deviceRenderAreaCount_ = {}, + const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ = {} ) VULKAN_HPP_NOEXCEPT + : deviceMask( deviceMask_ ) + , deviceRenderAreaCount( deviceRenderAreaCount_ ) + , pDeviceRenderAreas( pDeviceRenderAreas_ ) + {} + + VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , deviceMask( rhs.deviceMask ) + , deviceRenderAreaCount( rhs.deviceRenderAreaCount ) + , pDeviceRenderAreas( rhs.pDeviceRenderAreas ) + {} + + DeviceGroupRenderPassBeginInfo & operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupRenderPassBeginInfo ) - offsetof( DeviceGroupRenderPassBeginInfo, pNext ) ); + return *this; + } + + DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + DeviceGroupRenderPassBeginInfo& operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + DeviceGroupRenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT + { + deviceMask = deviceMask_; + return *this; + } + + DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT + { + deviceRenderAreaCount = deviceRenderAreaCount_; + return *this; + } + + DeviceGroupRenderPassBeginInfo & setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT + { + pDeviceRenderAreas = pDeviceRenderAreas_; + return *this; + } + + operator VkDeviceGroupRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DeviceGroupRenderPassBeginInfo const& ) const = default; +#else + bool operator==( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( deviceMask == rhs.deviceMask ) + && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount ) + && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas ); + } + + bool operator!=( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo; + const void* pNext = {}; + uint32_t deviceMask = {}; + uint32_t deviceRenderAreaCount = {}; + const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas = {}; + }; + static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
+
+  struct DeviceGroupSubmitInfo
+  {
+    VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( uint32_t waitSemaphoreCount_ = {},
+                                                const uint32_t* pWaitSemaphoreDeviceIndices_ = {},
+                                                uint32_t commandBufferCount_ = {},
+                                                const uint32_t* pCommandBufferDeviceMasks_ = {},
+                                                uint32_t signalSemaphoreCount_ = {},
+                                                const uint32_t* pSignalSemaphoreDeviceIndices_ = {} ) VULKAN_HPP_NOEXCEPT
+      : waitSemaphoreCount( waitSemaphoreCount_ )
+      , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ )
+      , commandBufferCount( commandBufferCount_ )
+      , pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ )
+      , signalSemaphoreCount( signalSemaphoreCount_ )
+      , pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( DeviceGroupSubmitInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , waitSemaphoreCount( rhs.waitSemaphoreCount )
+      , pWaitSemaphoreDeviceIndices( rhs.pWaitSemaphoreDeviceIndices )
+      , commandBufferCount( rhs.commandBufferCount )
+      , pCommandBufferDeviceMasks( rhs.pCommandBufferDeviceMasks )
+      , signalSemaphoreCount( rhs.signalSemaphoreCount )
+      , pSignalSemaphoreDeviceIndices( rhs.pSignalSemaphoreDeviceIndices )
+    {}
+
+    DeviceGroupSubmitInfo & operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupSubmitInfo ) - offsetof( DeviceGroupSubmitInfo, pNext ) );
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceGroupSubmitInfo& operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const*>(&rhs);
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = waitSemaphoreCount_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t* pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      commandBufferCount = commandBufferCount_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t* pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      signalSemaphoreCount = signalSemaphoreCount_;
+      return *this;
+    }
+
+    DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t* pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_;
+      return *this;
+    }
+
+    operator VkDeviceGroupSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupSubmitInfo*>( this );
+    }
+
+    operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupSubmitInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceGroupSubmitInfo const& ) const = default;
+#else
+    bool operator==( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices )
+          && ( commandBufferCount == rhs.commandBufferCount )
+          && ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks )
+          && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
+          && ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices );
+    }
+
+    bool operator!=( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo;
+    const void* pNext = {};
+    uint32_t waitSemaphoreCount = {};
+    const uint32_t* pWaitSemaphoreDeviceIndices = {};
+    uint32_t commandBufferCount = {};
+    const uint32_t* pCommandBufferDeviceMasks = {};
+    uint32_t signalSemaphoreCount = {};
+    const uint32_t* pSignalSemaphoreDeviceIndices = {};
+  };
+  static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct DeviceGroupSwapchainCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : modes( modes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , modes( rhs.modes )
+    {}
+
+    DeviceGroupSwapchainCreateInfoKHR & operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DeviceGroupSwapchainCreateInfoKHR ) - offsetof( DeviceGroupSwapchainCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceGroupSwapchainCreateInfoKHR& operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const*>(&rhs);
+      return *this;
+    }
+
+    DeviceGroupSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      modes = modes_;
+      return *this;
+    }
+
+    operator VkDeviceGroupSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR*>( this );
+    }
+
+    operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( modes == rhs.modes );
+    }
+
+    bool operator!=( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
+  };
+  static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceGroupSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
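`DeviceGroupSubmitInfo` extends `SubmitInfo` the same way. A sketch that pins a submission's command buffer to device 0 of a group (not part of the patch; queue, command buffer, and mask are hypothetical, exceptions-enabled vulkan.hpp assumed):

```cpp
#include <vulkan/vulkan.hpp>

// Submits one command buffer so that only physical device 0 executes it.
void submitOnDeviceZero( vk::Queue queue, vk::CommandBuffer cmd )
{
    uint32_t deviceMask{ 0b01 }; // bit N selects physical device N

    vk::DeviceGroupSubmitInfo deviceGroupInfo;
    deviceGroupInfo.setCommandBufferCount( 1 )
                   .setPCommandBufferDeviceMasks( &deviceMask );

    vk::SubmitInfo submitInfo;
    submitInfo.setCommandBufferCount( 1 )
              .setPCommandBuffers( &cmd )
              .setPNext( &deviceGroupInfo );
    queue.submit( submitInfo, nullptr ); // no fence
}
```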
+
+  struct DeviceMemoryOverallocationCreateInfoAMD
+  {
+    VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault ) VULKAN_HPP_NOEXCEPT
+      : overallocationBehavior( overallocationBehavior_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , overallocationBehavior( rhs.overallocationBehavior )
+    {}
+
+    DeviceMemoryOverallocationCreateInfoAMD & operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DeviceMemoryOverallocationCreateInfoAMD ) - offsetof( DeviceMemoryOverallocationCreateInfoAMD, pNext ) );
+      return *this;
+    }
+
+    DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceMemoryOverallocationCreateInfoAMD& operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const*>(&rhs);
+      return *this;
+    }
+
+    DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT
+    {
+      overallocationBehavior = overallocationBehavior_;
+      return *this;
+    }
+
+    operator VkDeviceMemoryOverallocationCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
+    }
+
+    operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const& ) const = default;
+#else
+    bool operator==( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( overallocationBehavior == rhs.overallocationBehavior );
+    }
+
+    bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault;
+  };
+  static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceMemoryOverallocationCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
+
+  struct DeviceQueueGlobalPriorityCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow ) VULKAN_HPP_NOEXCEPT
+      : globalPriority( globalPriority_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , globalPriority( rhs.globalPriority )
+    {}
+
+    DeviceQueueGlobalPriorityCreateInfoEXT & operator=( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) - offsetof( DeviceQueueGlobalPriorityCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceQueueGlobalPriorityCreateInfoEXT& operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT const*>(&rhs);
+      return *this;
+    }
+
+    DeviceQueueGlobalPriorityCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceQueueGlobalPriorityCreateInfoEXT & setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ ) VULKAN_HPP_NOEXCEPT
+    {
+      globalPriority = globalPriority_;
+      return *this;
+    }
+
+    operator VkDeviceQueueGlobalPriorityCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfoEXT*>( this );
+    }
+
+    operator VkDeviceQueueGlobalPriorityCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceQueueGlobalPriorityCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( globalPriority == rhs.globalPriority );
+    }
+
+    bool operator!=( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow;
+  };
+  static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceQueueGlobalPriorityCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
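This struct is chained onto `DeviceQueueCreateInfo` at device-creation time (VK_EXT_global_priority). A sketch of requesting a high-priority queue (not part of the patch; the family index and priority pointer are hypothetical placeholders):

```cpp
#include <vulkan/vulkan.hpp>

// Fills a queue-create info that asks the driver for a high global priority.
// Both structs must outlive the vkCreateDevice call that consumes them.
void fillHighPriorityQueueInfo( vk::DeviceQueueCreateInfo& queueInfo,
                                vk::DeviceQueueGlobalPriorityCreateInfoEXT& priorityInfo,
                                const float* pQueuePriority )
{
    priorityInfo.setGlobalPriority( vk::QueueGlobalPriorityEXT::eHigh );
    queueInfo.setQueueFamilyIndex( 0 )
             .setQueueCount( 1 )
             .setPQueuePriorities( pQueuePriority )
             .setPNext( &priorityInfo );
}
```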
+
+  struct DeviceQueueInfo2
+  {
+    VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {},
+                                           uint32_t queueFamilyIndex_ = {},
+                                           uint32_t queueIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , queueFamilyIndex( queueFamilyIndex_ )
+      , queueIndex( queueIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , queueFamilyIndex( rhs.queueFamilyIndex )
+      , queueIndex( rhs.queueIndex )
+    {}
+
+    DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DeviceQueueInfo2 ) - offsetof( DeviceQueueInfo2, pNext ) );
+      return *this;
+    }
+
+    DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DeviceQueueInfo2& operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const*>(&rhs);
+      return *this;
+    }
+
+    DeviceQueueInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndex = queueFamilyIndex_;
+      return *this;
+    }
+
+    DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueIndex = queueIndex_;
+      return *this;
+    }
+
+    operator VkDeviceQueueInfo2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDeviceQueueInfo2*>( this );
+    }
+
+    operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDeviceQueueInfo2*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DeviceQueueInfo2 const& ) const = default;
+#else
+    bool operator==( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queueFamilyIndex == rhs.queueFamilyIndex )
+          && ( queueIndex == rhs.queueIndex );
+    }
+
+    bool operator!=( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
+    uint32_t queueFamilyIndex = {};
+    uint32_t queueIndex = {};
+  };
+  static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DeviceQueueInfo2>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
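`DeviceQueueInfo2` feeds `vkGetDeviceQueue2` (Vulkan 1.1), which is needed when a queue was created with non-empty create flags. A minimal sketch (not part of the patch; device and indices are hypothetical):

```cpp
#include <vulkan/vulkan.hpp>

// Retrieves a protected-capable queue; the queue must have been created with
// the eProtected flag on the matching DeviceQueueCreateInfo.
vk::Queue getProtectedQueue( vk::Device device )
{
    vk::DeviceQueueInfo2 queueInfo;
    queueInfo.setFlags( vk::DeviceQueueCreateFlagBits::eProtected )
             .setQueueFamilyIndex( 0 )
             .setQueueIndex( 0 );
    return device.getQueue2( queueInfo );
}
```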
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkDispatch.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkDispatch.hpp
new file mode 100644
index 00000000..9b079016
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkDispatch.hpp
@@ -0,0 +1,150 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct DispatchIndirectCommand
+  {
+    VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( uint32_t x_ = {},
+                                                  uint32_t y_ = {},
+                                                  uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+      , z( z_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( DispatchIndirectCommand const& rhs ) VULKAN_HPP_NOEXCEPT
+      : x( rhs.x )
+      , y( rhs.y )
+      , z( rhs.z )
+    {}
+
+    DispatchIndirectCommand & operator=( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( DispatchIndirectCommand ) );
+      return *this;
+    }
+
+    DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DispatchIndirectCommand& operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const*>(&rhs);
+      return *this;
+    }
+
+    DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT
+    {
+      z = z_;
+      return *this;
+    }
+
+    operator VkDispatchIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDispatchIndirectCommand*>( this );
+    }
+
+    operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDispatchIndirectCommand*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DispatchIndirectCommand const& ) const = default;
+#else
+    bool operator==( DispatchIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( z == rhs.z );
+    }
+
+    bool operator!=( DispatchIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t x = {};
+    uint32_t y = {};
+    uint32_t z = {};
+  };
+  static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DispatchIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
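Because the wrapper is guaranteed (by the static_asserts above) to have the exact layout of `VkDispatchIndirectCommand`, it can be memcpy'd straight into an indirect buffer. A sketch (not part of the patch; the buffer, mapping, and workgroup counts are hypothetical):

```cpp
#include <cstring>
#include <vulkan/vulkan.hpp>

// Writes dispatch dimensions into a host-visible buffer's mapping and records
// an indirect dispatch that reads them back at offset 0.
void recordIndirectDispatch( vk::CommandBuffer cmd, vk::Buffer buffer, void* mapped )
{
    vk::DispatchIndirectCommand dispatch{ 64, 64, 1 }; // workgroup counts in x/y/z
    std::memcpy( mapped, &dispatch, sizeof( dispatch ) );
    cmd.dispatchIndirect( buffer, 0 );
}
```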
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkDisplay.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkDisplay.hpp
new file mode 100644
index 00000000..86b6d9b4
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkDisplay.hpp
@@ -0,0 +1,1324 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkDispatch.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct DisplayEventInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut ) VULKAN_HPP_NOEXCEPT
+      : displayEvent( displayEvent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , displayEvent( rhs.displayEvent )
+    {}
+
+    DisplayEventInfoEXT & operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DisplayEventInfoEXT ) - offsetof( DisplayEventInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayEventInfoEXT& operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const*>(&rhs);
+      return *this;
+    }
+
+    DisplayEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayEvent = displayEvent_;
+      return *this;
+    }
+
+    operator VkDisplayEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayEventInfoEXT*>( this );
+    }
+
+    operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayEventInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayEventInfoEXT const& ) const = default;
+#else
+    bool operator==( DisplayEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayEvent == rhs.displayEvent );
+    }
+
+    bool operator!=( DisplayEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut;
+  };
+  static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayModeParametersKHR
+  {
+    VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {},
+                                                   uint32_t refreshRate_ = {} ) VULKAN_HPP_NOEXCEPT
+      : visibleRegion( visibleRegion_ )
+      , refreshRate( refreshRate_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( DisplayModeParametersKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : visibleRegion( rhs.visibleRegion )
+      , refreshRate( rhs.refreshRate )
+    {}
+
+    DisplayModeParametersKHR & operator=( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( DisplayModeParametersKHR ) );
+      return *this;
+    }
+
+    DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayModeParametersKHR& operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const*>(&rhs);
+      return *this;
+    }
+
+    DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      visibleRegion = visibleRegion_;
+      return *this;
+    }
+
+    DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      refreshRate = refreshRate_;
+      return *this;
+    }
+
+    operator VkDisplayModeParametersKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModeParametersKHR*>( this );
+    }
+
+    operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModeParametersKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayModeParametersKHR const& ) const = default;
+#else
+    bool operator==( DisplayModeParametersKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( visibleRegion == rhs.visibleRegion )
+          && ( refreshRate == rhs.refreshRate );
+    }
+
+    bool operator!=( DisplayModeParametersKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {};
+    uint32_t refreshRate = {};
+  };
+  static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayModeParametersKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayModeCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {},
+                                                   VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , parameters( parameters_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , parameters( rhs.parameters )
+    {}
+
+    DisplayModeCreateInfoKHR & operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DisplayModeCreateInfoKHR ) - offsetof( DisplayModeCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayModeCreateInfoKHR& operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const*>(&rhs);
+      return *this;
+    }
+
+    DisplayModeCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ ) VULKAN_HPP_NOEXCEPT
+    {
+      parameters = parameters_;
+      return *this;
+    }
+
+    operator VkDisplayModeCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( this );
+    }
+
+    operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModeCreateInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayModeCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( DisplayModeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( parameters == rhs.parameters );
+    }
+
+    bool operator!=( DisplayModeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
+  };
+  static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayModeCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
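These two structs pair up when registering a custom mode with VK_KHR_display; note that `refreshRate` is expressed in millihertz. A sketch, not part of the patch (the physical device, display handle, and mode values are hypothetical, and the display may reject unsupported modes):

```cpp
#include <vulkan/vulkan.hpp>

// Attempts to create a 1920x1080 @ 60 Hz mode on a directly-driven display.
vk::DisplayModeKHR create1080p60Mode( vk::PhysicalDevice gpu, vk::DisplayKHR display )
{
    vk::DisplayModeCreateInfoKHR createInfo;
    createInfo.setParameters( vk::DisplayModeParametersKHR{ { 1920, 1080 }, 60000 /* mHz */ } );
    return gpu.createDisplayModeKHR( display, createInfo );
}
```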
+
+  struct DisplayModePropertiesKHR
+  {
+    VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {},
+                                                   VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT
+      : displayMode( displayMode_ )
+      , parameters( parameters_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModePropertiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : displayMode( rhs.displayMode )
+      , parameters( rhs.parameters )
+    {}
+
+    DisplayModePropertiesKHR & operator=( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( DisplayModePropertiesKHR ) );
+      return *this;
+    }
+
+    DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayModePropertiesKHR& operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const*>(&rhs);
+      return *this;
+    }
+
+    operator VkDisplayModePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModePropertiesKHR*>( this );
+    }
+
+    operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModePropertiesKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayModePropertiesKHR const& ) const = default;
+#else
+    bool operator==( DisplayModePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( displayMode == rhs.displayMode )
+          && ( parameters == rhs.parameters );
+    }
+
+    bool operator!=( DisplayModePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
+    VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
+  };
+  static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayModePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayModeProperties2KHR
+  {
+    VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : displayModeProperties( displayModeProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( DisplayModeProperties2KHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , displayModeProperties( rhs.displayModeProperties )
+    {}
+
+    DisplayModeProperties2KHR & operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DisplayModeProperties2KHR ) - offsetof( DisplayModeProperties2KHR, pNext ) );
+      return *this;
+    }
+
+    DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayModeProperties2KHR& operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const*>(&rhs);
+      return *this;
+    }
+
+    operator VkDisplayModeProperties2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayModeProperties2KHR*>( this );
+    }
+
+    operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayModeProperties2KHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayModeProperties2KHR const& ) const = default;
+#else
+    bool operator==( DisplayModeProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayModeProperties == rhs.displayModeProperties );
+    }
+
+    bool operator!=( DisplayModeProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeProperties2KHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {};
+  };
+  static_assert( sizeof( DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayModeProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayNativeHdrSurfaceCapabilitiesAMD
+  {
+    VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {} ) VULKAN_HPP_NOEXCEPT
+      : localDimmingSupport( localDimmingSupport_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , localDimmingSupport( rhs.localDimmingSupport )
+    {}
+
+    DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) - offsetof( DisplayNativeHdrSurfaceCapabilitiesAMD, pNext ) );
+      return *this;
+    }
+
+    DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayNativeHdrSurfaceCapabilitiesAMD& operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const*>(&rhs);
+      return *this;
+    }
+
+    operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
+    }
+
+    operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const& ) const = default;
+#else
+    bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( localDimmingSupport == rhs.localDimmingSupport );
+    }
+
+    bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {};
+  };
+  static_assert( sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayNativeHdrSurfaceCapabilitiesAMD>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayPlaneCapabilitiesKHR
+  {
+    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {},
+                                                      VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {},
+                                                      VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {},
+                                                      VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {},
+                                                      VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {},
+                                                      VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {},
+                                                      VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {},
+                                                      VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {},
+                                                      VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {} ) VULKAN_HPP_NOEXCEPT
+      : supportedAlpha( supportedAlpha_ )
+      , minSrcPosition( minSrcPosition_ )
+      , maxSrcPosition( maxSrcPosition_ )
+      , minSrcExtent( minSrcExtent_ )
+      , maxSrcExtent( maxSrcExtent_ )
+      , minDstPosition( minDstPosition_ )
+      , maxDstPosition( maxDstPosition_ )
+      , minDstExtent( minDstExtent_ )
+      , maxDstExtent( maxDstExtent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : supportedAlpha( rhs.supportedAlpha )
+      , minSrcPosition( rhs.minSrcPosition )
+      , maxSrcPosition( rhs.maxSrcPosition )
+      , minSrcExtent( rhs.minSrcExtent )
+      , maxSrcExtent( rhs.maxSrcExtent )
+      , minDstPosition( rhs.minDstPosition )
+      , maxDstPosition( rhs.maxDstPosition )
+      , minDstExtent( rhs.minDstExtent )
+      , maxDstExtent( rhs.maxDstExtent )
+    {}
+
+    DisplayPlaneCapabilitiesKHR & operator=( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( DisplayPlaneCapabilitiesKHR ) );
+      return *this;
+    }
+
+    DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayPlaneCapabilitiesKHR& operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const*>(&rhs);
+      return *this;
+    }
+
+    operator VkDisplayPlaneCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR*>( this );
+    }
+
+    operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayPlaneCapabilitiesKHR const& ) const = default;
+#else
+    bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( supportedAlpha == rhs.supportedAlpha )
+          && ( minSrcPosition == rhs.minSrcPosition )
+          && ( maxSrcPosition == rhs.maxSrcPosition )
+          && ( minSrcExtent == rhs.minSrcExtent )
+          && ( maxSrcExtent == rhs.maxSrcExtent )
+          && ( minDstPosition == rhs.minDstPosition )
+          && ( maxDstPosition == rhs.maxDstPosition )
+          && ( minDstExtent == rhs.minDstExtent )
+          && ( maxDstExtent == rhs.maxDstExtent );
+    }
+
+    bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {};
+    VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {};
+    VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {};
+    VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {};
+    VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {};
+    VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {};
+    VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {};
+  };
+  static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPlaneCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayPlaneCapabilities2KHR
+  {
+    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {} ) VULKAN_HPP_NOEXCEPT
+      : capabilities( capabilities_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , capabilities( rhs.capabilities )
+    {}
+
+    DisplayPlaneCapabilities2KHR & operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DisplayPlaneCapabilities2KHR ) - offsetof( DisplayPlaneCapabilities2KHR, pNext ) );
+      return *this;
+    }
+
+    DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayPlaneCapabilities2KHR& operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const*>(&rhs);
+      return *this;
+    }
+
+    operator VkDisplayPlaneCapabilities2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlaneCapabilities2KHR*>( this );
+    }
+
+    operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayPlaneCapabilities2KHR const& ) const = default;
+#else
+    bool operator==( DisplayPlaneCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( capabilities == rhs.capabilities );
+    }
+
+    bool operator!=( DisplayPlaneCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {};
+  };
+  static_assert( sizeof( DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPlaneCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayPlaneInfo2KHR
+  {
+    VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {},
+                                               uint32_t planeIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : mode( mode_ )
+      , planeIndex( planeIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayPlaneInfo2KHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , mode( rhs.mode )
+      , planeIndex( rhs.planeIndex )
+    {}
+
+    DisplayPlaneInfo2KHR & operator=( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DisplayPlaneInfo2KHR ) - offsetof( DisplayPlaneInfo2KHR, pNext ) );
+      return *this;
+    }
+
+    DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayPlaneInfo2KHR& operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const*>(&rhs);
+      return *this;
+    }
+
+    DisplayPlaneInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mode = mode_;
+      return *this;
+    }
+
+    DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      planeIndex = planeIndex_;
+      return *this;
+    }
+
+    operator VkDisplayPlaneInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlaneInfo2KHR*>( this );
+    }
+
+    operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlaneInfo2KHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayPlaneInfo2KHR const& ) const = default;
+#else
+    bool operator==( DisplayPlaneInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( mode == rhs.mode )
+          && ( planeIndex == rhs.planeIndex );
+    }
+
+    bool operator!=( DisplayPlaneInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {};
+    uint32_t planeIndex = {};
+  };
+  static_assert( sizeof( DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPlaneInfo2KHR>::value, "struct wrapper is not a standard layout!" );
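`DisplayPlaneInfo2KHR` is the input half of the `DisplayPlaneCapabilities2KHR` query above (VK_KHR_get_display_properties2). A sketch, not part of the patch (plane index 0 is a hypothetical choice, exceptions-enabled vulkan.hpp assumed):

```cpp
#include <vulkan/vulkan.hpp>

// Asks the implementation what a given plane can do with a given mode.
vk::DisplayPlaneCapabilitiesKHR queryPlaneCaps( vk::PhysicalDevice gpu, vk::DisplayModeKHR mode )
{
    vk::DisplayPlaneInfo2KHR planeInfo;
    planeInfo.setMode( mode ).setPlaneIndex( 0 );
    return gpu.getDisplayPlaneCapabilities2KHR( planeInfo ).capabilities;
}
```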
+
+  struct DisplayPlanePropertiesKHR
+  {
+    VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {},
+                                                    uint32_t currentStackIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : currentDisplay( currentDisplay_ )
+      , currentStackIndex( currentStackIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : currentDisplay( rhs.currentDisplay )
+      , currentStackIndex( rhs.currentStackIndex )
+    {}
+
+    DisplayPlanePropertiesKHR & operator=( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( DisplayPlanePropertiesKHR ) );
+      return *this;
+    }
+
+    DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayPlanePropertiesKHR& operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const*>(&rhs);
+      return *this;
+    }
+
+    operator VkDisplayPlanePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlanePropertiesKHR*>( this );
+    }
+
+    operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlanePropertiesKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayPlanePropertiesKHR const& ) const = default;
+#else
+    bool operator==( DisplayPlanePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( currentDisplay == rhs.currentDisplay )
+          && ( currentStackIndex == rhs.currentStackIndex );
+    }
+
+    bool operator!=( DisplayPlanePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {};
+    uint32_t currentStackIndex = {};
+  };
+  static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPlanePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayPlaneProperties2KHR
+  {
+    VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : displayPlaneProperties( displayPlaneProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , displayPlaneProperties( rhs.displayPlaneProperties )
+    {}
+
+    DisplayPlaneProperties2KHR & operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DisplayPlaneProperties2KHR ) - offsetof( DisplayPlaneProperties2KHR, pNext ) );
+      return *this;
+    }
+
+    DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayPlaneProperties2KHR& operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const*>(&rhs);
+      return *this;
+    }
+
+    operator VkDisplayPlaneProperties2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPlaneProperties2KHR*>( this );
+    }
+
+    operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPlaneProperties2KHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayPlaneProperties2KHR const& ) const = default;
+#else
+    bool operator==( DisplayPlaneProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayPlaneProperties == rhs.displayPlaneProperties );
+    }
+
+    bool operator!=( DisplayPlaneProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {};
+  };
+  static_assert( sizeof( DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPlaneProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayPowerInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff ) VULKAN_HPP_NOEXCEPT
+      : powerState( powerState_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , powerState( rhs.powerState )
+    {}
+
+    DisplayPowerInfoEXT & operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DisplayPowerInfoEXT ) - offsetof( DisplayPowerInfoEXT, pNext ) );
+      return *this;
+    }
+
+    DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayPowerInfoEXT& operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const*>(&rhs);
+      return *this;
+    }
+
+    DisplayPowerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      powerState = powerState_;
+      return *this;
+    }
+
+    operator VkDisplayPowerInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPowerInfoEXT*>( this );
+    }
+
+    operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPowerInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayPowerInfoEXT const& ) const = default;
+#else
+    bool operator==( DisplayPowerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( powerState == rhs.powerState );
+    }
+
+    bool operator!=( DisplayPowerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff;
+  };
+  static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPowerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct DisplayPresentInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {},
+                                                VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {},
+                                                VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {} ) VULKAN_HPP_NOEXCEPT
+      : srcRect( srcRect_ )
+      , dstRect( dstRect_ )
+      , persistent( persistent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( DisplayPresentInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , srcRect( rhs.srcRect )
+      , dstRect( rhs.dstRect )
+      , persistent( rhs.persistent )
+    {}
+
+    DisplayPresentInfoKHR & operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( DisplayPresentInfoKHR ) - offsetof( DisplayPresentInfoKHR, pNext ) );
+      return *this;
+    }
+
+    DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayPresentInfoKHR& operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const*>(&rhs);
+      return *this;
+    }
+
+    DisplayPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D srcRect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcRect = srcRect_;
+      return *this;
+    }
+
+    DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D dstRect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstRect = dstRect_;
+      return *this;
+    }
+
+    DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      persistent = persistent_;
+      return *this;
+    }
+
+    operator VkDisplayPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPresentInfoKHR*>( this );
+    }
+
+    operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPresentInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayPresentInfoKHR const& ) const = default;
+#else
+    bool operator==( DisplayPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcRect == rhs.srcRect )
+          && ( dstRect == rhs.dstRect )
+          && ( persistent == rhs.persistent );
+    }
+
+    bool operator!=( DisplayPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Rect2D srcRect = {};
+    VULKAN_HPP_NAMESPACE::Rect2D dstRect = {};
+    VULKAN_HPP_NAMESPACE::Bool32 persistent = {};
+  };
+  static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
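`DisplayPresentInfoKHR` extends `PresentInfoKHR` when presenting to a display swapchain (VK_KHR_display_swapchain). A sketch, not part of the patch (the queue, swapchain, and rectangles are hypothetical, exceptions-enabled vulkan.hpp assumed):

```cpp
#include <vulkan/vulkan.hpp>

// Presents one image, letting the display engine scale srcRect onto dstRect.
vk::Result presentToDisplay( vk::Queue queue, vk::SwapchainKHR swapchain, uint32_t imageIndex,
                             vk::Rect2D srcRect, vk::Rect2D dstRect )
{
    vk::DisplayPresentInfoKHR displayInfo;
    displayInfo.setSrcRect( srcRect ).setDstRect( dstRect ).setPersistent( VK_FALSE );

    vk::PresentInfoKHR presentInfo;
    presentInfo.setSwapchainCount( 1 )
               .setPSwapchains( &swapchain )
               .setPImageIndices( &imageIndex )
               .setPNext( &displayInfo );
    return queue.presentKHR( presentInfo ); // may also return eSuboptimalKHR
}
```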
+
+  struct DisplayPropertiesKHR
+  {
+    VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {},
+                                               const char* displayName_ = {},
+                                               VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {},
+                                               VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {},
+                                               VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
+                                               VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {},
+                                               VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {} ) VULKAN_HPP_NOEXCEPT
+      : display( display_ )
+      , displayName( displayName_ )
+      , physicalDimensions( physicalDimensions_ )
+      , physicalResolution( physicalResolution_ )
+      , supportedTransforms( supportedTransforms_ )
+      , planeReorderPossible( planeReorderPossible_ )
+      , persistentContent( persistentContent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayPropertiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : display( rhs.display )
+      , displayName( rhs.displayName )
+      , physicalDimensions( rhs.physicalDimensions )
+      , physicalResolution( rhs.physicalResolution )
+      , supportedTransforms( rhs.supportedTransforms )
+      , planeReorderPossible( rhs.planeReorderPossible )
+      , persistentContent( rhs.persistentContent )
+    {}
+
+    DisplayPropertiesKHR & operator=( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( DisplayPropertiesKHR ) );
+      return *this;
+    }
+
+    DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    DisplayPropertiesKHR& operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const*>(&rhs);
+      return *this;
+    }
+
+    operator VkDisplayPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkDisplayPropertiesKHR*>( this );
+    }
+
+    operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkDisplayPropertiesKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( DisplayPropertiesKHR const& ) const = default;
+#else
+    bool operator==( DisplayPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( display == rhs.display )
+          && ( displayName == rhs.displayName )
+          && ( physicalDimensions == rhs.physicalDimensions )
+          && ( physicalResolution == rhs.physicalResolution )
+          && ( supportedTransforms == rhs.supportedTransforms )
+          && ( planeReorderPossible == rhs.planeReorderPossible )
+          && ( persistentContent == rhs.persistentContent );
+    }
+
+    bool operator!=( DisplayPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DisplayKHR display = {};
+    const char* displayName = {};
+    VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions = {};
+    VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
+    VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {};
+    VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {};
+  };
+  static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<DisplayPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+ + struct DisplayProperties2KHR + { + VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : displayProperties( displayProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayProperties2KHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , displayProperties( rhs.displayProperties ) + {} + + DisplayProperties2KHR & operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( DisplayProperties2KHR ) - offsetof( DisplayProperties2KHR, pNext ) ); + return *this; + } + + DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + DisplayProperties2KHR& operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const *>(&rhs); + return *this; + } + + operator VkDisplayProperties2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkDisplayProperties2KHR*>( this ); + } + + operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkDisplayProperties2KHR*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DisplayProperties2KHR const& ) const = default; +#else + bool operator==( DisplayProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( displayProperties == rhs.displayProperties ); + } + + bool operator!=( DisplayProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {}; + }; + static_assert( sizeof( DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<DisplayProperties2KHR>::value, "struct wrapper is not a standard layout!"
); + + struct DisplaySurfaceCreateInfoKHR + { + VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {}, + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, + uint32_t planeIndex_ = {}, + uint32_t planeStackIndex_ = {}, + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, + float globalAlpha_ = {}, + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, + VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , displayMode( displayMode_ ) + , planeIndex( planeIndex_ ) + , planeStackIndex( planeStackIndex_ ) + , transform( transform_ ) + , globalAlpha( globalAlpha_ ) + , alphaMode( alphaMode_ ) + , imageExtent( imageExtent_ ) + {} + + VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , flags( rhs.flags ) + , displayMode( rhs.displayMode ) + , planeIndex( rhs.planeIndex ) + , planeStackIndex( rhs.planeStackIndex ) + , transform( rhs.transform ) + , globalAlpha( rhs.globalAlpha ) + , alphaMode( rhs.alphaMode ) + , imageExtent( rhs.imageExtent ) + {} + + DisplaySurfaceCreateInfoKHR & operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( DisplaySurfaceCreateInfoKHR ) - offsetof( DisplaySurfaceCreateInfoKHR, pNext ) ); + return *this; + } + + DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + DisplaySurfaceCreateInfoKHR& operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const *>(&rhs); + return *this; + } + + DisplaySurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT + { + displayMode = displayMode_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT + { + planeIndex = planeIndex_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT + { + planeStackIndex = planeStackIndex_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT + { + transform = transform_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT + { + globalAlpha = globalAlpha_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT + { + alphaMode = alphaMode_; + return *this; + } + + DisplaySurfaceCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ ) VULKAN_HPP_NOEXCEPT + { + imageExtent = imageExtent_; + return *this; + } + + operator VkDisplaySurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( this ); + } + + operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkDisplaySurfaceCreateInfoKHR*>( this ); + } + +#if
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DisplaySurfaceCreateInfoKHR const& ) const = default; +#else + bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( displayMode == rhs.displayMode ) + && ( planeIndex == rhs.planeIndex ) + && ( planeStackIndex == rhs.planeStackIndex ) + && ( transform == rhs.transform ) + && ( globalAlpha == rhs.globalAlpha ) + && ( alphaMode == rhs.alphaMode ) + && ( imageExtent == rhs.imageExtent ); + } + + bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {}; + uint32_t planeIndex = {}; + uint32_t planeStackIndex = {}; + VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity; + float globalAlpha = {}; + VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque; + VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {}; + }; + static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<DisplaySurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
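+
+  // A usage sketch (illustrative only; `displayMode` and `extent` are assumed
+  // to come from earlier VK_KHR_display queries). The setters return *this, so
+  // a create-info can be built fluently before handing it to the instance:
+  //
+  //   auto createInfo = vk::DisplaySurfaceCreateInfoKHR{}
+  //                       .setDisplayMode( displayMode )
+  //                       .setPlaneIndex( 0 )
+  //                       .setPlaneStackIndex( 0 )
+  //                       .setGlobalAlpha( 1.0f )
+  //                       .setImageExtent( extent );
+  //   vk::SurfaceKHR surface = instance.createDisplayPlaneSurfaceKHR( createInfo );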
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkDraw.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkDraw.hpp new file mode 100644 index 00000000..488e8f2a --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkDraw.hpp @@ -0,0 +1,345 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkAllocation.hpp" +#include "VkBind.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkCommand.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkConditional.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkDispatch.hpp" +#include "VkDisplay.hpp" +#include "VkDraw.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct DrawIndexedIndirectCommand + { + VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( uint32_t indexCount_ = {}, + uint32_t instanceCount_ = {}, + uint32_t firstIndex_ = {}, + int32_t vertexOffset_ = {}, + uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT + : indexCount( indexCount_ ) + , instanceCount( instanceCount_ ) + , firstIndex( firstIndex_ ) + , vertexOffset( vertexOffset_ ) + , firstInstance( firstInstance_ ) + {} + + VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const& rhs ) VULKAN_HPP_NOEXCEPT + : indexCount( rhs.indexCount ) + , instanceCount( rhs.instanceCount ) + , firstIndex( rhs.firstIndex ) + , vertexOffset( rhs.vertexOffset ) + , firstInstance( rhs.firstInstance ) + {} + + DrawIndexedIndirectCommand & operator=( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast<void*>(this), &rhs, sizeof( DrawIndexedIndirectCommand ) ); + return *this; + } + + DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + DrawIndexedIndirectCommand& operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand const *>(&rhs); + return *this; + } + + DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT + { + indexCount = indexCount_; + return *this; + } + + DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + { + instanceCount = instanceCount_; + return *this; + } + + DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT + { + firstIndex = firstIndex_; + return *this; + } + + DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT + { + vertexOffset = vertexOffset_; + return *this; + } + + DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT + { + firstInstance = firstInstance_; + return *this; + } + + operator VkDrawIndexedIndirectCommand const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkDrawIndexedIndirectCommand*>( this ); + } + + operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkDrawIndexedIndirectCommand*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DrawIndexedIndirectCommand const& ) const = default; +#else + bool operator==( DrawIndexedIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( indexCount == rhs.indexCount ) + && ( instanceCount == rhs.instanceCount ) + && ( firstIndex == rhs.firstIndex ) + && ( vertexOffset == rhs.vertexOffset ) + && ( firstInstance == rhs.firstInstance ); +
} + + bool operator!=( DrawIndexedIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t indexCount = {}; + uint32_t instanceCount = {}; + uint32_t firstIndex = {}; + int32_t vertexOffset = {}; + uint32_t firstInstance = {}; + }; + static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<DrawIndexedIndirectCommand>::value, "struct wrapper is not a standard layout!" ); + + struct DrawIndirectCommand + { + VULKAN_HPP_CONSTEXPR DrawIndirectCommand( uint32_t vertexCount_ = {}, + uint32_t instanceCount_ = {}, + uint32_t firstVertex_ = {}, + uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT + : vertexCount( vertexCount_ ) + , instanceCount( instanceCount_ ) + , firstVertex( firstVertex_ ) + , firstInstance( firstInstance_ ) + {} + + VULKAN_HPP_CONSTEXPR DrawIndirectCommand( DrawIndirectCommand const& rhs ) VULKAN_HPP_NOEXCEPT + : vertexCount( rhs.vertexCount ) + , instanceCount( rhs.instanceCount ) + , firstVertex( rhs.firstVertex ) + , firstInstance( rhs.firstInstance ) + {} + + DrawIndirectCommand & operator=( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast<void*>(this), &rhs, sizeof( DrawIndirectCommand ) ); + return *this; + } + + DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + DrawIndirectCommand& operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndirectCommand const *>(&rhs); + return *this; + } + + DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT + { + vertexCount = vertexCount_; + return *this; + } + + DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT + { + instanceCount = instanceCount_; + return *this; + } + + DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT + { + firstVertex = firstVertex_; + return *this; + } + + DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT + { + firstInstance = firstInstance_; + return *this; + } + + operator VkDrawIndirectCommand const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkDrawIndirectCommand*>( this ); + } + + operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkDrawIndirectCommand*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DrawIndirectCommand const& ) const = default; +#else + bool operator==( DrawIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( vertexCount == rhs.vertexCount ) + && ( instanceCount == rhs.instanceCount ) + && ( firstVertex == rhs.firstVertex ) + && ( firstInstance == rhs.firstInstance ); + } + + bool operator!=( DrawIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t vertexCount = {}; + uint32_t instanceCount = {}; + uint32_t firstVertex = {}; + uint32_t firstInstance = {}; + }; + static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<DrawIndirectCommand>::value, "struct wrapper is not a standard layout!" );
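+
+  // A usage sketch (illustrative only): these structs are never passed to the
+  // API directly; they define the memory layout an indirect draw reads from a
+  // buffer. `data`, `indirectBuffer` and `indexCount` are assumed placeholders,
+  // with `data` pointing at mapped host-visible memory backing `indirectBuffer`.
+  //
+  //   auto* cmds = static_cast<vk::DrawIndexedIndirectCommand*>( data );
+  //   cmds[0] = vk::DrawIndexedIndirectCommand{}
+  //               .setIndexCount( indexCount )
+  //               .setInstanceCount( 1 );
+  //   commandBuffer.drawIndexedIndirect( indirectBuffer, 0 /*offset*/, 1 /*drawCount*/,
+  //                                      sizeof( vk::DrawIndexedIndirectCommand ) /*stride*/ );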
+ + struct DrawMeshTasksIndirectCommandNV + { + VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = {}, + uint32_t firstTask_ = {} ) VULKAN_HPP_NOEXCEPT + : taskCount( taskCount_ ) + , firstTask( firstTask_ ) + {} + + VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( DrawMeshTasksIndirectCommandNV const& rhs ) VULKAN_HPP_NOEXCEPT + : taskCount( rhs.taskCount ) + , firstTask( rhs.firstTask ) + {} + + DrawMeshTasksIndirectCommandNV & operator=( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast<void*>(this), &rhs, sizeof( DrawMeshTasksIndirectCommandNV ) ); + return *this; + } + + DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + DrawMeshTasksIndirectCommandNV& operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV const *>(&rhs); + return *this; + } + + DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT + { + taskCount = taskCount_; + return *this; + } + + DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT + { + firstTask = firstTask_; + return *this; + } + + operator VkDrawMeshTasksIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkDrawMeshTasksIndirectCommandNV*>( this ); + } + + operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkDrawMeshTasksIndirectCommandNV*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DrawMeshTasksIndirectCommandNV const& ) const = default; +#else + bool operator==( DrawMeshTasksIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( taskCount == rhs.taskCount ) + && ( firstTask == rhs.firstTask ); + } + + bool operator!=( DrawMeshTasksIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t taskCount = {}; + uint32_t firstTask = {}; + }; + static_assert( sizeof( DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<DrawMeshTasksIndirectCommandNV>::value, "struct wrapper is not a standard layout!" ); +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkDrm.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkDrm.hpp new file mode 100644 index 00000000..514539cd --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkDrm.hpp @@ -0,0 +1,203 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License.
+// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkAllocation.hpp" +#include "VkBind.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkCommand.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkConditional.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkDraw.hpp" +#include "VkDispatch.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct DrmFormatModifierPropertiesEXT + { + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {}, + uint32_t drmFormatModifierPlaneCount_ = {}, + VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifier( drmFormatModifier_ ) + , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) + , drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ ) + {} + + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : drmFormatModifier( rhs.drmFormatModifier ) + , drmFormatModifierPlaneCount( rhs.drmFormatModifierPlaneCount ) + , drmFormatModifierTilingFeatures( rhs.drmFormatModifierTilingFeatures ) + {} + + DrmFormatModifierPropertiesEXT & operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast<void*>(this), &rhs, sizeof( DrmFormatModifierPropertiesEXT ) ); + return *this; + } + + DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + DrmFormatModifierPropertiesEXT& operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const *>(&rhs); + return *this; + } + + operator VkDrmFormatModifierPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkDrmFormatModifierPropertiesEXT*>( this ); + } + + operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkDrmFormatModifierPropertiesEXT*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DrmFormatModifierPropertiesEXT const& ) const = default; +#else + bool operator==( DrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( drmFormatModifier == rhs.drmFormatModifier ) + && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount ) + && ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures ); + } + + bool
operator!=( DrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint64_t drmFormatModifier = {}; + uint32_t drmFormatModifierPlaneCount = {}; + VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {}; + }; + static_assert( sizeof( DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<DrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" ); + + struct DrmFormatModifierPropertiesListEXT + { + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( uint32_t drmFormatModifierCount_ = {}, + VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifierCount( drmFormatModifierCount_ ) + , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ ) + {} + + VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , drmFormatModifierCount( rhs.drmFormatModifierCount ) + , pDrmFormatModifierProperties( rhs.pDrmFormatModifierProperties ) + {} + + DrmFormatModifierPropertiesListEXT & operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( DrmFormatModifierPropertiesListEXT ) - offsetof( DrmFormatModifierPropertiesListEXT, pNext ) ); + return *this; + } + + DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + DrmFormatModifierPropertiesListEXT& operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const *>(&rhs); + return *this; + } + + operator VkDrmFormatModifierPropertiesListEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT*>( this ); + } + + operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( DrmFormatModifierPropertiesListEXT const& ) const = default; +#else + bool operator==( DrmFormatModifierPropertiesListEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) + && ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties ); + } + + bool operator!=( DrmFormatModifierPropertiesListEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT; + void* pNext = {}; + uint32_t drmFormatModifierCount = {}; + VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties = {}; + }; + static_assert( sizeof( DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<DrmFormatModifierPropertiesListEXT>::value, "struct wrapper is not a standard layout!" ); +} // namespace VULKAN_HPP_NAMESPACE
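+
+  // A usage sketch (illustrative only): DrmFormatModifierPropertiesListEXT
+  // hangs off FormatProperties2 and follows the usual two-call idiom, with the
+  // caller owning the pDrmFormatModifierProperties storage:
+  //
+  //   vk::DrmFormatModifierPropertiesListEXT modifierList;
+  //   vk::FormatProperties2 formatProps;
+  //   formatProps.pNext = &modifierList; // first call fills drmFormatModifierCount
+  //   physicalDevice.getFormatProperties2( vk::Format::eR8G8B8A8Unorm, &formatProps );
+  //   std::vector<vk::DrmFormatModifierPropertiesEXT> modifiers( modifierList.drmFormatModifierCount );
+  //   modifierList.pDrmFormatModifierProperties = modifiers.data();
+  //   physicalDevice.getFormatProperties2( vk::Format::eR8G8B8A8Unorm, &formatProps ); // second call fills the array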
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkEvent.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkEvent.hpp new file mode 100644 index 00000000..889140bd --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkEvent.hpp @@ -0,0 +1,143 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkAllocation.hpp" +#include "VkBind.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkCommand.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkConditional.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkDraw.hpp" +#include "VkDispatch.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct EventCreateInfo + { + VULKAN_HPP_CONSTEXPR EventCreateInfo( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , flags( rhs.flags ) + {} + + EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( EventCreateInfo ) - offsetof( EventCreateInfo, pNext ) ); + return *this; + } + + EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + EventCreateInfo& operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::EventCreateInfo const *>(&rhs); + return *this; + } + + EventCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + EventCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkEventCreateInfo const&() const
VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkEventCreateInfo*>( this ); + } + + operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkEventCreateInfo*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( EventCreateInfo const& ) const = default; +#else + bool operator==( EventCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ); + } + + bool operator!=( EventCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {}; + }; + static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<EventCreateInfo>::value, "struct wrapper is not a standard layout!" ); +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkExport.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkExport.hpp new file mode 100644 index 00000000..5aea2c3d --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkExport.hpp @@ -0,0 +1,748 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry.
+ +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkAllocation.hpp" +#include "VkBind.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkCommand.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkConditional.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkDraw.hpp" +#include "VkDispatch.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct ExportFenceCreateInfo + { + VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExportFenceCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , handleTypes( rhs.handleTypes ) + {} + + ExportFenceCreateInfo & operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ExportFenceCreateInfo ) - offsetof( ExportFenceCreateInfo, pNext ) ); + return *this; + } + + ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ExportFenceCreateInfo& operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const *>(&rhs); + return *this; + } + + ExportFenceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportFenceCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } + + operator VkExportFenceCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkExportFenceCreateInfo*>( this ); + } + + operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkExportFenceCreateInfo*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExportFenceCreateInfo const& ) const = default; +#else + bool operator==( ExportFenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExportFenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {}; + }; + static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<ExportFenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
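+
+  // A usage sketch (illustrative only): ExportFenceCreateInfo is chained into
+  // FenceCreateInfo::pNext so the resulting fence can later be exported, e.g.
+  // as a POSIX sync fd on Android (VK_KHR_external_fence_fd):
+  //
+  //   vk::ExportFenceCreateInfo exportInfo{ vk::ExternalFenceHandleTypeFlagBits::eSyncFd };
+  //   vk::FenceCreateInfo fenceInfo;
+  //   fenceInfo.setPNext( &exportInfo );
+  //   vk::Fence fence = device.createFence( fenceInfo );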
+ +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ExportFenceWin32HandleInfoKHR + { + VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = {}, + DWORD dwAccess_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + : pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + , name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( ExportFenceWin32HandleInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , pAttributes( rhs.pAttributes ) + , dwAccess( rhs.dwAccess ) + , name( rhs.name ) + {} + + ExportFenceWin32HandleInfoKHR & operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ExportFenceWin32HandleInfoKHR ) - offsetof( ExportFenceWin32HandleInfoKHR, pNext ) ); + return *this; + } + + ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ExportFenceWin32HandleInfoKHR& operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const *>(&rhs); + return *this; + } + + ExportFenceWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } + + operator VkExportFenceWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkExportFenceWin32HandleInfoKHR*>( this ); + } + + operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkExportFenceWin32HandleInfoKHR*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExportFenceWin32HandleInfoKHR const& ) const = default; +#else + bool operator==( ExportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pAttributes == rhs.pAttributes ) + && ( dwAccess == rhs.dwAccess ) + && ( name == rhs.name ); + } + + bool operator!=( ExportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR; + const void* pNext = {}; + const SECURITY_ATTRIBUTES* pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; + }; + static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<ExportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!"
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct ExportMemoryAllocateInfo + { + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExportMemoryAllocateInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , handleTypes( rhs.handleTypes ) + {} + + ExportMemoryAllocateInfo & operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ExportMemoryAllocateInfo ) - offsetof( ExportMemoryAllocateInfo, pNext ) ); + return *this; + } + + ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ExportMemoryAllocateInfo& operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const *>(&rhs); + return *this; + } + + ExportMemoryAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportMemoryAllocateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } + + operator VkExportMemoryAllocateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkExportMemoryAllocateInfo*>( this ); + } + + operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkExportMemoryAllocateInfo*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExportMemoryAllocateInfo const& ) const = default; +#else + bool operator==( ExportMemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExportMemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {}; + }; + static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<ExportMemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
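+
+  // A usage sketch (illustrative only): ExportMemoryAllocateInfo is chained
+  // into MemoryAllocateInfo::pNext; on Android a common handle type is
+  // eAndroidHardwareBufferANDROID so the allocation can back an
+  // AHardwareBuffer. `size` and `memoryTypeIndex` are assumed placeholders.
+  //
+  //   vk::ExportMemoryAllocateInfo exportInfo{ vk::ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID };
+  //   vk::MemoryAllocateInfo allocInfo{ size, memoryTypeIndex };
+  //   allocInfo.setPNext( &exportInfo );
+  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );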
+ + struct ExportMemoryAllocateInfoNV + { + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( ExportMemoryAllocateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , handleTypes( rhs.handleTypes ) + {} + + ExportMemoryAllocateInfoNV & operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ExportMemoryAllocateInfoNV ) - offsetof( ExportMemoryAllocateInfoNV, pNext ) ); + return *this; + } + + ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ExportMemoryAllocateInfoNV& operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const *>(&rhs); + return *this; + } + + ExportMemoryAllocateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportMemoryAllocateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } + + operator VkExportMemoryAllocateInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkExportMemoryAllocateInfoNV*>( this ); + } + + operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkExportMemoryAllocateInfoNV*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExportMemoryAllocateInfoNV const& ) const = default; +#else + bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {}; + }; + static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<ExportMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!"
); + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ExportMemoryWin32HandleInfoKHR + { + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = {}, + DWORD dwAccess_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + : pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + , name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( ExportMemoryWin32HandleInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , pAttributes( rhs.pAttributes ) + , dwAccess( rhs.dwAccess ) + , name( rhs.name ) + {} + + ExportMemoryWin32HandleInfoKHR & operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ExportMemoryWin32HandleInfoKHR ) - offsetof( ExportMemoryWin32HandleInfoKHR, pNext ) ); + return *this; + } + + ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ExportMemoryWin32HandleInfoKHR& operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const *>(&rhs); + return *this; + } + + ExportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } + + operator VkExportMemoryWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkExportMemoryWin32HandleInfoKHR*>( this ); + } + + operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkExportMemoryWin32HandleInfoKHR*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExportMemoryWin32HandleInfoKHR const& ) const = default; +#else + bool operator==( ExportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pAttributes == rhs.pAttributes ) + && ( dwAccess == rhs.dwAccess ) + && ( name == rhs.name ); + } + + bool operator!=( ExportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR; + const void* pNext = {}; + const SECURITY_ATTRIBUTES* pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; + }; + static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<ExportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!"
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ExportMemoryWin32HandleInfoNV + { + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES* pAttributes_ = {}, + DWORD dwAccess_ = {} ) VULKAN_HPP_NOEXCEPT + : pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + {} + + VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( ExportMemoryWin32HandleInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , pAttributes( rhs.pAttributes ) + , dwAccess( rhs.dwAccess ) + {} + + ExportMemoryWin32HandleInfoNV & operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ExportMemoryWin32HandleInfoNV ) - offsetof( ExportMemoryWin32HandleInfoNV, pNext ) ); + return *this; + } + + ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ExportMemoryWin32HandleInfoNV& operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const *>(&rhs); + return *this; + } + + ExportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + operator VkExportMemoryWin32HandleInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkExportMemoryWin32HandleInfoNV*>( this ); + } + + operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkExportMemoryWin32HandleInfoNV*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExportMemoryWin32HandleInfoNV const& ) const = default; +#else + bool operator==( ExportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pAttributes == rhs.pAttributes ) + && ( dwAccess == rhs.dwAccess ); + } + + bool operator!=( ExportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV; + const void* pNext = {}; + const SECURITY_ATTRIBUTES* pAttributes = {}; + DWORD dwAccess = {}; + }; + static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<ExportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!"
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct ExportSemaphoreCreateInfo + { + VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT + : handleTypes( handleTypes_ ) + {} + + VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , handleTypes( rhs.handleTypes ) + {} + + ExportSemaphoreCreateInfo & operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ExportSemaphoreCreateInfo ) - offsetof( ExportSemaphoreCreateInfo, pNext ) ); + return *this; + } + + ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ExportSemaphoreCreateInfo& operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const *>(&rhs); + return *this; + } + + ExportSemaphoreCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportSemaphoreCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT + { + handleTypes = handleTypes_; + return *this; + } + + operator VkExportSemaphoreCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkExportSemaphoreCreateInfo*>( this ); + } + + operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkExportSemaphoreCreateInfo*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExportSemaphoreCreateInfo const& ) const = default; +#else + bool operator==( ExportSemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleTypes == rhs.handleTypes ); + } + + bool operator!=( ExportSemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {}; + }; + static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<ExportSemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
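+
+  // A usage sketch (illustrative only): the same pNext pattern, here marking a
+  // semaphore as exportable as a sync fd (VK_KHR_external_semaphore_fd):
+  //
+  //   vk::ExportSemaphoreCreateInfo exportInfo{ vk::ExternalSemaphoreHandleTypeFlagBits::eSyncFd };
+  //   vk::SemaphoreCreateInfo semaphoreInfo;
+  //   semaphoreInfo.setPNext( &exportInfo );
+  //   vk::Semaphore semaphore = device.createSemaphore( semaphoreInfo );
+  //   // later, once the extension is enabled and its entry point loaded:
+  //   //   int fd = device.getSemaphoreFdKHR( { semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eSyncFd } );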
+ +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ExportSemaphoreWin32HandleInfoKHR + { + VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( const SECURITY_ATTRIBUTES* pAttributes_ = {}, + DWORD dwAccess_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + : pAttributes( pAttributes_ ) + , dwAccess( dwAccess_ ) + , name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( ExportSemaphoreWin32HandleInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , pAttributes( rhs.pAttributes ) + , dwAccess( rhs.dwAccess ) + , name( rhs.name ) + {} + + ExportSemaphoreWin32HandleInfoKHR & operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ExportSemaphoreWin32HandleInfoKHR ) - offsetof( ExportSemaphoreWin32HandleInfoKHR, pNext ) ); + return *this; + } + + ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ExportSemaphoreWin32HandleInfoKHR& operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const *>(&rhs); + return *this; + } + + ExportSemaphoreWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT + { + pAttributes = pAttributes_; + return *this; + } + + ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT + { + dwAccess = dwAccess_; + return *this; + } + + ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } + + operator VkExportSemaphoreWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR*>( this ); + } + + operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const& ) const = default; +#else + bool operator==( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pAttributes == rhs.pAttributes ) + && ( dwAccess == rhs.dwAccess ) + && ( name == rhs.name ); + } + + bool operator!=( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR; + const void* pNext = {}; + const SECURITY_ATTRIBUTES* pAttributes = {}; + DWORD dwAccess = {}; + LPCWSTR name = {}; + }; + static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<ExportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkExtension.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkExtension.hpp new file mode 100644 index 00000000..4840a810 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkExtension.hpp @@ -0,0 +1,137 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc.
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkAllocation.hpp" +#include "VkBind.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkCommand.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkConditional.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkDraw.hpp" +#include "VkDispatch.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct ExtensionProperties + { + VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& extensionName_ = {}, + uint32_t specVersion_ = {} ) VULKAN_HPP_NOEXCEPT + : extensionName{} + , specVersion( specVersion_ ) + { + VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_EXTENSION_NAME_SIZE>::copy( extensionName, extensionName_ ); + } + + VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( ExtensionProperties const& rhs ) VULKAN_HPP_NOEXCEPT + : extensionName{} + , specVersion( rhs.specVersion ) + { + VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_EXTENSION_NAME_SIZE>::copy( extensionName, rhs.extensionName ); + } + + ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast<void*>(this), &rhs, sizeof( ExtensionProperties ) ); + return *this; + } + + ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ExtensionProperties& operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExtensionProperties const *>(&rhs); + return *this; +
} + + operator VkExtensionProperties const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ExtensionProperties const& ) const = default; +#else + bool operator==( ExtensionProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( memcmp( extensionName, rhs.extensionName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 ) + && ( specVersion == rhs.specVersion ); + } + + bool operator!=( ExtensionProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + char extensionName[VK_MAX_EXTENSION_NAME_SIZE] = {}; + uint32_t specVersion = {}; + }; + static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkExtent.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkExtent.hpp new file mode 100644 index 00000000..fcba8e32 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkExtent.hpp @@ -0,0 +1,73 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkExtension.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkDraw.hpp"
+#include "VkDispatch.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkExternal.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkExternal.hpp
new file mode 100644
index 00000000..bf7b5225
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkExternal.hpp
@@ -0,0 +1,856 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkExtension.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkDraw.hpp"
+#include "VkDispatch.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+#include "VkExternal.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct ExternalMemoryProperties
+  {
+    VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {},
+                                                   VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {},
+                                                   VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : externalMemoryFeatures( externalMemoryFeatures_ )
+      , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
+      , compatibleHandleTypes( compatibleHandleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryProperties const& rhs ) VULKAN_HPP_NOEXCEPT
+      : externalMemoryFeatures( rhs.externalMemoryFeatures )
+      , exportFromImportedHandleTypes( rhs.exportFromImportedHandleTypes )
+      , compatibleHandleTypes( rhs.compatibleHandleTypes )
+    {}
+
+    ExternalMemoryProperties & operator=( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( ExternalMemoryProperties ) );
+      return *this;
+    }
+
+    ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExternalMemoryProperties& operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkExternalMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryProperties*>( this );
+    }
+
+    operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryProperties*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExternalMemoryProperties const& ) const = default;
+#else
+    bool operator==( ExternalMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( externalMemoryFeatures == rhs.externalMemoryFeatures )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
+    }
+
+    bool operator!=( ExternalMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {};
+  };
+  static_assert( sizeof( ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct ExternalBufferProperties
+  {
+    VULKAN_HPP_CONSTEXPR ExternalBufferProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : externalMemoryProperties( externalMemoryProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalBufferProperties const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , externalMemoryProperties( rhs.externalMemoryProperties )
+    {}
+
+    ExternalBufferProperties & operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ExternalBufferProperties ) - offsetof( ExternalBufferProperties, pNext ) );
+      return *this;
+    }
+
+    ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExternalBufferProperties& operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalBufferProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkExternalBufferProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalBufferProperties*>( this );
+    }
+
+    operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalBufferProperties*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExternalBufferProperties const& ) const = default;
+#else
+    bool operator==( ExternalBufferProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( externalMemoryProperties == rhs.externalMemoryProperties );
+    }
+
+    bool operator!=( ExternalBufferProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
+  };
+  static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalBufferProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct ExternalFenceProperties
+  {
+    VULKAN_HPP_CONSTEXPR ExternalFenceProperties( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {},
+                                                  VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {},
+                                                  VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
+      : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
+      , compatibleHandleTypes( compatibleHandleTypes_ )
+      , externalFenceFeatures( externalFenceFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceProperties const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , exportFromImportedHandleTypes( rhs.exportFromImportedHandleTypes )
+      , compatibleHandleTypes( rhs.compatibleHandleTypes )
+      , externalFenceFeatures( rhs.externalFenceFeatures )
+    {}
+
+    ExternalFenceProperties & operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ExternalFenceProperties ) - offsetof( ExternalFenceProperties, pNext ) );
+      return *this;
+    }
+
+    ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExternalFenceProperties& operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFenceProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkExternalFenceProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalFenceProperties*>( this );
+    }
+
+    operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalFenceProperties*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExternalFenceProperties const& ) const = default;
+#else
+    bool operator==( ExternalFenceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
+          && ( externalFenceFeatures == rhs.externalFenceFeatures );
+    }
+
+    bool operator!=( ExternalFenceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {};
+  };
+  static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalFenceProperties>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+  struct ExternalFormatANDROID
+  {
+    VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = {} ) VULKAN_HPP_NOEXCEPT
+      : externalFormat( externalFormat_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( ExternalFormatANDROID const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , externalFormat( rhs.externalFormat )
+    {}
+
+    ExternalFormatANDROID & operator=( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ExternalFormatANDROID ) - offsetof( ExternalFormatANDROID, pNext ) );
+      return *this;
+    }
+
+    ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExternalFormatANDROID& operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const *>(&rhs);
+      return *this;
+    }
+
+    ExternalFormatANDROID & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      externalFormat = externalFormat_;
+      return *this;
+    }
+
+    operator VkExternalFormatANDROID const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalFormatANDROID*>( this );
+    }
+
+    operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalFormatANDROID*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExternalFormatANDROID const& ) const = default;
+#else
+    bool operator==( ExternalFormatANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( externalFormat == rhs.externalFormat );
+    }
+
+    bool operator!=( ExternalFormatANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID;
+    void* pNext = {};
+    uint64_t externalFormat = {};
+  };
+  static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalFormatANDROID>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  struct ExternalImageFormatProperties
+  {
+    VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : externalMemoryProperties( externalMemoryProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( ExternalImageFormatProperties const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , externalMemoryProperties( rhs.externalMemoryProperties )
+    {}
+
+    ExternalImageFormatProperties & operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ExternalImageFormatProperties ) - offsetof( ExternalImageFormatProperties, pNext ) );
+      return *this;
+    }
+
+    ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExternalImageFormatProperties& operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkExternalImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalImageFormatProperties*>( this );
+    }
+
+    operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalImageFormatProperties*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExternalImageFormatProperties const& ) const = default;
+#else
+    bool operator==( ExternalImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( externalMemoryProperties == rhs.externalMemoryProperties );
+    }
+
+    bool operator!=( ExternalImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
+  };
+  static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageFormatProperties
+  {
+    VULKAN_HPP_CONSTEXPR ImageFormatProperties( VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {},
+                                                uint32_t maxMipLevels_ = {},
+                                                uint32_t maxArrayLayers_ = {},
+                                                VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {},
+                                                VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxExtent( maxExtent_ )
+      , maxMipLevels( maxMipLevels_ )
+      , maxArrayLayers( maxArrayLayers_ )
+      , sampleCounts( sampleCounts_ )
+      , maxResourceSize( maxResourceSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageFormatProperties( ImageFormatProperties const& rhs ) VULKAN_HPP_NOEXCEPT
+      : maxExtent( rhs.maxExtent )
+      , maxMipLevels( rhs.maxMipLevels )
+      , maxArrayLayers( rhs.maxArrayLayers )
+      , sampleCounts( rhs.sampleCounts )
+      , maxResourceSize( rhs.maxResourceSize )
+    {}
+
+    ImageFormatProperties & operator=( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( ImageFormatProperties ) );
+      return *this;
+    }
+
+    ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageFormatProperties& operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageFormatProperties*>( this );
+    }
+
+    operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageFormatProperties*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageFormatProperties const& ) const = default;
+#else
+    bool operator==( ImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( maxExtent == rhs.maxExtent )
+          && ( maxMipLevels == rhs.maxMipLevels )
+          && ( maxArrayLayers == rhs.maxArrayLayers )
+          && ( sampleCounts == rhs.sampleCounts )
+          && ( maxResourceSize == rhs.maxResourceSize );
+    }
+
+    bool operator!=( ImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {};
+    uint32_t maxMipLevels = {};
+    uint32_t maxArrayLayers = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {};
+  };
+  static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct ExternalImageFormatPropertiesNV
+  {
+    VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {},
+                                                          VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {},
+                                                          VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {},
+                                                          VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : imageFormatProperties( imageFormatProperties_ )
+      , externalMemoryFeatures( externalMemoryFeatures_ )
+      , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
+      , compatibleHandleTypes( compatibleHandleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( ExternalImageFormatPropertiesNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : imageFormatProperties( rhs.imageFormatProperties )
+      , externalMemoryFeatures( rhs.externalMemoryFeatures )
+      , exportFromImportedHandleTypes( rhs.exportFromImportedHandleTypes )
+      , compatibleHandleTypes( rhs.compatibleHandleTypes )
+    {}
+
+    ExternalImageFormatPropertiesNV & operator=( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( ExternalImageFormatPropertiesNV ) );
+      return *this;
+    }
+
+    ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExternalImageFormatPropertiesNV& operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV const *>(&rhs);
+      return *this;
+    }
+
+    operator VkExternalImageFormatPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalImageFormatPropertiesNV*>( this );
+    }
+
+    operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalImageFormatPropertiesNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExternalImageFormatPropertiesNV const& ) const = default;
+#else
+    bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( imageFormatProperties == rhs.imageFormatProperties )
+          && ( externalMemoryFeatures == rhs.externalMemoryFeatures )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
+    }
+
+    bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {};
+  };
+  static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalImageFormatPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct ExternalMemoryBufferCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , handleTypes( rhs.handleTypes )
+    {}
+
+    ExternalMemoryBufferCreateInfo & operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ExternalMemoryBufferCreateInfo ) - offsetof( ExternalMemoryBufferCreateInfo, pNext ) );
+      return *this;
+    }
+
+    ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExternalMemoryBufferCreateInfo& operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    ExternalMemoryBufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalMemoryBufferCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator VkExternalMemoryBufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryBufferCreateInfo*>( this );
+    }
+
+    operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryBufferCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExternalMemoryBufferCreateInfo const& ) const = default;
+#else
+    bool operator==( ExternalMemoryBufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExternalMemoryBufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
+  };
+  static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalMemoryBufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct ExternalMemoryImageCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , handleTypes( rhs.handleTypes )
+    {}
+
+    ExternalMemoryImageCreateInfo & operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ExternalMemoryImageCreateInfo ) - offsetof( ExternalMemoryImageCreateInfo, pNext ) );
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExternalMemoryImageCreateInfo& operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator VkExternalMemoryImageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryImageCreateInfo*>( this );
+    }
+
+    operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryImageCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExternalMemoryImageCreateInfo const& ) const = default;
+#else
+    bool operator==( ExternalMemoryImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExternalMemoryImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
+  };
+  static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalMemoryImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct ExternalMemoryImageCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : handleTypes( handleTypes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( ExternalMemoryImageCreateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , handleTypes( rhs.handleTypes )
+    {}
+
+    ExternalMemoryImageCreateInfoNV & operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ExternalMemoryImageCreateInfoNV ) - offsetof( ExternalMemoryImageCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExternalMemoryImageCreateInfoNV& operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ExternalMemoryImageCreateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleTypes = handleTypes_;
+      return *this;
+    }
+
+    operator VkExternalMemoryImageCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV*>( this );
+    }
+
+    operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalMemoryImageCreateInfoNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExternalMemoryImageCreateInfoNV const& ) const = default;
+#else
+    bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleTypes == rhs.handleTypes );
+    }
+
+    bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
+  };
+  static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalMemoryImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct ExternalSemaphoreProperties
+  {
+    VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {},
+                                                      VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {},
+                                                      VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
+      : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ )
+      , compatibleHandleTypes( compatibleHandleTypes_ )
+      , externalSemaphoreFeatures( externalSemaphoreFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( ExternalSemaphoreProperties const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , exportFromImportedHandleTypes( rhs.exportFromImportedHandleTypes )
+      , compatibleHandleTypes( rhs.compatibleHandleTypes )
+      , externalSemaphoreFeatures( rhs.externalSemaphoreFeatures )
+    {}
+
+    ExternalSemaphoreProperties & operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ExternalSemaphoreProperties ) - offsetof( ExternalSemaphoreProperties, pNext ) );
+      return *this;
+    }
+
+    ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ExternalSemaphoreProperties& operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkExternalSemaphoreProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkExternalSemaphoreProperties*>( this );
+    }
+
+    operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkExternalSemaphoreProperties*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ExternalSemaphoreProperties const& ) const = default;
+#else
+    bool operator==( ExternalSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
+          && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
+          && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures );
+    }
+
+    bool operator!=( ExternalSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {};
+  };
+  static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ExternalSemaphoreProperties>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkFence.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkFence.hpp
new file mode 100644
index 00000000..fc6de259
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkFence.hpp
@@ -0,0 +1,321 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkExtension.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkDraw.hpp"
+#include "VkDispatch.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+#include "VkFence.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct FenceCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR FenceCreateInfo( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+    {}
+
+    FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( FenceCreateInfo ) - offsetof( FenceCreateInfo, pNext ) );
+      return *this;
+    }
+
+    FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    FenceCreateInfo& operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    FenceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FenceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator VkFenceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFenceCreateInfo*>( this );
+    }
+
+    operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFenceCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FenceCreateInfo const& ) const = default;
+#else
+    bool operator==( FenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( FenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {};
+  };
+  static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct FenceGetFdInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {},
+                                            VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+      : fence( fence_ )
+      , handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( FenceGetFdInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , fence( rhs.fence )
+      , handleType( rhs.handleType )
+    {}
+
+    FenceGetFdInfoKHR & operator=( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( FenceGetFdInfoKHR ) - offsetof( FenceGetFdInfoKHR, pNext ) );
+      return *this;
+    }
+
+    FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    FenceGetFdInfoKHR& operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    FenceGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    FenceGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator VkFenceGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFenceGetFdInfoKHR*>( this );
+    }
+
+    operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFenceGetFdInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FenceGetFdInfoKHR const& ) const = default;
+#else
+    bool operator==( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fence == rhs.fence )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Fence fence = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+  };
+  static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FenceGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct FenceGetWin32HandleInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {},
+                                                     VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+      : fence( fence_ )
+      , handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( FenceGetWin32HandleInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , fence( rhs.fence )
+      , handleType( rhs.handleType )
+    {}
+
+    FenceGetWin32HandleInfoKHR & operator=( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( FenceGetWin32HandleInfoKHR ) - offsetof( FenceGetWin32HandleInfoKHR, pNext ) );
+      return *this;
+    }
+
+    FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    FenceGetWin32HandleInfoKHR& operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    FenceGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fence = fence_;
+      return *this;
+    }
+
+    FenceGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator VkFenceGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( this );
+    }
+
+    operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFenceGetWin32HandleInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FenceGetWin32HandleInfoKHR const& ) const = default;
+#else
+    bool operator==( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fence == rhs.fence )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Fence fence = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+  };
+  static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FenceGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkFilter.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkFilter.hpp
new file mode 100644
index 00000000..c7477178
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkFilter.hpp
@@ -0,0 +1,142 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkExtension.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkFence.hpp"
+#include "VkDedicated.hpp"
+#include "VkDraw.hpp"
+#include "VkDispatch.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct FilterCubicImageViewImageFormatPropertiesEXT
+  {
+    VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {},
+                                                                       VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {} ) VULKAN_HPP_NOEXCEPT
+      : filterCubic( filterCubic_ )
+      , filterCubicMinmax( filterCubicMinmax_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , filterCubic( rhs.filterCubic )
+      , filterCubicMinmax( rhs.filterCubicMinmax )
+    {}
+
+    FilterCubicImageViewImageFormatPropertiesEXT & operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) - offsetof( FilterCubicImageViewImageFormatPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    FilterCubicImageViewImageFormatPropertiesEXT& operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkFilterCubicImageViewImageFormatPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFilterCubicImageViewImageFormatPropertiesEXT*>( this );
+    }
+
+    operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const& ) const = default;
+#else
+    bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( filterCubic == rhs.filterCubic )
+          && ( filterCubicMinmax == rhs.filterCubicMinmax );
+    }
+
+    bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {};
+    VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {};
+  };
+  static_assert( sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) == sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FilterCubicImageViewImageFormatPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkFormat.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkFormat.hpp
new file mode 100644
index 00000000..26b24e89
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkFormat.hpp
@@ -0,0 +1,206 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkExtension.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkFence.hpp"
+#include "VkDedicated.hpp"
+#include "VkDraw.hpp"
+#include "VkDispatch.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+#include "VkFilter.hpp"
+#include "VkFormat.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct FormatProperties
+  {
+    VULKAN_HPP_CONSTEXPR FormatProperties( VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {},
+                                           VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {},
+                                           VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
+      : linearTilingFeatures( linearTilingFeatures_ )
+      , optimalTilingFeatures( optimalTilingFeatures_ )
+      , bufferFeatures( bufferFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FormatProperties( FormatProperties const& rhs ) VULKAN_HPP_NOEXCEPT
+      : linearTilingFeatures( rhs.linearTilingFeatures )
+      , optimalTilingFeatures( rhs.optimalTilingFeatures )
+      , bufferFeatures( rhs.bufferFeatures )
+    {}
+
+    FormatProperties & operator=( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( FormatProperties ) );
+      return *this;
+    }
+
+    FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    FormatProperties& operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkFormatProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFormatProperties*>( this );
+    }
+
+    operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFormatProperties*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FormatProperties const& ) const = default;
+#else
+    bool operator==( FormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( linearTilingFeatures == rhs.linearTilingFeatures )
+          && ( optimalTilingFeatures == rhs.optimalTilingFeatures )
+          && ( bufferFeatures == rhs.bufferFeatures );
+    }
+
+    bool operator!=( FormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {};
+    VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures = {};
+  };
+  static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FormatProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct FormatProperties2
+  {
+    VULKAN_HPP_CONSTEXPR FormatProperties2( VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : formatProperties( formatProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties2 const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , formatProperties( rhs.formatProperties )
+    {}
+
+    FormatProperties2 & operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( FormatProperties2 ) - offsetof( FormatProperties2, pNext ) );
+      return *this;
+    }
+
+    FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    FormatProperties2& operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties2 const *>(&rhs);
+      return *this;
+    }
+
+    operator VkFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFormatProperties2*>( this );
+    }
+
+    operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFormatProperties2*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FormatProperties2 const& ) const = default;
+#else
+    bool operator==( FormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( formatProperties == rhs.formatProperties );
+    }
+
+    bool operator!=( FormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {};
+  };
+  static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FormatProperties2>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkFramebuffer.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkFramebuffer.hpp
new file mode 100644
index 00000000..161e8ce2
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkFramebuffer.hpp
@@ -0,0 +1,520 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkFramebuffer.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkFramebuffer.hpp
new file mode 100644
index 00000000..161e8ce2
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkFramebuffer.hpp
@@ -0,0 +1,520 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkExtension.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkFence.hpp"
+#include "VkDedicated.hpp"
+#include "VkDraw.hpp"
+#include "VkDispatch.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+#include "VkFilter.hpp"
+#include "VkFramebuffer.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct FramebufferAttachmentImageInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfoKHR( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {},
+                                                            VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
+                                                            uint32_t width_ = {},
+                                                            uint32_t height_ = {},
+                                                            uint32_t layerCount_ = {},
+                                                            uint32_t viewFormatCount_ = {},
+                                                            const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , usage( usage_ )
+      , width( width_ )
+      , height( height_ )
+      , layerCount( layerCount_ )
+      , viewFormatCount( viewFormatCount_ )
+      , pViewFormats( pViewFormats_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfoKHR( FramebufferAttachmentImageInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , usage( rhs.usage )
+      , width( rhs.width )
+      , height( rhs.height )
+      , layerCount( rhs.layerCount )
+      , viewFormatCount( rhs.viewFormatCount )
+      , pViewFormats( rhs.pViewFormats )
+    {}
+
+    FramebufferAttachmentImageInfoKHR & operator=( FramebufferAttachmentImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( FramebufferAttachmentImageInfoKHR ) - offsetof( FramebufferAttachmentImageInfoKHR, pNext ) );
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfoKHR( VkFramebufferAttachmentImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    FramebufferAttachmentImageInfoKHR& operator=( VkFramebufferAttachmentImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfoKHR & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfoKHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfoKHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfoKHR & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfoKHR & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewFormatCount = viewFormatCount_;
+      return *this;
+    }
+
+    FramebufferAttachmentImageInfoKHR & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewFormats = pViewFormats_;
+      return *this;
+    }
+
+    operator VkFramebufferAttachmentImageInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFramebufferAttachmentImageInfoKHR*>( this );
+    }
+
+    operator VkFramebufferAttachmentImageInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFramebufferAttachmentImageInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FramebufferAttachmentImageInfoKHR const& ) const = default;
+#else
+    bool operator==( FramebufferAttachmentImageInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( usage == rhs.usage )
+          && ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( layerCount == rhs.layerCount )
+          && ( viewFormatCount == rhs.viewFormatCount )
+          && ( pViewFormats == rhs.pViewFormats );
+    }
+
+    bool operator!=( FramebufferAttachmentImageInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+    uint32_t width = {};
+    uint32_t height = {};
+    uint32_t layerCount = {};
+    uint32_t viewFormatCount = {};
+    const VULKAN_HPP_NAMESPACE::Format* pViewFormats = {};
+  };
+  static_assert( sizeof( FramebufferAttachmentImageInfoKHR ) == sizeof( VkFramebufferAttachmentImageInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FramebufferAttachmentImageInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct FramebufferAttachmentsCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfoKHR( uint32_t attachmentImageInfoCount_ = {},
+                                                              const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfoKHR* pAttachmentImageInfos_ = {} ) VULKAN_HPP_NOEXCEPT
+      : attachmentImageInfoCount( attachmentImageInfoCount_ )
+      , pAttachmentImageInfos( pAttachmentImageInfos_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfoKHR( FramebufferAttachmentsCreateInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , attachmentImageInfoCount( rhs.attachmentImageInfoCount )
+      , pAttachmentImageInfos( rhs.pAttachmentImageInfos )
+    {}
+
+    FramebufferAttachmentsCreateInfoKHR & operator=( FramebufferAttachmentsCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( FramebufferAttachmentsCreateInfoKHR ) - offsetof( FramebufferAttachmentsCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    FramebufferAttachmentsCreateInfoKHR( VkFramebufferAttachmentsCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    FramebufferAttachmentsCreateInfoKHR& operator=( VkFramebufferAttachmentsCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    FramebufferAttachmentsCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FramebufferAttachmentsCreateInfoKHR & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentImageInfoCount = attachmentImageInfoCount_;
+      return *this;
+    }
+
+    FramebufferAttachmentsCreateInfoKHR & setPAttachmentImageInfos( const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfoKHR* pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachmentImageInfos = pAttachmentImageInfos_;
+      return *this;
+    }
+
+    operator VkFramebufferAttachmentsCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFramebufferAttachmentsCreateInfoKHR*>( this );
+    }
+
+    operator VkFramebufferAttachmentsCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFramebufferAttachmentsCreateInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FramebufferAttachmentsCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( FramebufferAttachmentsCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( attachmentImageInfoCount == rhs.attachmentImageInfoCount )
+          && ( pAttachmentImageInfos == rhs.pAttachmentImageInfos );
+    }
+
+    bool operator!=( FramebufferAttachmentsCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfoKHR;
+    const void* pNext = {};
+    uint32_t attachmentImageInfoCount = {};
+    const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfoKHR* pAttachmentImageInfos = {};
+  };
+  static_assert( sizeof( FramebufferAttachmentsCreateInfoKHR ) == sizeof( VkFramebufferAttachmentsCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FramebufferAttachmentsCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct FramebufferCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {},
+                                                VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
+                                                uint32_t attachmentCount_ = {},
+                                                const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = {},
+                                                uint32_t width_ = {},
+                                                uint32_t height_ = {},
+                                                uint32_t layers_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , renderPass( renderPass_ )
+      , attachmentCount( attachmentCount_ )
+      , pAttachments( pAttachments_ )
+      , width( width_ )
+      , height( height_ )
+      , layers( layers_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , renderPass( rhs.renderPass )
+      , attachmentCount( rhs.attachmentCount )
+      , pAttachments( rhs.pAttachments )
+      , width( rhs.width )
+      , height( rhs.height )
+      , layers( rhs.layers )
+    {}
+
+    FramebufferCreateInfo & operator=( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( FramebufferCreateInfo ) - offsetof( FramebufferCreateInfo, pNext ) );
+      return *this;
+    }
+
+    FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    FramebufferCreateInfo& operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    FramebufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    FramebufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    FramebufferCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+    FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    FramebufferCreateInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    FramebufferCreateInfo & setLayers( uint32_t layers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layers = layers_;
+      return *this;
+    }
+
+    operator VkFramebufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFramebufferCreateInfo*>( this );
+    }
+
+    operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFramebufferCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FramebufferCreateInfo const& ) const = default;
+#else
+    bool operator==( FramebufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( renderPass == rhs.renderPass )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments )
+          && ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( layers == rhs.layers );
+    }
+
+    bool operator!=( FramebufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+    uint32_t attachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::ImageView* pAttachments = {};
+    uint32_t width = {};
+    uint32_t height = {};
+    uint32_t layers = {};
+  };
+  static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FramebufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct FramebufferMixedSamplesCombinationNV
+  {
+    VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge,
+                                                               VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+                                                               VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {},
+                                                               VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {} ) VULKAN_HPP_NOEXCEPT
+      : coverageReductionMode( coverageReductionMode_ )
+      , rasterizationSamples( rasterizationSamples_ )
+      , depthStencilSamples( depthStencilSamples_ )
+      , colorSamples( colorSamples_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( FramebufferMixedSamplesCombinationNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , coverageReductionMode( rhs.coverageReductionMode )
+      , rasterizationSamples( rhs.rasterizationSamples )
+      , depthStencilSamples( rhs.depthStencilSamples )
+      , colorSamples( rhs.colorSamples )
+    {}
+
+    FramebufferMixedSamplesCombinationNV & operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( FramebufferMixedSamplesCombinationNV ) - offsetof( FramebufferMixedSamplesCombinationNV, pNext ) );
+      return *this;
+    }
+
+    FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    FramebufferMixedSamplesCombinationNV& operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const *>(&rhs);
+      return *this;
+    }
+
+    operator VkFramebufferMixedSamplesCombinationNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkFramebufferMixedSamplesCombinationNV*>( this );
+    }
+
+    operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( FramebufferMixedSamplesCombinationNV const& ) const = default;
+#else
+    bool operator==( FramebufferMixedSamplesCombinationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( coverageReductionMode == rhs.coverageReductionMode )
+          && ( rasterizationSamples == rhs.rasterizationSamples )
+          && ( depthStencilSamples == rhs.depthStencilSamples )
+          && ( colorSamples == rhs.colorSamples );
+    }
+
+    bool operator!=( FramebufferMixedSamplesCombinationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {};
+  };
+  static_assert( sizeof( FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<FramebufferMixedSamplesCombinationNV>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
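The two KHR structures in this file exist for VK_KHR_imageless_framebuffer: FramebufferAttachmentImageInfoKHR describes each attachment up front, and FramebufferAttachmentsCreateInfoKHR chains the descriptions into FramebufferCreateInfo through pNext. A sketch of that chain, assuming a valid vk::Device device and vk::RenderPass renderPass (both hypothetical here, as are the dimensions):

    vk::Format colorFormat = vk::Format::eB8G8R8A8Unorm;
    vk::FramebufferAttachmentImageInfoKHR imageInfo = vk::FramebufferAttachmentImageInfoKHR{}
      .setUsage( vk::ImageUsageFlagBits::eColorAttachment )
      .setWidth( 1280 )
      .setHeight( 720 )
      .setLayerCount( 1 )
      .setViewFormatCount( 1 )
      .setPViewFormats( &colorFormat );

    vk::FramebufferAttachmentsCreateInfoKHR attachmentsInfo = vk::FramebufferAttachmentsCreateInfoKHR{}
      .setAttachmentImageInfoCount( 1 )
      .setPAttachmentImageInfos( &imageInfo );

    // No image views are bound at creation time; they are supplied at
    // vkCmdBeginRenderPass time via VkRenderPassAttachmentBeginInfoKHR.
    vk::FramebufferCreateInfo createInfo = vk::FramebufferCreateInfo{}
      .setPNext( &attachmentsInfo )
      .setFlags( vk::FramebufferCreateFlagBits::eImagelessKHR )
      .setRenderPass( renderPass )
      .setAttachmentCount( 1 )
      .setWidth( 1280 )
      .setHeight( 720 )
      .setLayers( 1 );
    vk::Framebuffer framebuffer = device.createFramebuffer( createInfo );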
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkGeometry.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkGeometry.hpp
new file mode 100644
index 00000000..78a748e6
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkGeometry.hpp
@@ -0,0 +1,79 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkExtension.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkFence.hpp"
+#include "VkDedicated.hpp"
+#include "VkDraw.hpp"
+#include "VkDispatch.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+#include "VkFilter.hpp"
+#include "VkGeometry.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkGraphics.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkGraphics.hpp
new file mode 100644
index 00000000..4a218709
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkGraphics.hpp
@@ -0,0 +1,2040 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkExtension.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkFence.hpp"
+#include "VkDedicated.hpp"
+#include "VkDraw.hpp"
+#include "VkDispatch.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+#include "VkFilter.hpp"
+#include "VkGeometry.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct VertexInputBindingDescription
+  {
+    VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( uint32_t binding_ = {},
+                                                        uint32_t stride_ = {},
+                                                        VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT
+      : binding( binding_ )
+      , stride( stride_ )
+      , inputRate( inputRate_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( VertexInputBindingDescription const& rhs ) VULKAN_HPP_NOEXCEPT
+      : binding( rhs.binding )
+      , stride( rhs.stride )
+      , inputRate( rhs.inputRate )
+    {}
+
+    VertexInputBindingDescription & operator=( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VertexInputBindingDescription ) );
+      return *this;
+    }
+
+    VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    VertexInputBindingDescription& operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription const *>(&rhs);
+      return *this;
+    }
+
+    VertexInputBindingDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    VertexInputBindingDescription & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stride = stride_;
+      return *this;
+    }
+
+    VertexInputBindingDescription & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputRate = inputRate_;
+      return *this;
+    }
+
+    operator VkVertexInputBindingDescription const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVertexInputBindingDescription*>( this );
+    }
+
+    operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVertexInputBindingDescription*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( VertexInputBindingDescription const& ) const = default;
+#else
+    bool operator==( VertexInputBindingDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( binding == rhs.binding )
+          && ( stride == rhs.stride )
+          && ( inputRate == rhs.inputRate );
+    }
+
+    bool operator!=( VertexInputBindingDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t binding = {};
+    uint32_t stride = {};
+    VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex;
+  };
+  static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VertexInputBindingDescription>::value, "struct wrapper is not a standard layout!" );
+
+  struct VertexInputAttributeDescription
+  {
+    VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( uint32_t location_ = {},
+                                                          uint32_t binding_ = {},
+                                                          VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                                          uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT
+      : location( location_ )
+      , binding( binding_ )
+      , format( format_ )
+      , offset( offset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( VertexInputAttributeDescription const& rhs ) VULKAN_HPP_NOEXCEPT
+      : location( rhs.location )
+      , binding( rhs.binding )
+      , format( rhs.format )
+      , offset( rhs.offset )
+    {}
+
+    VertexInputAttributeDescription & operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VertexInputAttributeDescription ) );
+      return *this;
+    }
+
+    VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    VertexInputAttributeDescription& operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription const *>(&rhs);
+      return *this;
+    }
+
+    VertexInputAttributeDescription & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT
+    {
+      location = location_;
+      return *this;
+    }
+
+    VertexInputAttributeDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    VertexInputAttributeDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    VertexInputAttributeDescription & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    operator VkVertexInputAttributeDescription const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVertexInputAttributeDescription*>( this );
+    }
+
+    operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVertexInputAttributeDescription*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( VertexInputAttributeDescription const& ) const = default;
+#else
+    bool operator==( VertexInputAttributeDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( location == rhs.location )
+          && ( binding == rhs.binding )
+          && ( format == rhs.format )
+          && ( offset == rhs.offset );
+    }
+
+    bool operator!=( VertexInputAttributeDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t location = {};
+    uint32_t binding = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    uint32_t offset = {};
+  };
+  static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VertexInputAttributeDescription>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineVertexInputStateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {},
+                                                             uint32_t vertexBindingDescriptionCount_ = {},
+                                                             const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ = {},
+                                                             uint32_t vertexAttributeDescriptionCount_ = {},
+                                                             const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ )
+      , pVertexBindingDescriptions( pVertexBindingDescriptions_ )
+      , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ )
+      , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , vertexBindingDescriptionCount( rhs.vertexBindingDescriptionCount )
+      , pVertexBindingDescriptions( rhs.pVertexBindingDescriptions )
+      , vertexAttributeDescriptionCount( rhs.vertexAttributeDescriptionCount )
+      , pVertexAttributeDescriptions( rhs.pVertexAttributeDescriptions )
+    {}
+
+    PipelineVertexInputStateCreateInfo & operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineVertexInputStateCreateInfo ) - offsetof( PipelineVertexInputStateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineVertexInputStateCreateInfo& operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexBindingDescriptionCount = vertexBindingDescriptionCount_;
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVertexBindingDescriptions = pVertexBindingDescriptions_;
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_;
+      return *this;
+    }
+
+    PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVertexAttributeDescriptions = pVertexAttributeDescriptions_;
+      return *this;
+    }
+
+    operator VkPipelineVertexInputStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineVertexInputStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineVertexInputStateCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineVertexInputStateCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineVertexInputStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount )
+          && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions )
+          && ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount )
+          && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions );
+    }
+
+    bool operator!=( PipelineVertexInputStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {};
+    uint32_t vertexBindingDescriptionCount = {};
+    const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions = {};
+    uint32_t vertexAttributeDescriptionCount = {};
+    const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions = {};
+  };
+  static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineVertexInputStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
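Putting the three structures above together: a sketch of a vertex layout feeding PipelineVertexInputStateCreateInfo (the Vertex struct and its two attributes are hypothetical, not part of this patch):

    // A hypothetical interleaved vertex: position at location 0, UV at location 1.
    struct Vertex { float pos[3]; float uv[2]; };

    vk::VertexInputBindingDescription binding( 0, sizeof( Vertex ), vk::VertexInputRate::eVertex );
    vk::VertexInputAttributeDescription attributes[2] = {
      vk::VertexInputAttributeDescription( 0, 0, vk::Format::eR32G32B32Sfloat, offsetof( Vertex, pos ) ),
      vk::VertexInputAttributeDescription( 1, 0, vk::Format::eR32G32Sfloat, offsetof( Vertex, uv ) )
    };

    vk::PipelineVertexInputStateCreateInfo vertexInput = vk::PipelineVertexInputStateCreateInfo{}
      .setVertexBindingDescriptionCount( 1 )
      .setPVertexBindingDescriptions( &binding )
      .setVertexAttributeDescriptionCount( 2 )
      .setPVertexAttributeDescriptions( attributes );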
+  struct PipelineInputAssemblyStateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {},
+                                                               VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList,
+                                                               VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , topology( topology_ )
+      , primitiveRestartEnable( primitiveRestartEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , topology( rhs.topology )
+      , primitiveRestartEnable( rhs.primitiveRestartEnable )
+    {}
+
+    PipelineInputAssemblyStateCreateInfo & operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineInputAssemblyStateCreateInfo ) - offsetof( PipelineInputAssemblyStateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineInputAssemblyStateCreateInfo& operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineInputAssemblyStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineInputAssemblyStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineInputAssemblyStateCreateInfo & setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT
+    {
+      topology = topology_;
+      return *this;
+    }
+
+    PipelineInputAssemblyStateCreateInfo & setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      primitiveRestartEnable = primitiveRestartEnable_;
+      return *this;
+    }
+
+    operator VkPipelineInputAssemblyStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineInputAssemblyStateCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineInputAssemblyStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( topology == rhs.topology )
+          && ( primitiveRestartEnable == rhs.primitiveRestartEnable );
+    }
+
+    bool operator!=( PipelineInputAssemblyStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList;
+    VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {};
+  };
+  static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineInputAssemblyStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineTessellationStateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {},
+                                                              uint32_t patchControlPoints_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , patchControlPoints( patchControlPoints_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , patchControlPoints( rhs.patchControlPoints )
+    {}
+
+    PipelineTessellationStateCreateInfo & operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineTessellationStateCreateInfo ) - offsetof( PipelineTessellationStateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineTessellationStateCreateInfo& operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineTessellationStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineTessellationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT
+    {
+      patchControlPoints = patchControlPoints_;
+      return *this;
+    }
+
+    operator VkPipelineTessellationStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineTessellationStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineTessellationStateCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineTessellationStateCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineTessellationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( patchControlPoints == rhs.patchControlPoints );
+    }
+
+    bool operator!=( PipelineTessellationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {};
+    uint32_t patchControlPoints = {};
+  };
+  static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineTessellationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct Viewport
+  {
+    VULKAN_HPP_CONSTEXPR Viewport( float x_ = {},
+                                   float y_ = {},
+                                   float width_ = {},
+                                   float height_ = {},
+                                   float minDepth_ = {},
+                                   float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+      , width( width_ )
+      , height( height_ )
+      , minDepth( minDepth_ )
+      , maxDepth( maxDepth_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Viewport( Viewport const& rhs ) VULKAN_HPP_NOEXCEPT
+      : x( rhs.x )
+      , y( rhs.y )
+      , width( rhs.width )
+      , height( rhs.height )
+      , minDepth( rhs.minDepth )
+      , maxDepth( rhs.maxDepth )
+    {}
+
+    Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( Viewport ) );
+      return *this;
+    }
+
+    Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    Viewport& operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Viewport const *>(&rhs);
+      return *this;
+    }
+
+    Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT
+    {
+      width = width_;
+      return *this;
+    }
+
+    Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT
+    {
+      height = height_;
+      return *this;
+    }
+
+    Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minDepth = minDepth_;
+      return *this;
+    }
+
+    Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxDepth = maxDepth_;
+      return *this;
+    }
+
+    operator VkViewport const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkViewport*>( this );
+    }
+
+    operator VkViewport &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkViewport*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Viewport const& ) const = default;
+#else
+    bool operator==( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( width == rhs.width )
+          && ( height == rhs.height )
+          && ( minDepth == rhs.minDepth )
+          && ( maxDepth == rhs.maxDepth );
+    }
+
+    bool operator!=( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    float x = {};
+    float y = {};
+    float width = {};
+    float height = {};
+    float minDepth = {};
+    float maxDepth = {};
+  };
+  static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Viewport>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineViewportStateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {},
+                                                          uint32_t viewportCount_ = {},
+                                                          const VULKAN_HPP_NAMESPACE::Viewport* pViewports_ = {},
+                                                          uint32_t scissorCount_ = {},
+                                                          const VULKAN_HPP_NAMESPACE::Rect2D* pScissors_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , viewportCount( viewportCount_ )
+      , pViewports( pViewports_ )
+      , scissorCount( scissorCount_ )
+      , pScissors( pScissors_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , viewportCount( rhs.viewportCount )
+      , pViewports( rhs.pViewports )
+      , scissorCount( rhs.scissorCount )
+      , pScissors( rhs.pScissors )
+    {}
+
+    PipelineViewportStateCreateInfo & operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineViewportStateCreateInfo ) - offsetof( PipelineViewportStateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineViewportStateCreateInfo& operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = viewportCount_;
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo & setPViewports( const VULKAN_HPP_NAMESPACE::Viewport* pViewports_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewports = pViewports_;
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      scissorCount = scissorCount_;
+      return *this;
+    }
+
+    PipelineViewportStateCreateInfo & setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D* pScissors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pScissors = pScissors_;
+      return *this;
+    }
+
+    operator VkPipelineViewportStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportStateCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineViewportStateCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineViewportStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( viewportCount == rhs.viewportCount )
+          && ( pViewports == rhs.pViewports )
+          && ( scissorCount == rhs.scissorCount )
+          && ( pScissors == rhs.pScissors );
+    }
+
+    bool operator!=( PipelineViewportStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {};
+    uint32_t viewportCount = {};
+    const VULKAN_HPP_NAMESPACE::Viewport* pViewports = {};
+    uint32_t scissorCount = {};
+    const VULKAN_HPP_NAMESPACE::Rect2D* pScissors = {};
+  };
+  static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
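A sketch of viewport and scissor state for a fixed 1280x720 target using the setters above (dimensions illustrative):

    vk::Viewport viewport = vk::Viewport{}
      .setWidth( 1280.0f )
      .setHeight( 720.0f )
      .setMaxDepth( 1.0f ); // minDepth stays 0.0f from the default initializer
    vk::Rect2D scissor( vk::Offset2D( 0, 0 ), vk::Extent2D( 1280, 720 ) );

    vk::PipelineViewportStateCreateInfo viewportState = vk::PipelineViewportStateCreateInfo{}
      .setViewportCount( 1 )
      .setPViewports( &viewport )
      .setScissorCount( 1 )
      .setPScissors( &scissor );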
+  struct PipelineRasterizationStateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {},
+                                                               VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {},
+                                                               VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {},
+                                                               VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill,
+                                                               VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {},
+                                                               VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise,
+                                                               VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {},
+                                                               float depthBiasConstantFactor_ = {},
+                                                               float depthBiasClamp_ = {},
+                                                               float depthBiasSlopeFactor_ = {},
+                                                               float lineWidth_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , depthClampEnable( depthClampEnable_ )
+      , rasterizerDiscardEnable( rasterizerDiscardEnable_ )
+      , polygonMode( polygonMode_ )
+      , cullMode( cullMode_ )
+      , frontFace( frontFace_ )
+      , depthBiasEnable( depthBiasEnable_ )
+      , depthBiasConstantFactor( depthBiasConstantFactor_ )
+      , depthBiasClamp( depthBiasClamp_ )
+      , depthBiasSlopeFactor( depthBiasSlopeFactor_ )
+      , lineWidth( lineWidth_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , depthClampEnable( rhs.depthClampEnable )
+      , rasterizerDiscardEnable( rhs.rasterizerDiscardEnable )
+      , polygonMode( rhs.polygonMode )
+      , cullMode( rhs.cullMode )
+      , frontFace( rhs.frontFace )
+      , depthBiasEnable( rhs.depthBiasEnable )
+      , depthBiasConstantFactor( rhs.depthBiasConstantFactor )
+      , depthBiasClamp( rhs.depthBiasClamp )
+      , depthBiasSlopeFactor( rhs.depthBiasSlopeFactor )
+      , lineWidth( rhs.lineWidth )
+    {}
+
+    PipelineRasterizationStateCreateInfo & operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineRasterizationStateCreateInfo ) - offsetof( PipelineRasterizationStateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineRasterizationStateCreateInfo& operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthClampEnable = depthClampEnable_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rasterizerDiscardEnable = rasterizerDiscardEnable_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      polygonMode = polygonMode_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cullMode = cullMode_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT
+    {
+      frontFace = frontFace_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasEnable = depthBiasEnable_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasConstantFactor = depthBiasConstantFactor_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasClamp = depthBiasClamp_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBiasSlopeFactor = depthBiasSlopeFactor_;
+      return *this;
+    }
+
+    PipelineRasterizationStateCreateInfo & setLineWidth( float lineWidth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      lineWidth = lineWidth_;
+      return *this;
+    }
+
+    operator VkPipelineRasterizationStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationStateCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineRasterizationStateCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineRasterizationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( depthClampEnable == rhs.depthClampEnable )
+          && ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable )
+          && ( polygonMode == rhs.polygonMode )
+          && ( cullMode == rhs.cullMode )
+          && ( frontFace == rhs.frontFace )
+          && ( depthBiasEnable == rhs.depthBiasEnable )
+          && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor )
+          && ( depthBiasClamp == rhs.depthBiasClamp )
+          && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor )
+          && ( lineWidth == rhs.lineWidth );
+    }
+
+    bool operator!=( PipelineRasterizationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {};
+    VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill;
+    VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {};
+    VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise;
+    VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {};
+    float depthBiasConstantFactor = {};
+    float depthBiasClamp = {};
+    float depthBiasSlopeFactor = {};
+    float lineWidth = {};
+  };
+  static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineRasterizationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
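A typical fill-mode rasterization state with back-face culling, sketched with the setters above (values illustrative; note that lineWidth other than 1.0f requires the wideLines device feature):

    vk::PipelineRasterizationStateCreateInfo rasterization = vk::PipelineRasterizationStateCreateInfo{}
      .setPolygonMode( vk::PolygonMode::eFill )
      .setCullMode( vk::CullModeFlagBits::eBack )
      .setFrontFace( vk::FrontFace::eCounterClockwise )
      .setLineWidth( 1.0f );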
); + + struct PipelineMultisampleStateCreateInfo + { + VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {}, + float minSampleShading_ = {}, + const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , rasterizationSamples( rasterizationSamples_ ) + , sampleShadingEnable( sampleShadingEnable_ ) + , minSampleShading( minSampleShading_ ) + , pSampleMask( pSampleMask_ ) + , alphaToCoverageEnable( alphaToCoverageEnable_ ) + , alphaToOneEnable( alphaToOneEnable_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , flags( rhs.flags ) + , rasterizationSamples( rhs.rasterizationSamples ) + , sampleShadingEnable( rhs.sampleShadingEnable ) + , minSampleShading( rhs.minSampleShading ) + , pSampleMask( rhs.pSampleMask ) + , alphaToCoverageEnable( rhs.alphaToCoverageEnable ) + , alphaToOneEnable( rhs.alphaToOneEnable ) + {} + + PipelineMultisampleStateCreateInfo & operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PipelineMultisampleStateCreateInfo ) - offsetof( PipelineMultisampleStateCreateInfo, pNext ) ); + return *this; + } + + PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PipelineMultisampleStateCreateInfo& operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PipelineMultisampleStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineMultisampleStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineMultisampleStateCreateInfo & setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT + { + rasterizationSamples = rasterizationSamples_; + return *this; + } + + PipelineMultisampleStateCreateInfo & setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT + { + sampleShadingEnable = sampleShadingEnable_; + return *this; + } + + PipelineMultisampleStateCreateInfo & setMinSampleShading( float minSampleShading_ ) VULKAN_HPP_NOEXCEPT + { + minSampleShading = minSampleShading_; + return *this; + } + + PipelineMultisampleStateCreateInfo & setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask_ ) VULKAN_HPP_NOEXCEPT + { + pSampleMask = pSampleMask_; + return *this; + } + + PipelineMultisampleStateCreateInfo & setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT + { + alphaToCoverageEnable = alphaToCoverageEnable_; + return *this; + } + + PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT + { + alphaToOneEnable = alphaToOneEnable_; + return *this; + } + + operator VkPipelineMultisampleStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineMultisampleStateCreateInfo const& ) const = default; +#else + bool operator==( PipelineMultisampleStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( rasterizationSamples == rhs.rasterizationSamples ) + && ( sampleShadingEnable == rhs.sampleShadingEnable ) + && ( minSampleShading == rhs.minSampleShading ) + && ( pSampleMask == rhs.pSampleMask ) + && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable ) + && ( alphaToOneEnable == rhs.alphaToOneEnable ); + } + + bool operator!=( PipelineMultisampleStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {}; + float minSampleShading = {}; + const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {}; + VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {}; + }; + static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct StencilOpState + { + VULKAN_HPP_CONSTEXPR StencilOpState( VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, + VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, + uint32_t compareMask_ = {}, + uint32_t writeMask_ = {}, + uint32_t reference_ = {} ) VULKAN_HPP_NOEXCEPT + : failOp( failOp_ ) + , passOp( passOp_ ) + , depthFailOp( depthFailOp_ ) + , compareOp( compareOp_ ) + , compareMask( compareMask_ ) + , writeMask( writeMask_ ) + , reference( reference_ ) + {} + + VULKAN_HPP_CONSTEXPR StencilOpState( StencilOpState const& rhs ) VULKAN_HPP_NOEXCEPT + : failOp( rhs.failOp ) + , passOp( rhs.passOp ) + , depthFailOp( rhs.depthFailOp ) + , compareOp( rhs.compareOp ) + , compareMask( rhs.compareMask ) + , writeMask( rhs.writeMask ) + , reference( rhs.reference ) + {} + + StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( StencilOpState ) ); + return *this; + } + + StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + StencilOpState& operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + StencilOpState & setFailOp( VULKAN_HPP_NAMESPACE::StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT + { + failOp = failOp_; + return *this; + } + + StencilOpState & setPassOp( VULKAN_HPP_NAMESPACE::StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT + { + passOp = passOp_; + return *this; + } + + StencilOpState & setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp 
+
+  struct StencilOpState
+  {
+    VULKAN_HPP_CONSTEXPR StencilOpState( VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
+                                         VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
+                                         VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep,
+                                         VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
+                                         uint32_t compareMask_ = {},
+                                         uint32_t writeMask_ = {},
+                                         uint32_t reference_ = {} ) VULKAN_HPP_NOEXCEPT
+      : failOp( failOp_ )
+      , passOp( passOp_ )
+      , depthFailOp( depthFailOp_ )
+      , compareOp( compareOp_ )
+      , compareMask( compareMask_ )
+      , writeMask( writeMask_ )
+      , reference( reference_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR StencilOpState( StencilOpState const& rhs ) VULKAN_HPP_NOEXCEPT
+      : failOp( rhs.failOp )
+      , passOp( rhs.passOp )
+      , depthFailOp( rhs.depthFailOp )
+      , compareOp( rhs.compareOp )
+      , compareMask( rhs.compareMask )
+      , writeMask( rhs.writeMask )
+      , reference( rhs.reference )
+    {}
+
+    StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( StencilOpState ) );
+      return *this;
+    }
+
+    StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    StencilOpState& operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StencilOpState const *>(&rhs);
+      return *this;
+    }
+
+    StencilOpState & setFailOp( VULKAN_HPP_NAMESPACE::StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      failOp = failOp_;
+      return *this;
+    }
+
+    StencilOpState & setPassOp( VULKAN_HPP_NAMESPACE::StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      passOp = passOp_;
+      return *this;
+    }
+
+    StencilOpState & setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthFailOp = depthFailOp_;
+      return *this;
+    }
+
+    StencilOpState & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compareOp = compareOp_;
+      return *this;
+    }
+
+    StencilOpState & setCompareMask( uint32_t compareMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compareMask = compareMask_;
+      return *this;
+    }
+
+    StencilOpState & setWriteMask( uint32_t writeMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      writeMask = writeMask_;
+      return *this;
+    }
+
+    StencilOpState & setReference( uint32_t reference_ ) VULKAN_HPP_NOEXCEPT
+    {
+      reference = reference_;
+      return *this;
+    }
+
+    operator VkStencilOpState const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkStencilOpState*>( this );
+    }
+
+    operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkStencilOpState*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( StencilOpState const& ) const = default;
+#else
+    bool operator==( StencilOpState const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( failOp == rhs.failOp )
+          && ( passOp == rhs.passOp )
+          && ( depthFailOp == rhs.depthFailOp )
+          && ( compareOp == rhs.compareOp )
+          && ( compareMask == rhs.compareMask )
+          && ( writeMask == rhs.writeMask )
+          && ( reference == rhs.reference );
+    }
+
+    bool operator!=( StencilOpState const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
+    VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
+    VULKAN_HPP_NAMESPACE::StencilOp depthFailOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
+    VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+    uint32_t compareMask = {};
+    uint32_t writeMask = {};
+    uint32_t reference = {};
+  };
+  static_assert( sizeof( StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<StencilOpState>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineDepthStencilStateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {},
+                                                              VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {},
+                                                              VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {},
+                                                              VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
+                                                              VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {},
+                                                              VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {},
+                                                              VULKAN_HPP_NAMESPACE::StencilOpState front_ = {},
+                                                              VULKAN_HPP_NAMESPACE::StencilOpState back_ = {},
+                                                              float minDepthBounds_ = {},
+                                                              float maxDepthBounds_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , depthTestEnable( depthTestEnable_ )
+      , depthWriteEnable( depthWriteEnable_ )
+      , depthCompareOp( depthCompareOp_ )
+      , depthBoundsTestEnable( depthBoundsTestEnable_ )
+      , stencilTestEnable( stencilTestEnable_ )
+      , front( front_ )
+      , back( back_ )
+      , minDepthBounds( minDepthBounds_ )
+      , maxDepthBounds( maxDepthBounds_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , depthTestEnable( rhs.depthTestEnable )
+      , depthWriteEnable( rhs.depthWriteEnable )
+      , depthCompareOp( rhs.depthCompareOp )
+      , depthBoundsTestEnable( rhs.depthBoundsTestEnable )
+      , stencilTestEnable( rhs.stencilTestEnable )
+      , front( rhs.front )
+      , back( rhs.back )
+      , minDepthBounds( rhs.minDepthBounds )
+      , maxDepthBounds( rhs.maxDepthBounds )
+    {}
+
+    PipelineDepthStencilStateCreateInfo & operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineDepthStencilStateCreateInfo ) - offsetof( PipelineDepthStencilStateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineDepthStencilStateCreateInfo& operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthTestEnable = depthTestEnable_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthWriteEnable = depthWriteEnable_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthCompareOp = depthCompareOp_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthBoundsTestEnable = depthBoundsTestEnable_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilTestEnable = stencilTestEnable_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setFront( VULKAN_HPP_NAMESPACE::StencilOpState front_ ) VULKAN_HPP_NOEXCEPT
+    {
+      front = front_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setBack( VULKAN_HPP_NAMESPACE::StencilOpState back_ ) VULKAN_HPP_NOEXCEPT
+    {
+      back = back_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setMinDepthBounds( float minDepthBounds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minDepthBounds = minDepthBounds_;
+      return *this;
+    }
+
+    PipelineDepthStencilStateCreateInfo & setMaxDepthBounds( float maxDepthBounds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxDepthBounds = maxDepthBounds_;
+      return *this;
+    }
+
+    operator VkPipelineDepthStencilStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineDepthStencilStateCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineDepthStencilStateCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineDepthStencilStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( depthTestEnable == rhs.depthTestEnable )
+          && ( depthWriteEnable == rhs.depthWriteEnable )
+          && ( depthCompareOp == rhs.depthCompareOp )
+          && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable )
+          && ( stencilTestEnable == rhs.stencilTestEnable )
+          && ( front == rhs.front )
+          && ( back == rhs.back )
+          && ( minDepthBounds == rhs.minDepthBounds )
+          && ( maxDepthBounds == rhs.maxDepthBounds );
+    }
+
+    bool operator!=( PipelineDepthStencilStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {};
+    VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+    VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {};
+    VULKAN_HPP_NAMESPACE::StencilOpState front = {};
+    VULKAN_HPP_NAMESPACE::StencilOpState back = {};
+    float minDepthBounds = {};
+    float maxDepthBounds = {};
+  };
+  static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineDepthStencilStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
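+
+  // Illustrative sketch (assumes the default `vk` namespace alias): a common
+  // depth-test-only configuration; front/back take the StencilOpState above.
+  //
+  //   vk::PipelineDepthStencilStateCreateInfo depthState = vk::PipelineDepthStencilStateCreateInfo()
+  //       .setDepthTestEnable( VK_TRUE )
+  //       .setDepthWriteEnable( VK_TRUE )
+  //       .setDepthCompareOp( vk::CompareOp::eLessOrEqual );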
+
+  struct PipelineColorBlendAttachmentState
+  {
+    VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {},
+                                                            VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+                                                            VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+                                                            VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
+                                                            VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+                                                            VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero,
+                                                            VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd,
+                                                            VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {} ) VULKAN_HPP_NOEXCEPT
+      : blendEnable( blendEnable_ )
+      , srcColorBlendFactor( srcColorBlendFactor_ )
+      , dstColorBlendFactor( dstColorBlendFactor_ )
+      , colorBlendOp( colorBlendOp_ )
+      , srcAlphaBlendFactor( srcAlphaBlendFactor_ )
+      , dstAlphaBlendFactor( dstAlphaBlendFactor_ )
+      , alphaBlendOp( alphaBlendOp_ )
+      , colorWriteMask( colorWriteMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const& rhs ) VULKAN_HPP_NOEXCEPT
+      : blendEnable( rhs.blendEnable )
+      , srcColorBlendFactor( rhs.srcColorBlendFactor )
+      , dstColorBlendFactor( rhs.dstColorBlendFactor )
+      , colorBlendOp( rhs.colorBlendOp )
+      , srcAlphaBlendFactor( rhs.srcAlphaBlendFactor )
+      , dstAlphaBlendFactor( rhs.dstAlphaBlendFactor )
+      , alphaBlendOp( rhs.alphaBlendOp )
+      , colorWriteMask( rhs.colorWriteMask )
+    {}
+
+    PipelineColorBlendAttachmentState & operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( PipelineColorBlendAttachmentState ) );
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineColorBlendAttachmentState& operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState const *>(&rhs);
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      blendEnable = blendEnable_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcColorBlendFactor = srcColorBlendFactor_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstColorBlendFactor = dstColorBlendFactor_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorBlendOp = colorBlendOp_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAlphaBlendFactor = srcAlphaBlendFactor_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAlphaBlendFactor = dstAlphaBlendFactor_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      alphaBlendOp = alphaBlendOp_;
+      return *this;
+    }
+
+    PipelineColorBlendAttachmentState & setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorWriteMask = colorWriteMask_;
+      return *this;
+    }
+
+    operator VkPipelineColorBlendAttachmentState const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineColorBlendAttachmentState*>( this );
+    }
+
+    operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineColorBlendAttachmentState*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineColorBlendAttachmentState const& ) const = default;
+#else
+    bool operator==( PipelineColorBlendAttachmentState const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( blendEnable == rhs.blendEnable )
+          && ( srcColorBlendFactor == rhs.srcColorBlendFactor )
+          && ( dstColorBlendFactor == rhs.dstColorBlendFactor )
+          && ( colorBlendOp == rhs.colorBlendOp )
+          && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor )
+          && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor )
+          && ( alphaBlendOp == rhs.alphaBlendOp )
+          && ( colorWriteMask == rhs.colorWriteMask );
+    }
+
+    bool operator!=( PipelineColorBlendAttachmentState const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {};
+    VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+    VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
+    VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
+    VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {};
+  };
+  static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineColorBlendAttachmentState>::value, "struct wrapper is not a standard layout!" );
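+
+  // Illustrative sketch (assumes the default `vk` namespace alias): classic
+  // source-over alpha blending for a single color attachment.
+  //
+  //   vk::PipelineColorBlendAttachmentState blendAttachment = vk::PipelineColorBlendAttachmentState()
+  //       .setBlendEnable( VK_TRUE )
+  //       .setSrcColorBlendFactor( vk::BlendFactor::eSrcAlpha )
+  //       .setDstColorBlendFactor( vk::BlendFactor::eOneMinusSrcAlpha )
+  //       .setColorWriteMask( vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG
+  //                         | vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA );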
+
+  struct PipelineColorBlendStateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {},
+                                                               VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {},
+                                                               VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear,
+                                                               uint32_t attachmentCount_ = {},
+                                                               const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments_ = {},
+                                                               std::array<float,4> const& blendConstants_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , logicOpEnable( logicOpEnable_ )
+      , logicOp( logicOp_ )
+      , attachmentCount( attachmentCount_ )
+      , pAttachments( pAttachments_ )
+      , blendConstants{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,4>::copy( blendConstants, blendConstants_ );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , logicOpEnable( rhs.logicOpEnable )
+      , logicOp( rhs.logicOp )
+      , attachmentCount( rhs.attachmentCount )
+      , pAttachments( rhs.pAttachments )
+      , blendConstants{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,4>::copy( blendConstants, rhs.blendConstants );
+    }
+
+    PipelineColorBlendStateCreateInfo & operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineColorBlendStateCreateInfo ) - offsetof( PipelineColorBlendStateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineColorBlendStateCreateInfo& operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      logicOpEnable = logicOpEnable_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & setLogicOp( VULKAN_HPP_NAMESPACE::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      logicOp = logicOp_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+    PipelineColorBlendStateCreateInfo & setBlendConstants( std::array<float,4> blendConstants_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( blendConstants, blendConstants_.data(), 4 * sizeof( float ) );
+      return *this;
+    }
+
+    operator VkPipelineColorBlendStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineColorBlendStateCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineColorBlendStateCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineColorBlendStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( logicOpEnable == rhs.logicOpEnable )
+          && ( logicOp == rhs.logicOp )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments )
+          && ( memcmp( blendConstants, rhs.blendConstants, 4 * sizeof( float ) ) == 0 );
+    }
+
+    bool operator!=( PipelineColorBlendStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {};
+    VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear;
+    uint32_t attachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments = {};
+    float blendConstants[4] = {};
+  };
+  static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineColorBlendStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineDynamicStateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {},
+                                                         uint32_t dynamicStateCount_ = {},
+                                                         const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , dynamicStateCount( dynamicStateCount_ )
+      , pDynamicStates( pDynamicStates_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , dynamicStateCount( rhs.dynamicStateCount )
+      , pDynamicStates( rhs.pDynamicStates )
+    {}
+
+    PipelineDynamicStateCreateInfo & operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineDynamicStateCreateInfo ) - offsetof( PipelineDynamicStateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineDynamicStateCreateInfo& operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo & setDynamicStateCount( uint32_t dynamicStateCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dynamicStateCount = dynamicStateCount_;
+      return *this;
+    }
+
+    PipelineDynamicStateCreateInfo & setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDynamicStates = pDynamicStates_;
+      return *this;
+    }
+
+    operator VkPipelineDynamicStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineDynamicStateCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineDynamicStateCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineDynamicStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( dynamicStateCount == rhs.dynamicStateCount )
+          && ( pDynamicStates == rhs.pDynamicStates );
+    }
+
+    bool operator!=( PipelineDynamicStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {};
+    uint32_t dynamicStateCount = {};
+    const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates = {};
+  };
+  static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineDynamicStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
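+
+  // Illustrative sketch (assumes the default `vk` namespace alias): marking
+  // viewport and scissor as dynamic so they are supplied at command-buffer
+  // record time rather than baked into the pipeline.
+  //
+  //   const vk::DynamicState dynamicStates[] = { vk::DynamicState::eViewport,
+  //                                              vk::DynamicState::eScissor };
+  //   vk::PipelineDynamicStateCreateInfo dynamicState = vk::PipelineDynamicStateCreateInfo()
+  //       .setDynamicStateCount( 2 )
+  //       .setPDynamicStates( dynamicStates );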
+
+  struct GraphicsPipelineCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
+                                                        uint32_t stageCount_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ = {},
+                                                        const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ = {},
+                                                        VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
+                                                        VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
+                                                        uint32_t subpass_ = {},
+                                                        VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
+                                                        int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , stageCount( stageCount_ )
+      , pStages( pStages_ )
+      , pVertexInputState( pVertexInputState_ )
+      , pInputAssemblyState( pInputAssemblyState_ )
+      , pTessellationState( pTessellationState_ )
+      , pViewportState( pViewportState_ )
+      , pRasterizationState( pRasterizationState_ )
+      , pMultisampleState( pMultisampleState_ )
+      , pDepthStencilState( pDepthStencilState_ )
+      , pColorBlendState( pColorBlendState_ )
+      , pDynamicState( pDynamicState_ )
+      , layout( layout_ )
+      , renderPass( renderPass_ )
+      , subpass( subpass_ )
+      , basePipelineHandle( basePipelineHandle_ )
+      , basePipelineIndex( basePipelineIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , stageCount( rhs.stageCount )
+      , pStages( rhs.pStages )
+      , pVertexInputState( rhs.pVertexInputState )
+      , pInputAssemblyState( rhs.pInputAssemblyState )
+      , pTessellationState( rhs.pTessellationState )
+      , pViewportState( rhs.pViewportState )
+      , pRasterizationState( rhs.pRasterizationState )
+      , pMultisampleState( rhs.pMultisampleState )
+      , pDepthStencilState( rhs.pDepthStencilState )
+      , pColorBlendState( rhs.pColorBlendState )
+      , pDynamicState( rhs.pDynamicState )
+      , layout( rhs.layout )
+      , renderPass( rhs.renderPass )
+      , subpass( rhs.subpass )
+      , basePipelineHandle( rhs.basePipelineHandle )
+      , basePipelineIndex( rhs.basePipelineIndex )
+    {}
+
+    GraphicsPipelineCreateInfo & operator=( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( GraphicsPipelineCreateInfo ) - offsetof( GraphicsPipelineCreateInfo, pNext ) );
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    GraphicsPipelineCreateInfo& operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = stageCount_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStages = pStages_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVertexInputState = pVertexInputState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPInputAssemblyState( const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInputAssemblyState = pInputAssemblyState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTessellationState = pTessellationState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPViewportState( const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewportState = pViewportState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPRasterizationState( const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRasterizationState = pRasterizationState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPMultisampleState( const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pMultisampleState = pMultisampleState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPDepthStencilState( const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDepthStencilState = pDepthStencilState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPColorBlendState( const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pColorBlendState = pColorBlendState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDynamicState = pDynamicState_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpass = subpass_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineHandle = basePipelineHandle_;
+      return *this;
+    }
+
+    GraphicsPipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineIndex = basePipelineIndex_;
+      return *this;
+    }
+
+    operator VkGraphicsPipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( this );
+    }
+
+    operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkGraphicsPipelineCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( GraphicsPipelineCreateInfo const& ) const = default;
+#else
+    bool operator==( GraphicsPipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stageCount == rhs.stageCount )
+          && ( pStages == rhs.pStages )
+          && ( pVertexInputState == rhs.pVertexInputState )
+          && ( pInputAssemblyState == rhs.pInputAssemblyState )
+          && ( pTessellationState == rhs.pTessellationState )
+          && ( pViewportState == rhs.pViewportState )
+          && ( pRasterizationState == rhs.pRasterizationState )
+          && ( pMultisampleState == rhs.pMultisampleState )
+          && ( pDepthStencilState == rhs.pDepthStencilState )
+          && ( pColorBlendState == rhs.pColorBlendState )
+          && ( pDynamicState == rhs.pDynamicState )
+          && ( layout == rhs.layout )
+          && ( renderPass == rhs.renderPass )
+          && ( subpass == rhs.subpass )
+          && ( basePipelineHandle == rhs.basePipelineHandle )
+          && ( basePipelineIndex == rhs.basePipelineIndex );
+    }
+
+    bool operator!=( GraphicsPipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+    uint32_t stageCount = {};
+    const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {};
+    const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState = {};
+    const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+    uint32_t subpass = {};
+    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+    int32_t basePipelineIndex = {};
+  };
+  static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<GraphicsPipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
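+
+  // Illustrative sketch (assumes the default `vk` namespace alias; `stages`,
+  // `layout` and `renderPass` are hypothetical, previously created objects):
+  // GraphicsPipelineCreateInfo only stores pointers to the state structs above,
+  // so each of them must stay alive until the pipeline is created.
+  //
+  //   vk::GraphicsPipelineCreateInfo pipelineInfo = vk::GraphicsPipelineCreateInfo()
+  //       .setStageCount( 2 ).setPStages( stages )
+  //       .setPMultisampleState( &msaaState )
+  //       .setPDepthStencilState( &depthState )
+  //       .setPDynamicState( &dynamicState )
+  //       .setLayout( layout ).setRenderPass( renderPass );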
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkHdr.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkHdr.hpp
new file mode 100644
index 00000000..a3f66078
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkHdr.hpp
@@ -0,0 +1,305 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkExtension.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkFence.hpp"
+#include "VkDedicated.hpp"
+#include "VkDraw.hpp"
+#include "VkDispatch.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+#include "VkFilter.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct XYColorEXT
+  {
+    VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {},
+                                     float y_ = {} ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR XYColorEXT( XYColorEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : x( rhs.x )
+      , y( rhs.y )
+    {}
+
+    XYColorEXT & operator=( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( XYColorEXT ) );
+      return *this;
+    }
+
+    XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    XYColorEXT& operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XYColorEXT const *>(&rhs);
+      return *this;
+    }
+
+    XYColorEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    XYColorEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    operator VkXYColorEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkXYColorEXT*>( this );
+    }
+
+    operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkXYColorEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( XYColorEXT const& ) const = default;
+#else
+    bool operator==( XYColorEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y );
+    }
+
+    bool operator!=( XYColorEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    float x = {};
+    float y = {};
+  };
+  static_assert( sizeof( XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<XYColorEXT>::value, "struct wrapper is not a standard layout!" );
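+
+  // Illustrative note: XYColorEXT holds a CIE 1931 xy chromaticity coordinate.
+  // For example, the Rec. 709 red primary (standard chromaticity values, not
+  // part of this header) would be written as:
+  //
+  //   vk::XYColorEXT red = vk::XYColorEXT().setX( 0.640f ).setY( 0.330f );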
+
+  struct HdrMetadataEXT
+  {
+    VULKAN_HPP_CONSTEXPR HdrMetadataEXT( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {},
+                                         VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {},
+                                         VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {},
+                                         VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {},
+                                         float maxLuminance_ = {},
+                                         float minLuminance_ = {},
+                                         float maxContentLightLevel_ = {},
+                                         float maxFrameAverageLightLevel_ = {} ) VULKAN_HPP_NOEXCEPT
+      : displayPrimaryRed( displayPrimaryRed_ )
+      , displayPrimaryGreen( displayPrimaryGreen_ )
+      , displayPrimaryBlue( displayPrimaryBlue_ )
+      , whitePoint( whitePoint_ )
+      , maxLuminance( maxLuminance_ )
+      , minLuminance( minLuminance_ )
+      , maxContentLightLevel( maxContentLightLevel_ )
+      , maxFrameAverageLightLevel( maxFrameAverageLightLevel_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR HdrMetadataEXT( HdrMetadataEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , displayPrimaryRed( rhs.displayPrimaryRed )
+      , displayPrimaryGreen( rhs.displayPrimaryGreen )
+      , displayPrimaryBlue( rhs.displayPrimaryBlue )
+      , whitePoint( rhs.whitePoint )
+      , maxLuminance( rhs.maxLuminance )
+      , minLuminance( rhs.minLuminance )
+      , maxContentLightLevel( rhs.maxContentLightLevel )
+      , maxFrameAverageLightLevel( rhs.maxFrameAverageLightLevel )
+    {}
+
+    HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( HdrMetadataEXT ) - offsetof( HdrMetadataEXT, pNext ) );
+      return *this;
+    }
+
+    HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    HdrMetadataEXT& operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HdrMetadataEXT const *>(&rhs);
+      return *this;
+    }
+
+    HdrMetadataEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayPrimaryRed = displayPrimaryRed_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayPrimaryGreen = displayPrimaryGreen_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT
+    {
+      displayPrimaryBlue = displayPrimaryBlue_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      whitePoint = whitePoint_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxLuminance = maxLuminance_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setMinLuminance( float minLuminance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minLuminance = minLuminance_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxContentLightLevel = maxContentLightLevel_;
+      return *this;
+    }
+
+    HdrMetadataEXT & setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxFrameAverageLightLevel = maxFrameAverageLightLevel_;
+      return *this;
+    }
+
+    operator VkHdrMetadataEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkHdrMetadataEXT*>( this );
+    }
+
+    operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkHdrMetadataEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( HdrMetadataEXT const& ) const = default;
+#else
+    bool operator==( HdrMetadataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( displayPrimaryRed == rhs.displayPrimaryRed )
+          && ( displayPrimaryGreen == rhs.displayPrimaryGreen )
+          && ( displayPrimaryBlue == rhs.displayPrimaryBlue )
+          && ( whitePoint == rhs.whitePoint )
+          && ( maxLuminance == rhs.maxLuminance )
+          && ( minLuminance == rhs.minLuminance )
+          && ( maxContentLightLevel == rhs.maxContentLightLevel )
+          && ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel );
+    }
+
+    bool operator!=( HdrMetadataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed = {};
+    VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen = {};
+    VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue = {};
+    VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint = {};
+    float maxLuminance = {};
+    float minLuminance = {};
+    float maxContentLightLevel = {};
+    float maxFrameAverageLightLevel = {};
+  };
+  static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<HdrMetadataEXT>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkHeadless.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkHeadless.hpp
new file mode 100644
index 00000000..c384d321
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkHeadless.hpp
@@ -0,0 +1,155 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkExtension.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkFence.hpp"
+#include "VkDedicated.hpp"
+#include "VkDraw.hpp"
+#include "VkDispatch.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+#include "VkFilter.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct HeadlessSurfaceCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+    {}
+
+    HeadlessSurfaceCreateInfoEXT & operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( HeadlessSurfaceCreateInfoEXT ) - offsetof( HeadlessSurfaceCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    HeadlessSurfaceCreateInfoEXT& operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    HeadlessSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    HeadlessSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator VkHeadlessSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( this );
+    }
+
+    operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( HeadlessSurfaceCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( HeadlessSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( HeadlessSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {};
+  };
+  static_assert( sizeof( HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<HeadlessSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
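+
+  // Illustrative sketch (assumes the default `vk` namespace alias and a
+  // hypothetical, already-created `instance`): VK_EXT_headless_surface creates
+  // a surface without any window system, e.g. for automated testing.
+  //
+  //   vk::SurfaceKHR surface = instance.createHeadlessSurfaceEXT( vk::HeadlessSurfaceCreateInfoEXT() );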
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkI.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkI.hpp
new file mode 100644
index 00000000..018fa9c4
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkI.hpp
@@ -0,0 +1,169 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkExtension.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkFence.hpp"
+#include "VkDedicated.hpp"
+#include "VkDraw.hpp"
+#include "VkDispatch.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+#include "VkFilter.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#ifdef VK_USE_PLATFORM_IOS_MVK
+  struct IOSSurfaceCreateInfoMVK
+  {
+    VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {},
+                                                  const void* pView_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , pView( pView_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( IOSSurfaceCreateInfoMVK const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , pView( rhs.pView )
+    {}
+
+    IOSSurfaceCreateInfoMVK & operator=( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( IOSSurfaceCreateInfoMVK ) - offsetof( IOSSurfaceCreateInfoMVK, pNext ) );
+      return *this;
+    }
+
+    IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    IOSSurfaceCreateInfoMVK& operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const *>(&rhs);
+      return *this;
+    }
+
+    IOSSurfaceCreateInfoMVK & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    IOSSurfaceCreateInfoMVK & setPView( const void* pView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pView = pView_;
+      return *this;
+    }
+
+    operator VkIOSSurfaceCreateInfoMVK const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( this );
+    }
+
+    operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkIOSSurfaceCreateInfoMVK*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( IOSSurfaceCreateInfoMVK const& ) const = default;
+#else
+    bool operator==( IOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( pView == rhs.pView );
+    }
+
+    bool operator!=( IOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {};
+    const void* pView = {};
+  };
+  static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<IOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkImage.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkImage.hpp
new file mode 100644
index 00000000..eb6bf984
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkImage.hpp
@@ -0,0 +1,2161 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+ +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkExtension.hpp" +#include "VkCoarse.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkConditional.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkFence.hpp" +#include "VkDedicated.hpp" +#include "VkDraw.hpp" +#include "VkDispatch.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkExtent.hpp" +#include "VkFilter.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkI.hpp" +#include "VkImage.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct ImageBlit + { + VULKAN_HPP_CONSTEXPR_14 ImageBlit( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, + std::array const& srcOffsets_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, + std::array const& dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubresource( srcSubresource_ ) + , srcOffsets{} + , dstSubresource( dstSubresource_ ) + , dstOffsets{} + { + VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( srcOffsets, srcOffsets_ ); + VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( dstOffsets, dstOffsets_ ); + } + + VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const& rhs ) VULKAN_HPP_NOEXCEPT + : srcSubresource( rhs.srcSubresource ) + , srcOffsets{} + , dstSubresource( rhs.dstSubresource ) + , dstOffsets{} + { + VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( srcOffsets, rhs.srcOffsets ); + VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( dstOffsets, rhs.dstOffsets ); + } + + ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( ImageBlit ) ); + return *this; + } + + ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ImageBlit& operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ ) VULKAN_HPP_NOEXCEPT + { + srcSubresource = srcSubresource_; + return *this; + } + + ImageBlit & setSrcOffsets( std::array srcOffsets_ ) VULKAN_HPP_NOEXCEPT + { + memcpy( srcOffsets, srcOffsets_.data(), 2 * sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) ); + return *this; + } + + ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ ) VULKAN_HPP_NOEXCEPT + { + dstSubresource = dstSubresource_; + return *this; + } + + ImageBlit & setDstOffsets( std::array dstOffsets_ ) VULKAN_HPP_NOEXCEPT + { + memcpy( dstOffsets, dstOffsets_.data(), 2 * sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) ); + return *this; + } + + operator VkImageBlit const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageBlit &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if 
+    auto operator<=>( ImageBlit const& ) const = default;
+#else
+    bool operator==( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( srcSubresource == rhs.srcSubresource )
+          && ( memcmp( srcOffsets, rhs.srcOffsets, 2 * sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) ) == 0 )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( memcmp( dstOffsets, rhs.dstOffsets, 2 * sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) ) == 0 );
+    }
+
+    bool operator!=( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D srcOffsets[2] = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D dstOffsets[2] = {};
+  };
+  static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageBlit>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageCopy
+  {
+    VULKAN_HPP_CONSTEXPR ImageCopy( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
+                                    VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {},
+                                    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
+                                    VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {},
+                                    VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT
+      : srcSubresource( srcSubresource_ )
+      , srcOffset( srcOffset_ )
+      , dstSubresource( dstSubresource_ )
+      , dstOffset( dstOffset_ )
+      , extent( extent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const& rhs ) VULKAN_HPP_NOEXCEPT
+      : srcSubresource( rhs.srcSubresource )
+      , srcOffset( rhs.srcOffset )
+      , dstSubresource( rhs.dstSubresource )
+      , dstOffset( rhs.dstOffset )
+      , extent( rhs.extent )
+    {}
+
+    ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( ImageCopy ) );
+      return *this;
+    }
+
+    ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageCopy& operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy const *>(&rhs);
+      return *this;
+    }
+
+    ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    operator VkImageCopy const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageCopy*>( this );
+    }
+
+    operator VkImageCopy &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageCopy*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageCopy const& ) const = default;
+#else
+    bool operator==( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffset == rhs.srcOffset )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffset == rhs.dstOffset )
+          && ( extent == rhs.extent );
+    }
+
+    bool operator!=( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+  };
+  static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageCopy>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {},
+                                          VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
+                                          VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                          VULKAN_HPP_NAMESPACE::Extent3D extent_ = {},
+                                          uint32_t mipLevels_ = {},
+                                          uint32_t arrayLayers_ = {},
+                                          VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1,
+                                          VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
+                                          VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
+                                          VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
+                                          uint32_t queueFamilyIndexCount_ = {},
+                                          const uint32_t* pQueueFamilyIndices_ = {},
+                                          VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , imageType( imageType_ )
+      , format( format_ )
+      , extent( extent_ )
+      , mipLevels( mipLevels_ )
+      , arrayLayers( arrayLayers_ )
+      , samples( samples_ )
+      , tiling( tiling_ )
+      , usage( usage_ )
+      , sharingMode( sharingMode_ )
+      , queueFamilyIndexCount( queueFamilyIndexCount_ )
+      , pQueueFamilyIndices( pQueueFamilyIndices_ )
+      , initialLayout( initialLayout_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , imageType( rhs.imageType )
+      , format( rhs.format )
+      , extent( rhs.extent )
+      , mipLevels( rhs.mipLevels )
+      , arrayLayers( rhs.arrayLayers )
+      , samples( rhs.samples )
+      , tiling( rhs.tiling )
+      , usage( rhs.usage )
+      , sharingMode( rhs.sharingMode )
+      , queueFamilyIndexCount( rhs.queueFamilyIndexCount )
+      , pQueueFamilyIndices( rhs.pQueueFamilyIndices )
+      , initialLayout( rhs.initialLayout )
+    {}
+
+    ImageCreateInfo & operator=( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ImageCreateInfo ) - offsetof( ImageCreateInfo, pNext ) );
+      return *this;
+    }
+
+    ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageCreateInfo& operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    ImageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImageCreateInfo & setImageType( VULKAN_HPP_NAMESPACE::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageType = imageType_;
+      return *this;
+    }
+
+    ImageCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    ImageCreateInfo & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    ImageCreateInfo & setMipLevels( uint32_t mipLevels_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipLevels = mipLevels_;
+      return *this;
+    }
+
+    ImageCreateInfo & setArrayLayers( uint32_t arrayLayers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      arrayLayers = arrayLayers_;
+      return *this;
+    }
+
+    ImageCreateInfo & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
+    {
+      samples = samples_;
+      return *this;
+    }
+
+    ImageCreateInfo & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tiling = tiling_;
+      return *this;
+    }
+
+    ImageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    ImageCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sharingMode = sharingMode_;
+      return *this;
+    }
+
+    ImageCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    ImageCreateInfo & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+    ImageCreateInfo & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialLayout = initialLayout_;
+      return *this;
+    }
+
+    operator VkImageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageCreateInfo*>( this );
+    }
+
+    operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageCreateInfo const& ) const = default;
+#else
+    bool operator==( ImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( imageType == rhs.imageType )
+          && ( format == rhs.format )
+          && ( extent == rhs.extent )
+          && ( mipLevels == rhs.mipLevels )
+          && ( arrayLayers == rhs.arrayLayers )
+          && ( samples == rhs.samples )
+          && ( tiling == rhs.tiling )
+          && ( usage == rhs.usage )
+          && ( sharingMode == rhs.sharingMode )
+          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
+          && ( initialLayout == rhs.initialLayout );
+    }
+
+    bool operator!=( ImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D;
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+    uint32_t mipLevels = {};
+    uint32_t arrayLayers = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
+    VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+    VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+    uint32_t queueFamilyIndexCount = {};
+    const uint32_t* pQueueFamilyIndices = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+  };
+  static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct SubresourceLayout
+  {
+    VULKAN_HPP_CONSTEXPR SubresourceLayout( VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {},
+                                            VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+                                            VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {},
+                                            VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {},
+                                            VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {} ) VULKAN_HPP_NOEXCEPT
+      : offset( offset_ )
+      , size( size_ )
+      , rowPitch( rowPitch_ )
+      , arrayPitch( arrayPitch_ )
+      , depthPitch( depthPitch_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubresourceLayout( SubresourceLayout const& rhs ) VULKAN_HPP_NOEXCEPT
+      : offset( rhs.offset )
+      , size( rhs.size )
+      , rowPitch( rhs.rowPitch )
+      , arrayPitch( rhs.arrayPitch )
+      , depthPitch( rhs.depthPitch )
+    {}
+
+    SubresourceLayout & operator=( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( SubresourceLayout ) );
+      return *this;
+    }
+
+    SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SubresourceLayout& operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubresourceLayout const *>(&rhs);
+      return *this;
+    }
+
+    operator VkSubresourceLayout const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubresourceLayout*>( this );
+    }
+
+    operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubresourceLayout*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SubresourceLayout const& ) const = default;
+#else
+    bool operator==( SubresourceLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( offset == rhs.offset )
+          && ( size == rhs.size )
+          && ( rowPitch == rhs.rowPitch )
+          && ( arrayPitch == rhs.arrayPitch )
+          && ( depthPitch == rhs.depthPitch );
+    }
+
+    bool operator!=( SubresourceLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {};
+  };
+  static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubresourceLayout>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageDrmFormatModifierExplicitCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_ = {},
+                                                                      uint32_t drmFormatModifierPlaneCount_ = {},
+                                                                      const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ = {} ) VULKAN_HPP_NOEXCEPT
+      : drmFormatModifier( drmFormatModifier_ )
+      , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ )
+      , pPlaneLayouts( pPlaneLayouts_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , drmFormatModifier( rhs.drmFormatModifier )
+      , drmFormatModifierPlaneCount( rhs.drmFormatModifierPlaneCount )
+      , pPlaneLayouts( rhs.pPlaneLayouts )
+    {}
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) - offsetof( ImageDrmFormatModifierExplicitCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT& operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifier = drmFormatModifier_;
+      return *this;
+    }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_;
+      return *this;
+    }
+
+    ImageDrmFormatModifierExplicitCreateInfoEXT & setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPlaneLayouts = pPlaneLayouts_;
+      return *this;
+    }
+
+    operator VkImageDrmFormatModifierExplicitCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageDrmFormatModifierExplicitCreateInfoEXT*>( this );
+    }
+
+    operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageDrmFormatModifierExplicitCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( drmFormatModifier == rhs.drmFormatModifier )
+          && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
+          && ( pPlaneLayouts == rhs.pPlaneLayouts );
+    }
+
+    bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
+    const void* pNext = {};
+    uint64_t drmFormatModifier = {};
+    uint32_t drmFormatModifierPlaneCount = {};
+    const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts = {};
+  };
+  static_assert( sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), "struct and wrapper have different size!" );
"struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct ImageDrmFormatModifierListCreateInfoEXT + { + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = {}, + const uint64_t* pDrmFormatModifiers_ = {} ) VULKAN_HPP_NOEXCEPT + : drmFormatModifierCount( drmFormatModifierCount_ ) + , pDrmFormatModifiers( pDrmFormatModifiers_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , drmFormatModifierCount( rhs.drmFormatModifierCount ) + , pDrmFormatModifiers( rhs.pDrmFormatModifiers ) + {} + + ImageDrmFormatModifierListCreateInfoEXT & operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ImageDrmFormatModifierListCreateInfoEXT ) - offsetof( ImageDrmFormatModifierListCreateInfoEXT, pNext ) ); + return *this; + } + + ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ImageDrmFormatModifierListCreateInfoEXT& operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT + { + drmFormatModifierCount = drmFormatModifierCount_; + return *this; + } + + ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t* pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT + { + pDrmFormatModifiers = pDrmFormatModifiers_; + return *this; + } + + operator VkImageDrmFormatModifierListCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const& ) const = default; +#else + bool operator==( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( drmFormatModifierCount == rhs.drmFormatModifierCount ) + && ( pDrmFormatModifiers == rhs.pDrmFormatModifiers ); + } + + bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT; + const void* pNext = {}; + uint32_t drmFormatModifierCount = {}; + const uint64_t* pDrmFormatModifiers = {}; + }; + static_assert( sizeof( ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
+
+  struct ImageDrmFormatModifierPropertiesEXT
+  {
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {} ) VULKAN_HPP_NOEXCEPT
+      : drmFormatModifier( drmFormatModifier_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , drmFormatModifier( rhs.drmFormatModifier )
+    {}
+
+    ImageDrmFormatModifierPropertiesEXT & operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ImageDrmFormatModifierPropertiesEXT ) - offsetof( ImageDrmFormatModifierPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageDrmFormatModifierPropertiesEXT& operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkImageDrmFormatModifierPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageDrmFormatModifierPropertiesEXT*>( this );
+    }
+
+    operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageDrmFormatModifierPropertiesEXT const& ) const = default;
+#else
+    bool operator==( ImageDrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( drmFormatModifier == rhs.drmFormatModifier );
+    }
+
+    bool operator!=( ImageDrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT;
+    void* pNext = {};
+    uint64_t drmFormatModifier = {};
+  };
+  static_assert( sizeof( ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageDrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageFormatListCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfoKHR( uint32_t viewFormatCount_ = {},
+                                                       const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = {} ) VULKAN_HPP_NOEXCEPT
+      : viewFormatCount( viewFormatCount_ )
+      , pViewFormats( pViewFormats_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfoKHR( ImageFormatListCreateInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , viewFormatCount( rhs.viewFormatCount )
+      , pViewFormats( rhs.pViewFormats )
+    {}
+
+    ImageFormatListCreateInfoKHR & operator=( ImageFormatListCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ImageFormatListCreateInfoKHR ) - offsetof( ImageFormatListCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    ImageFormatListCreateInfoKHR( VkImageFormatListCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageFormatListCreateInfoKHR& operator=( VkImageFormatListCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    ImageFormatListCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageFormatListCreateInfoKHR & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewFormatCount = viewFormatCount_;
+      return *this;
+    }
+
+    ImageFormatListCreateInfoKHR & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewFormats = pViewFormats_;
+      return *this;
+    }
+
+    operator VkImageFormatListCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageFormatListCreateInfoKHR*>( this );
+    }
+
+    operator VkImageFormatListCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageFormatListCreateInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageFormatListCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( ImageFormatListCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( viewFormatCount == rhs.viewFormatCount )
+          && ( pViewFormats == rhs.pViewFormats );
+    }
+
+    bool operator!=( ImageFormatListCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfoKHR;
+    const void* pNext = {};
+    uint32_t viewFormatCount = {};
+    const VULKAN_HPP_NAMESPACE::Format* pViewFormats = {};
+  };
+  static_assert( sizeof( ImageFormatListCreateInfoKHR ) == sizeof( VkImageFormatListCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageFormatListCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageFormatProperties2
+  {
+    VULKAN_HPP_CONSTEXPR ImageFormatProperties2( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : imageFormatProperties( imageFormatProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties2 const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , imageFormatProperties( rhs.imageFormatProperties )
+    {}
+
+    ImageFormatProperties2 & operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ImageFormatProperties2 ) - offsetof( ImageFormatProperties2, pNext ) );
+      return *this;
+    }
+
+    ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageFormatProperties2& operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const *>(&rhs);
+      return *this;
+    }
+
+    operator VkImageFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageFormatProperties2*>( this );
+    }
+
+    operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageFormatProperties2*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageFormatProperties2 const& ) const = default;
+#else
+    bool operator==( ImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageFormatProperties == rhs.imageFormatProperties );
+    }
+
+    bool operator!=( ImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
+  };
+  static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageSubresourceRange
+  {
+    VULKAN_HPP_CONSTEXPR ImageSubresourceRange( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {},
+                                                uint32_t baseMipLevel_ = {},
+                                                uint32_t levelCount_ = {},
+                                                uint32_t baseArrayLayer_ = {},
+                                                uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT
+      : aspectMask( aspectMask_ )
+      , baseMipLevel( baseMipLevel_ )
+      , levelCount( levelCount_ )
+      , baseArrayLayer( baseArrayLayer_ )
+      , layerCount( layerCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const& rhs ) VULKAN_HPP_NOEXCEPT
+      : aspectMask( rhs.aspectMask )
+      , baseMipLevel( rhs.baseMipLevel )
+      , levelCount( rhs.levelCount )
+      , baseArrayLayer( rhs.baseArrayLayer )
+      , layerCount( rhs.layerCount )
+    {}
+
+    ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( ImageSubresourceRange ) );
+      return *this;
+    }
+
+    ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageSubresourceRange& operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceRange const *>(&rhs);
+      return *this;
+    }
+
+    ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseMipLevel = baseMipLevel_;
+      return *this;
+    }
+
+    ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      levelCount = levelCount_;
+      return *this;
+    }
+
+    ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      baseArrayLayer = baseArrayLayer_;
+      return *this;
+    }
+
+    ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layerCount = layerCount_;
+      return *this;
+    }
+
+    operator VkImageSubresourceRange const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSubresourceRange*>( this );
+    }
+
+    operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSubresourceRange*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageSubresourceRange const& ) const = default;
+#else
+    bool operator==( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( aspectMask == rhs.aspectMask )
+          && ( baseMipLevel == rhs.baseMipLevel )
+          && ( levelCount == rhs.levelCount )
+          && ( baseArrayLayer == rhs.baseArrayLayer )
+          && ( layerCount == rhs.layerCount );
+    }
+
+    bool operator!=( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+    uint32_t baseMipLevel = {};
+    uint32_t levelCount = {};
+    uint32_t baseArrayLayer = {};
+    uint32_t layerCount = {};
+  };
+  static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageSubresourceRange>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageMemoryBarrier
+  {
+    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
+                                             VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
+                                             VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                                             VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
+                                             uint32_t srcQueueFamilyIndex_ = {},
+                                             uint32_t dstQueueFamilyIndex_ = {},
+                                             VULKAN_HPP_NAMESPACE::Image image_ = {},
+                                             VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT
+      : srcAccessMask( srcAccessMask_ )
+      , dstAccessMask( dstAccessMask_ )
+      , oldLayout( oldLayout_ )
+      , newLayout( newLayout_ )
+      , srcQueueFamilyIndex( srcQueueFamilyIndex_ )
+      , dstQueueFamilyIndex( dstQueueFamilyIndex_ )
+      , image( image_ )
+      , subresourceRange( subresourceRange_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , srcAccessMask( rhs.srcAccessMask )
+      , dstAccessMask( rhs.dstAccessMask )
+      , oldLayout( rhs.oldLayout )
+      , newLayout( rhs.newLayout )
+      , srcQueueFamilyIndex( rhs.srcQueueFamilyIndex )
+      , dstQueueFamilyIndex( rhs.dstQueueFamilyIndex )
+      , image( rhs.image )
+      , subresourceRange( rhs.subresourceRange )
+    {}
+
+    ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ImageMemoryBarrier ) - offsetof( ImageMemoryBarrier, pNext ) );
+      return *this;
+    }
+
+    ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageMemoryBarrier& operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const *>(&rhs);
+      return *this;
+    }
+
+    ImageMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      oldLayout = oldLayout_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      newLayout = newLayout_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcQueueFamilyIndex = srcQueueFamilyIndex_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstQueueFamilyIndex = dstQueueFamilyIndex_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    ImageMemoryBarrier & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subresourceRange = subresourceRange_;
+      return *this;
+    }
+
+    operator VkImageMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageMemoryBarrier*>( this );
+    }
+
+    operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageMemoryBarrier*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageMemoryBarrier const& ) const = default;
+#else
+    bool operator==( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( oldLayout == rhs.oldLayout )
+          && ( newLayout == rhs.newLayout )
+          && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
+          && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
+          && ( image == rhs.image )
+          && ( subresourceRange == rhs.subresourceRange );
+    }
+
+    bool operator!=( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+    VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
+    uint32_t srcQueueFamilyIndex = {};
+    uint32_t dstQueueFamilyIndex = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
+  };
+  static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageMemoryRequirementsInfo2
+  {
+    VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT
+      : image( image_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , image( rhs.image )
+    {}
+
+    ImageMemoryRequirementsInfo2 & operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ImageMemoryRequirementsInfo2 ) - offsetof( ImageMemoryRequirementsInfo2, pNext ) );
+      return *this;
+    }
+
+    ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageMemoryRequirementsInfo2& operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const *>(&rhs);
+      return *this;
+    }
+
+    ImageMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    operator VkImageMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( this );
+    }
+
+    operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageMemoryRequirementsInfo2*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageMemoryRequirementsInfo2 const& ) const = default;
+#else
+    bool operator==( ImageMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image );
+    }
+
+    bool operator!=( ImageMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+  };
+  static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+  struct ImagePipeSurfaceCreateInfoFUCHSIA
+  {
+    VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {},
+                                                            zx_handle_t imagePipeHandle_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , imagePipeHandle( imagePipeHandle_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , imagePipeHandle( rhs.imagePipeHandle )
+    {}
+
+    ImagePipeSurfaceCreateInfoFUCHSIA & operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) - offsetof( ImagePipeSurfaceCreateInfoFUCHSIA, pNext ) );
+      return *this;
+    }
+
+    ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImagePipeSurfaceCreateInfoFUCHSIA& operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const *>(&rhs);
+      return *this;
+    }
+
+    ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imagePipeHandle = imagePipeHandle_;
+      return *this;
+    }
+
+    operator VkImagePipeSurfaceCreateInfoFUCHSIA const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( this );
+    }
+
+    operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const& ) const = default;
+#else
+    bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( imagePipeHandle == rhs.imagePipeHandle );
+    }
+
+    bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {};
+    zx_handle_t imagePipeHandle = {};
+  };
+  static_assert( sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImagePipeSurfaceCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  struct ImagePlaneMemoryRequirementsInfo
+  {
+    VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor ) VULKAN_HPP_NOEXCEPT
+      : planeAspect( planeAspect_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , planeAspect( rhs.planeAspect )
+    {}
+
+    ImagePlaneMemoryRequirementsInfo & operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ImagePlaneMemoryRequirementsInfo ) - offsetof( ImagePlaneMemoryRequirementsInfo, pNext ) );
+      return *this;
+    }
+
+    ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImagePlaneMemoryRequirementsInfo& operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const *>(&rhs);
+      return *this;
+    }
+
+    ImagePlaneMemoryRequirementsInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImagePlaneMemoryRequirementsInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
+    {
+      planeAspect = planeAspect_;
+      return *this;
+    }
+
+    operator VkImagePlaneMemoryRequirementsInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImagePlaneMemoryRequirementsInfo*>( this );
+    }
+
+    operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImagePlaneMemoryRequirementsInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImagePlaneMemoryRequirementsInfo const& ) const = default;
+#else
+    bool operator==( ImagePlaneMemoryRequirementsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( planeAspect == rhs.planeAspect );
+    }
+
+    bool operator!=( ImagePlaneMemoryRequirementsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
+  };
+  static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImagePlaneMemoryRequirementsInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageResolve
+  {
+    VULKAN_HPP_CONSTEXPR ImageResolve( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {},
+                                       VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {},
+                                       VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {},
+                                       VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {},
+                                       VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT
+      : srcSubresource( srcSubresource_ )
+      , srcOffset( srcOffset_ )
+      , dstSubresource( dstSubresource_ )
+      , dstOffset( dstOffset_ )
+      , extent( extent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const& rhs ) VULKAN_HPP_NOEXCEPT
+      : srcSubresource( rhs.srcSubresource )
+      , srcOffset( rhs.srcOffset )
+      , dstSubresource( rhs.dstSubresource )
+      , dstOffset( rhs.dstOffset )
+      , extent( rhs.extent )
+    {}
+
+    ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( ImageResolve ) );
+      return *this;
+    }
+
+    ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageResolve& operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve const *>(&rhs);
+      return *this;
+    }
+
+    ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubresource = srcSubresource_;
+      return *this;
+    }
+
+    ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcOffset = srcOffset_;
+      return *this;
+    }
+
+    ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubresource = dstSubresource_;
+      return *this;
+    }
+
+    ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstOffset = dstOffset_;
+      return *this;
+    }
+
+    ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    operator VkImageResolve const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageResolve*>( this );
+    }
+
+    operator VkImageResolve &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageResolve*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageResolve const& ) const = default;
+#else
+    bool operator==( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( srcSubresource == rhs.srcSubresource )
+          && ( srcOffset == rhs.srcOffset )
+          && ( dstSubresource == rhs.dstSubresource )
+          && ( dstOffset == rhs.dstOffset )
+          && ( extent == rhs.extent );
+    }
+
+    bool operator!=( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
+    VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
+    VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
+    VULKAN_HPP_NAMESPACE::Extent3D extent = {};
+  };
+  static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageResolve>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageSparseMemoryRequirementsInfo2
+  {
+    VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {} ) VULKAN_HPP_NOEXCEPT
+      : image( image_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( ImageSparseMemoryRequirementsInfo2 const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , image( rhs.image )
+    {}
+
+    ImageSparseMemoryRequirementsInfo2 & operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ImageSparseMemoryRequirementsInfo2 ) - offsetof( ImageSparseMemoryRequirementsInfo2, pNext ) );
+      return *this;
+    }
+
+    ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageSparseMemoryRequirementsInfo2& operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const *>(&rhs);
+      return *this;
+    }
+
+    ImageSparseMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    operator VkImageSparseMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( this );
+    }
+
+    operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSparseMemoryRequirementsInfo2*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageSparseMemoryRequirementsInfo2 const& ) const = default;
+#else
+    bool operator==( ImageSparseMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( image == rhs.image );
+    }
+
+    bool operator!=( ImageSparseMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
+  };
+  static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageSparseMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageStencilUsageCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfoEXT( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {} ) VULKAN_HPP_NOEXCEPT
+      : stencilUsage( stencilUsage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfoEXT( ImageStencilUsageCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , stencilUsage( rhs.stencilUsage )
+    {}
+
+    ImageStencilUsageCreateInfoEXT & operator=( ImageStencilUsageCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ImageStencilUsageCreateInfoEXT ) - offsetof( ImageStencilUsageCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    ImageStencilUsageCreateInfoEXT( VkImageStencilUsageCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageStencilUsageCreateInfoEXT& operator=( VkImageStencilUsageCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    ImageStencilUsageCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageStencilUsageCreateInfoEXT & setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stencilUsage = stencilUsage_;
+      return *this;
+    }
+
+    operator VkImageStencilUsageCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageStencilUsageCreateInfoEXT*>( this );
+    }
+
+    operator VkImageStencilUsageCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageStencilUsageCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageStencilUsageCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( ImageStencilUsageCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stencilUsage == rhs.stencilUsage );
+    }
+
+    bool operator!=( ImageStencilUsageCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {};
+  };
+  static_assert( sizeof( ImageStencilUsageCreateInfoEXT ) == sizeof( VkImageStencilUsageCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageStencilUsageCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageSwapchainCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {} ) VULKAN_HPP_NOEXCEPT
+      : swapchain( swapchain_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , swapchain( rhs.swapchain )
+    {}
+
+    ImageSwapchainCreateInfoKHR & operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ImageSwapchainCreateInfoKHR ) - offsetof( ImageSwapchainCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageSwapchainCreateInfoKHR& operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    ImageSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchain = swapchain_;
+      return *this;
+    }
+
+    operator VkImageSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageSwapchainCreateInfoKHR*>( this );
+    }
+
+    operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageSwapchainCreateInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageSwapchainCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( ImageSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchain == rhs.swapchain );
+    }
+
+    bool operator!=( ImageSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
+  };
+  static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageViewASTCDecodeModeEXT
+  {
+    VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined ) VULKAN_HPP_NOEXCEPT
+      : decodeMode( decodeMode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , decodeMode( rhs.decodeMode )
+    {}
+
+    ImageViewASTCDecodeModeEXT & operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ImageViewASTCDecodeModeEXT ) - offsetof( ImageViewASTCDecodeModeEXT, pNext ) );
+      return *this;
+    }
+
+    ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageViewASTCDecodeModeEXT& operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const *>(&rhs);
+      return *this;
+    }
+
+    ImageViewASTCDecodeModeEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      decodeMode = decodeMode_;
+      return *this;
+    }
+
+    operator VkImageViewASTCDecodeModeEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewASTCDecodeModeEXT*>( this );
+    }
+
+    operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewASTCDecodeModeEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageViewASTCDecodeModeEXT const& ) const = default;
+#else
+    bool operator==( ImageViewASTCDecodeModeEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( decodeMode == rhs.decodeMode );
+    }
+
+    bool operator!=( ImageViewASTCDecodeModeEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+  };
+  static_assert( sizeof( ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImageViewASTCDecodeModeEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct ImageViewCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {},
+                                              VULKAN_HPP_NAMESPACE::Image image_ = {},
+                                              VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D,
+                                              VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                              VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
+                                              VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , image( image_ )
+      , viewType( viewType_ )
+      , format( format_ )
+      , components( components_ )
+      , subresourceRange( subresourceRange_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , image( rhs.image )
+      , viewType( rhs.viewType )
+      , format( rhs.format )
+      , components( rhs.components )
+      , subresourceRange( rhs.subresourceRange )
+    {}
+
+    ImageViewCreateInfo & operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ImageViewCreateInfo ) - offsetof( ImageViewCreateInfo, pNext ) );
+      return *this;
+    }
+
+    ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ImageViewCreateInfo& operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    ImageViewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ImageViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ImageViewCreateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
+    {
+      image = image_;
+      return *this;
+    }
+
+    ImageViewCreateInfo & setViewType( VULKAN_HPP_NAMESPACE::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewType = viewType_;
+      return *this;
+    }
+
+    ImageViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    ImageViewCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping components_ ) VULKAN_HPP_NOEXCEPT
+    {
+      components = components_;
+      return *this;
+    }
+
+    ImageViewCreateInfo & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subresourceRange = subresourceRange_;
+      return *this;
+    }
+
+    operator VkImageViewCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageViewCreateInfo*>( this );
+    }
+
+    operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageViewCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ImageViewCreateInfo const& ) const = default;
+#else
+    bool operator==( ImageViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( image == rhs.image )
+          && ( viewType == rhs.viewType )
+          && ( format == rhs.format )
+          && ( components == rhs.components )
+          && ( subresourceRange == rhs.subresourceRange );
+    }
+
+    bool operator!=( ImageViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Image image = {};
{}; + VULKAN_HPP_NAMESPACE::ImageViewType viewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D; + VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ComponentMapping components = {}; + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {}; + }; + static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct ImageViewHandleInfoNVX + { + VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, + VULKAN_HPP_NAMESPACE::Sampler sampler_ = {} ) VULKAN_HPP_NOEXCEPT + : imageView( imageView_ ) + , descriptorType( descriptorType_ ) + , sampler( sampler_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageViewHandleInfoNVX const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , imageView( rhs.imageView ) + , descriptorType( rhs.descriptorType ) + , sampler( rhs.sampler ) + {} + + ImageViewHandleInfoNVX & operator=( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ImageViewHandleInfoNVX ) - offsetof( ImageViewHandleInfoNVX, pNext ) ); + return *this; + } + + ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ImageViewHandleInfoNVX& operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ImageViewHandleInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageViewHandleInfoNVX & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT + { + imageView = imageView_; + return *this; + } + + ImageViewHandleInfoNVX & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT + { + descriptorType = descriptorType_; + return *this; + } + + ImageViewHandleInfoNVX & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT + { + sampler = sampler_; + return *this; + } + + operator VkImageViewHandleInfoNVX const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageViewHandleInfoNVX const& ) const = default; +#else + bool operator==( ImageViewHandleInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( imageView == rhs.imageView ) + && ( descriptorType == rhs.descriptorType ) + && ( sampler == rhs.sampler ); + } + + bool operator!=( ImageViewHandleInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView imageView = {}; + VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler; + VULKAN_HPP_NAMESPACE::Sampler sampler = {}; + }; + static_assert( sizeof( ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ), "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct ImageViewUsageCreateInfo + { + VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {} ) VULKAN_HPP_NOEXCEPT + : usage( usage_ ) + {} + + VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageViewUsageCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , usage( rhs.usage ) + {} + + ImageViewUsageCreateInfo & operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ImageViewUsageCreateInfo ) - offsetof( ImageViewUsageCreateInfo, pNext ) ); + return *this; + } + + ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ImageViewUsageCreateInfo& operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ImageViewUsageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + operator VkImageViewUsageCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImageViewUsageCreateInfo const& ) const = default; +#else + bool operator==( ImageViewUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( usage == rhs.usage ); + } + + bool operator!=( ImageViewUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + }; + static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkImport.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkImport.hpp new file mode 100644 index 00000000..02dd3ce9 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkImport.hpp @@ -0,0 +1,970 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkExtension.hpp" +#include "VkCoarse.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkConditional.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkFence.hpp" +#include "VkDedicated.hpp" +#include "VkDraw.hpp" +#include "VkDispatch.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkExtent.hpp" +#include "VkFilter.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkI.hpp" +#include "VkImport.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ +#ifdef VK_USE_PLATFORM_ANDROID_KHR + struct ImportAndroidHardwareBufferInfoANDROID + { + VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer* buffer_ = {} ) VULKAN_HPP_NOEXCEPT + : buffer( buffer_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( ImportAndroidHardwareBufferInfoANDROID const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , buffer( rhs.buffer ) + {} + + ImportAndroidHardwareBufferInfoANDROID & operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ImportAndroidHardwareBufferInfoANDROID ) - offsetof( ImportAndroidHardwareBufferInfoANDROID, pNext ) ); + return *this; + } + + ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ImportAndroidHardwareBufferInfoANDROID& operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ImportAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + 
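    // Annotation, not generated output: every set*() member in these wrappers returns
    // *this, so sparse fields can be filled by chaining. A hedged sketch for this
    // struct, where `ahb`, `size`, `memoryTypeIndex` and `device` are assumed
    // placeholders:
    //
    //   vk::ImportAndroidHardwareBufferInfoANDROID importInfo;
    //   importInfo.setBuffer( ahb );
    //   vk::MemoryAllocateInfo allocateInfo{ size, memoryTypeIndex };
    //   allocateInfo.setPNext( &importInfo );
    //   vk::DeviceMemory memory = device.allocateMemory( allocateInfo );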
ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer* buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + operator VkImportAndroidHardwareBufferInfoANDROID const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const& ) const = default; +#else + bool operator==( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( buffer == rhs.buffer ); + } + + bool operator!=( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID; + const void* pNext = {}; + struct AHardwareBuffer* buffer = {}; + }; + static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + struct ImportFenceFdInfoKHR + { + VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + int fd_ = {} ) VULKAN_HPP_NOEXCEPT + : fence( fence_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , fd( fd_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( ImportFenceFdInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , fence( rhs.fence ) + , flags( rhs.flags ) + , handleType( rhs.handleType ) + , fd( rhs.fd ) + {} + + ImportFenceFdInfoKHR & operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ImportFenceFdInfoKHR ) - offsetof( ImportFenceFdInfoKHR, pNext ) ); + return *this; + } + + ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ImportFenceFdInfoKHR& operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ImportFenceFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportFenceFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + ImportFenceFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ImportFenceFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportFenceFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT + { + fd = fd_; + return *this; + } + + operator VkImportFenceFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImportFenceFdInfoKHR const& ) const = default; +#else + 
bool operator==( ImportFenceFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( fence == rhs.fence ) + && ( flags == rhs.flags ) + && ( handleType == rhs.handleType ) + && ( fd == rhs.fd ); + } + + bool operator!=( ImportFenceFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + int fd = {}; + }; + static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ImportFenceWin32HandleInfoKHR + { + VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, + VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + : fence( fence_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , handle( handle_ ) + , name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( ImportFenceWin32HandleInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , fence( rhs.fence ) + , flags( rhs.flags ) + , handleType( rhs.handleType ) + , handle( rhs.handle ) + , name( rhs.name ) + {} + + ImportFenceWin32HandleInfoKHR & operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ImportFenceWin32HandleInfoKHR ) - offsetof( ImportFenceWin32HandleInfoKHR, pNext ) ); + return *this; + } + + ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ImportFenceWin32HandleInfoKHR& operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ImportFenceWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportFenceWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT + { + fence = fence_; + return *this; + } + + ImportFenceWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ImportFenceWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } + + operator VkImportFenceWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( 
ImportFenceWin32HandleInfoKHR const& ) const = default; +#else + bool operator==( ImportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( fence == rhs.fence ) + && ( flags == rhs.flags ) + && ( handleType == rhs.handleType ) + && ( handle == rhs.handle ) + && ( name == rhs.name ); + } + + bool operator!=( ImportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Fence fence = {}; + VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; + }; + static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct ImportMemoryFdInfoKHR + { + VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + int fd_ = {} ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + , fd( fd_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ImportMemoryFdInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , handleType( rhs.handleType ) + , fd( rhs.fd ) + {} + + ImportMemoryFdInfoKHR & operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ImportMemoryFdInfoKHR ) - offsetof( ImportMemoryFdInfoKHR, pNext ) ); + return *this; + } + + ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ImportMemoryFdInfoKHR& operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ImportMemoryFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportMemoryFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT + { + fd = fd_; + return *this; + } + + operator VkImportMemoryFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImportMemoryFdInfoKHR const& ) const = default; +#else + bool operator==( ImportMemoryFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ) + && ( fd == rhs.fd ); + } + + bool operator!=( ImportMemoryFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + int fd = {}; + }; + 
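    // Annotation, not generated output: a minimal sketch of how ImportMemoryFdInfoKHR
    // is consumed. It is chained into a MemoryAllocateInfo through pNext, and on a
    // successful allocation the driver takes ownership of the descriptor. `fd`, `size`,
    // `memoryTypeIndex` and `device` are assumed placeholders:
    //
    //   vk::ImportMemoryFdInfoKHR importInfo{ vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, fd };
    //   vk::MemoryAllocateInfo allocateInfo{ size, memoryTypeIndex };
    //   allocateInfo.setPNext( &importInfo );
    //   vk::DeviceMemory memory = device.allocateMemory( allocateInfo );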
static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct ImportMemoryHostPointerInfoEXT + { + VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + void* pHostPointer_ = {} ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + , pHostPointer( pHostPointer_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , handleType( rhs.handleType ) + , pHostPointer( rhs.pHostPointer ) + {} + + ImportMemoryHostPointerInfoEXT & operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ImportMemoryHostPointerInfoEXT ) - offsetof( ImportMemoryHostPointerInfoEXT, pNext ) ); + return *this; + } + + ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ImportMemoryHostPointerInfoEXT& operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ImportMemoryHostPointerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportMemoryHostPointerInfoEXT & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportMemoryHostPointerInfoEXT & setPHostPointer( void* pHostPointer_ ) VULKAN_HPP_NOEXCEPT + { + pHostPointer = pHostPointer_; + return *this; + } + + operator VkImportMemoryHostPointerInfoEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImportMemoryHostPointerInfoEXT const& ) const = default; +#else + bool operator==( ImportMemoryHostPointerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ) + && ( pHostPointer == rhs.pHostPointer ); + } + + bool operator!=( ImportMemoryHostPointerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + void* pHostPointer = {}; + }; + static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ImportMemoryWin32HandleInfoKHR + { + VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + , handle( handle_ ) + , name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( ImportMemoryWin32HandleInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , handleType( rhs.handleType ) + , handle( rhs.handle ) + , name( rhs.name ) + {} + + ImportMemoryWin32HandleInfoKHR & operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ImportMemoryWin32HandleInfoKHR ) - offsetof( ImportMemoryWin32HandleInfoKHR, pNext ) ); + return *this; + } + + ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ImportMemoryWin32HandleInfoKHR& operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ImportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportMemoryWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } + + operator VkImportMemoryWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImportMemoryWin32HandleInfoKHR const& ) const = default; +#else + bool operator==( ImportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ) + && ( handle == rhs.handle ) + && ( name == rhs.name ); + } + + bool operator!=( ImportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + HANDLE handle = {}; + LPCWSTR name = {}; + }; + static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ImportMemoryWin32HandleInfoNV + { + VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {}, + HANDLE handle_ = {} ) VULKAN_HPP_NOEXCEPT + : handleType( handleType_ ) + , handle( handle_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( ImportMemoryWin32HandleInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , handleType( rhs.handleType ) + , handle( rhs.handle ) + {} + + ImportMemoryWin32HandleInfoNV & operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ImportMemoryWin32HandleInfoNV ) - offsetof( ImportMemoryWin32HandleInfoNV, pNext ) ); + return *this; + } + + ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ImportMemoryWin32HandleInfoNV& operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ImportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportMemoryWin32HandleInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + operator VkImportMemoryWin32HandleInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImportMemoryWin32HandleInfoNV const& ) const = default; +#else + bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( handleType == rhs.handleType ) + && ( handle == rhs.handle ); + } + + bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {}; + HANDLE handle = {}; + }; + static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct ImportSemaphoreFdInfoKHR + { + VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + int fd_ = {} ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , fd( fd_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( ImportSemaphoreFdInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , semaphore( rhs.semaphore ) + , flags( rhs.flags ) + , handleType( rhs.handleType ) + , fd( rhs.fd ) + {} + + ImportSemaphoreFdInfoKHR & operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ImportSemaphoreFdInfoKHR ) - offsetof( ImportSemaphoreFdInfoKHR, pNext ) ); + return *this; + } + + ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ImportSemaphoreFdInfoKHR& operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ImportSemaphoreFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportSemaphoreFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + ImportSemaphoreFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ImportSemaphoreFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportSemaphoreFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT + { + fd = fd_; + return *this; + } + + operator VkImportSemaphoreFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImportSemaphoreFdInfoKHR const& ) const = default; +#else + bool operator==( ImportSemaphoreFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( semaphore == rhs.semaphore ) + && ( flags == rhs.flags ) + && ( handleType == rhs.handleType ) + && ( fd == rhs.fd ); + } + + bool operator!=( ImportSemaphoreFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + int fd = {}; + }; + static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct ImportSemaphoreWin32HandleInfoKHR + { + VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, + HANDLE handle_ = {}, + LPCWSTR name_ = {} ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , flags( flags_ ) + , handleType( handleType_ ) + , handle( handle_ ) + , name( name_ ) + {} + + VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( ImportSemaphoreWin32HandleInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , semaphore( rhs.semaphore ) + , flags( rhs.flags ) + , handleType( rhs.handleType ) + , handle( rhs.handle ) + , name( rhs.name ) + {} + + ImportSemaphoreWin32HandleInfoKHR & operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ImportSemaphoreWin32HandleInfoKHR ) - offsetof( ImportSemaphoreWin32HandleInfoKHR, pNext ) ); + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ImportSemaphoreWin32HandleInfoKHR& operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT + { + handle = handle_; + return *this; + } + + ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT + { + name = name_; + return *this; + } + + operator VkImportSemaphoreWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const& ) const = default; +#else + bool operator==( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( semaphore == rhs.semaphore ) + && ( flags == rhs.flags ) + && ( handleType == rhs.handleType ) + && ( handle == rhs.handle ) + && ( name == rhs.name ); + } + + bool operator!=( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = 
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+    HANDLE handle = {};
+    LPCWSTR name = {};
+  };
+  static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ImportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkIndirect.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkIndirect.hpp
new file mode 100644
index 00000000..d454df16
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkIndirect.hpp
@@ -0,0 +1,289 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
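The two structures defined in this header wrap the token layouts of the VK_NVX_device_generated_commands extension. As a rough sketch of the builder-style API they expose (assuming the usual `vk::` namespace alias, a translation unit that includes <array>, and purely illustrative token choices):

    // Describe a layout that first binds a pipeline, then issues a non-indexed draw.
    std::array<vk::IndirectCommandsLayoutTokenNVX, 2> tokens{
        vk::IndirectCommandsLayoutTokenNVX{ vk::IndirectCommandsTokenTypeNVX::ePipeline },
        vk::IndirectCommandsLayoutTokenNVX{ vk::IndirectCommandsTokenTypeNVX::eDraw } };
    vk::IndirectCommandsLayoutCreateInfoNVX createInfo;
    createInfo.setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
              .setTokenCount( static_cast<uint32_t>( tokens.size() ) )
              .setPTokens( tokens.data() );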
+ +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkExtension.hpp" +#include "VkCoarse.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkConditional.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkFence.hpp" +#include "VkDedicated.hpp" +#include "VkDraw.hpp" +#include "VkDispatch.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkExtent.hpp" +#include "VkFilter.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct IndirectCommandsLayoutTokenNVX + { + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNVX( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline, + uint32_t bindingUnit_ = {}, + uint32_t dynamicCount_ = {}, + uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT + : tokenType( tokenType_ ) + , bindingUnit( bindingUnit_ ) + , dynamicCount( dynamicCount_ ) + , divisor( divisor_ ) + {} + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNVX( IndirectCommandsLayoutTokenNVX const& rhs ) VULKAN_HPP_NOEXCEPT + : tokenType( rhs.tokenType ) + , bindingUnit( rhs.bindingUnit ) + , dynamicCount( rhs.dynamicCount ) + , divisor( rhs.divisor ) + {} + + IndirectCommandsLayoutTokenNVX & operator=( IndirectCommandsLayoutTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( IndirectCommandsLayoutTokenNVX ) ); + return *this; + } + + IndirectCommandsLayoutTokenNVX( VkIndirectCommandsLayoutTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + IndirectCommandsLayoutTokenNVX& operator=( VkIndirectCommandsLayoutTokenNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + IndirectCommandsLayoutTokenNVX & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType_ ) VULKAN_HPP_NOEXCEPT + { + tokenType = tokenType_; + return *this; + } + + IndirectCommandsLayoutTokenNVX & setBindingUnit( uint32_t bindingUnit_ ) VULKAN_HPP_NOEXCEPT + { + bindingUnit = bindingUnit_; + return *this; + } + + IndirectCommandsLayoutTokenNVX & setDynamicCount( uint32_t dynamicCount_ ) VULKAN_HPP_NOEXCEPT + { + dynamicCount = dynamicCount_; + return *this; + } + + IndirectCommandsLayoutTokenNVX & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT + { + divisor = divisor_; + return *this; + } + + operator VkIndirectCommandsLayoutTokenNVX const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutTokenNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( IndirectCommandsLayoutTokenNVX const& ) const = default; +#else + bool operator==( 
IndirectCommandsLayoutTokenNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( tokenType == rhs.tokenType ) + && ( bindingUnit == rhs.bindingUnit ) + && ( dynamicCount == rhs.dynamicCount ) + && ( divisor == rhs.divisor ); + } + + bool operator!=( IndirectCommandsLayoutTokenNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNVX::ePipeline; + uint32_t bindingUnit = {}; + uint32_t dynamicCount = {}; + uint32_t divisor = {}; + }; + static_assert( sizeof( IndirectCommandsLayoutTokenNVX ) == sizeof( VkIndirectCommandsLayoutTokenNVX ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct IndirectCommandsLayoutCreateInfoNVX + { + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNVX( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNVX flags_ = {}, + uint32_t tokenCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX* pTokens_ = {} ) VULKAN_HPP_NOEXCEPT + : pipelineBindPoint( pipelineBindPoint_ ) + , flags( flags_ ) + , tokenCount( tokenCount_ ) + , pTokens( pTokens_ ) + {} + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNVX( IndirectCommandsLayoutCreateInfoNVX const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , pipelineBindPoint( rhs.pipelineBindPoint ) + , flags( rhs.flags ) + , tokenCount( rhs.tokenCount ) + , pTokens( rhs.pTokens ) + {} + + IndirectCommandsLayoutCreateInfoNVX & operator=( IndirectCommandsLayoutCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( IndirectCommandsLayoutCreateInfoNVX ) - offsetof( IndirectCommandsLayoutCreateInfoNVX, pNext ) ); + return *this; + } + + IndirectCommandsLayoutCreateInfoNVX( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + IndirectCommandsLayoutCreateInfoNVX& operator=( VkIndirectCommandsLayoutCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + IndirectCommandsLayoutCreateInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNVX & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNVX & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNVX & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT + { + tokenCount = tokenCount_; + return *this; + } + + IndirectCommandsLayoutCreateInfoNVX & setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX* pTokens_ ) VULKAN_HPP_NOEXCEPT + { + pTokens = pTokens_; + return *this; + } + + operator VkIndirectCommandsLayoutCreateInfoNVX const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutCreateInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( IndirectCommandsLayoutCreateInfoNVX const& ) const = default; +#else + bool 
operator==( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( flags == rhs.flags )
+          && ( tokenCount == rhs.tokenCount )
+          && ( pTokens == rhs.pTokens );
+    }
+
+    bool operator!=( IndirectCommandsLayoutCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNVX;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNVX flags = {};
+    uint32_t tokenCount = {};
+    const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNVX* pTokens = {};
+  };
+  static_assert( sizeof( IndirectCommandsLayoutCreateInfoNVX ) == sizeof( VkIndirectCommandsLayoutCreateInfoNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<IndirectCommandsLayoutCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkInitialize.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkInitialize.hpp
new file mode 100644
index 00000000..4aa9a10f
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkInitialize.hpp
@@ -0,0 +1,160 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
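The single structure below initializes the VK_INTEL_performance_query extension before any Intel performance queries are issued. A hedged sketch of the round trip, assuming the extension is enabled on `device` and Vulkan-Hpp's default exceptions-enabled configuration:

    vk::InitializePerformanceApiInfoINTEL initializeInfo{ nullptr };  // pUserData is optional
    device.initializePerformanceApiINTEL( initializeInfo );          // throws vk::SystemError on failure
    // ... record and collect INTEL performance queries ...
    device.uninitializePerformanceApiINTEL();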
+ +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkExtension.hpp" +#include "VkCoarse.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkConditional.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkFence.hpp" +#include "VkDedicated.hpp" +#include "VkDraw.hpp" +#include "VkDispatch.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkExtent.hpp" +#include "VkFilter.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct InitializePerformanceApiInfoINTEL + { + VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void* pUserData_ = {} ) VULKAN_HPP_NOEXCEPT + : pUserData( pUserData_ ) + {} + + VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( InitializePerformanceApiInfoINTEL const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , pUserData( rhs.pUserData ) + {} + + InitializePerformanceApiInfoINTEL & operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( InitializePerformanceApiInfoINTEL ) - offsetof( InitializePerformanceApiInfoINTEL, pNext ) ); + return *this; + } + + InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + InitializePerformanceApiInfoINTEL& operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + InitializePerformanceApiInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + InitializePerformanceApiInfoINTEL & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT + { + pUserData = pUserData_; + return *this; + } + + operator VkInitializePerformanceApiInfoINTEL const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( InitializePerformanceApiInfoINTEL const& ) const = default; +#else + bool operator==( InitializePerformanceApiInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( pUserData == rhs.pUserData ); + } + + bool operator!=( InitializePerformanceApiInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL; + const void* pNext = {}; + void* pUserData = {}; + }; + static_assert( sizeof( InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ), "struct and wrapper 
have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkInput.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkInput.hpp new file mode 100644 index 00000000..d4f7d37a --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkInput.hpp @@ -0,0 +1,172 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkExtension.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkFence.hpp"
+#include "VkDedicated.hpp"
+#include "VkDraw.hpp"
+#include "VkDispatch.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+#include "VkFilter.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct InputAttachmentAspectReference
+  {
+    VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( uint32_t subpass_ = {},
+                                                         uint32_t inputAttachmentIndex_ = {},
+                                                         VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT
+      : subpass( subpass_ )
+      , inputAttachmentIndex( inputAttachmentIndex_ )
+      , aspectMask( aspectMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( InputAttachmentAspectReference const& rhs ) VULKAN_HPP_NOEXCEPT
+      : subpass( rhs.subpass )
+      , inputAttachmentIndex( rhs.inputAttachmentIndex )
+      , aspectMask( rhs.aspectMask )
+    {}
+
+    InputAttachmentAspectReference & operator=( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>( this ), &rhs, sizeof( InputAttachmentAspectReference ) );
+      return *this;
+    }
+
+    InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    InputAttachmentAspectReference& operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const*>( &rhs );
+      return *this;
+    }
+
+    InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpass = subpass_;
+      return *this;
+    }
+
+    InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputAttachmentIndex = inputAttachmentIndex_;
+      return *this;
+    }
+
+    InputAttachmentAspectReference & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectMask = aspectMask_;
+      return *this;
+    }
+
+    operator VkInputAttachmentAspectReference const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkInputAttachmentAspectReference*>( this );
+    }
+
+    operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkInputAttachmentAspectReference*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( InputAttachmentAspectReference const& ) const = default;
+#else
+    bool operator==( InputAttachmentAspectReference const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( subpass == rhs.subpass )
+          && ( inputAttachmentIndex == rhs.inputAttachmentIndex )
+          && ( aspectMask == rhs.aspectMask );
+    }
+
+    bool operator!=( InputAttachmentAspectReference const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
+    uint32_t subpass = {};
+    uint32_t inputAttachmentIndex = {};
+    VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
+  };
+  static_assert( sizeof( InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<InputAttachmentAspectReference>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkInstance.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkInstance.hpp
new file mode 100644
index 00000000..afa35165
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkInstance.hpp
@@ -0,0 +1,217 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
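// A minimal sketch of filling in the InstanceCreateInfo wrapper defined below
// (illustrative only; assumes the default `vk` namespace and a hypothetical
// std::vector<const char*> named `extensions`):
//
//   vk::ApplicationInfo applicationInfo( "App", 1, "Engine", 1, VK_API_VERSION_1_1 );
//   vk::InstanceCreateInfo instanceCreateInfo = vk::InstanceCreateInfo()
//       .setPApplicationInfo( &applicationInfo )
//       .setEnabledExtensionCount( static_cast<uint32_t>( extensions.size() ) )
//       .setPpEnabledExtensionNames( extensions.data() );
//   vk::Instance instance = vk::createInstance( instanceCreateInfo );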
+ +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkExtension.hpp" +#include "VkCoarse.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkConditional.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkFence.hpp" +#include "VkDedicated.hpp" +#include "VkDraw.hpp" +#include "VkDispatch.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkExtent.hpp" +#include "VkFilter.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct InstanceCreateInfo + { + VULKAN_HPP_CONSTEXPR InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {}, + const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_ = {}, + uint32_t enabledLayerCount_ = {}, + const char* const* ppEnabledLayerNames_ = {}, + uint32_t enabledExtensionCount_ = {}, + const char* const* ppEnabledExtensionNames_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pApplicationInfo( pApplicationInfo_ ) + , enabledLayerCount( enabledLayerCount_ ) + , ppEnabledLayerNames( ppEnabledLayerNames_ ) + , enabledExtensionCount( enabledExtensionCount_ ) + , ppEnabledExtensionNames( ppEnabledExtensionNames_ ) + {} + + VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , flags( rhs.flags ) + , pApplicationInfo( rhs.pApplicationInfo ) + , enabledLayerCount( rhs.enabledLayerCount ) + , ppEnabledLayerNames( rhs.ppEnabledLayerNames ) + , enabledExtensionCount( rhs.enabledExtensionCount ) + , ppEnabledExtensionNames( rhs.ppEnabledExtensionNames ) + {} + + InstanceCreateInfo & operator=( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( InstanceCreateInfo ) - offsetof( InstanceCreateInfo, pNext ) ); + return *this; + } + + InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + InstanceCreateInfo& operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + InstanceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + InstanceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + InstanceCreateInfo & setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT + { + pApplicationInfo = pApplicationInfo_; + return *this; + } + + InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT + { + enabledLayerCount = enabledLayerCount_; + return *this; + } + + InstanceCreateInfo & 
setPpEnabledLayerNames( const char* const* ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT + { + ppEnabledLayerNames = ppEnabledLayerNames_; + return *this; + } + + InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT + { + enabledExtensionCount = enabledExtensionCount_; + return *this; + } + + InstanceCreateInfo & setPpEnabledExtensionNames( const char* const* ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT + { + ppEnabledExtensionNames = ppEnabledExtensionNames_; + return *this; + } + + operator VkInstanceCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( InstanceCreateInfo const& ) const = default; +#else + bool operator==( InstanceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( pApplicationInfo == rhs.pApplicationInfo ) + && ( enabledLayerCount == rhs.enabledLayerCount ) + && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames ) + && ( enabledExtensionCount == rhs.enabledExtensionCount ) + && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames ); + } + + bool operator!=( InstanceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {}; + const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo = {}; + uint32_t enabledLayerCount = {}; + const char* const* ppEnabledLayerNames = {}; + uint32_t enabledExtensionCount = {}; + const char* const* ppEnabledExtensionNames = {}; + }; + static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkLayer.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkLayer.hpp new file mode 100644 index 00000000..642f3bc5 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkLayer.hpp @@ -0,0 +1,167 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. 
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkExtension.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkFence.hpp"
+#include "VkDedicated.hpp"
+#include "VkDraw.hpp"
+#include "VkDispatch.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+#include "VkFilter.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkInstance.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct LayerProperties
+  {
+    VULKAN_HPP_CONSTEXPR_14 LayerProperties( std::array<char, VK_MAX_EXTENSION_NAME_SIZE> const& layerName_ = {},
+                                             uint32_t specVersion_ = {},
+                                             uint32_t implementationVersion_ = {},
+                                             std::array<char, VK_MAX_DESCRIPTION_SIZE> const& description_ = {} ) VULKAN_HPP_NOEXCEPT
+      : layerName{}
+      , specVersion( specVersion_ )
+      , implementationVersion( implementationVersion_ )
+      , description{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char, VK_MAX_EXTENSION_NAME_SIZE>::copy( layerName, layerName_ );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char, VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 LayerProperties( LayerProperties const& rhs ) VULKAN_HPP_NOEXCEPT
+      : layerName{}
+      , specVersion( rhs.specVersion )
+      , implementationVersion( rhs.implementationVersion )
+      , description{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char, VK_MAX_EXTENSION_NAME_SIZE>::copy( layerName, rhs.layerName );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char, VK_MAX_DESCRIPTION_SIZE>::copy( description, rhs.description );
+    }
+
+    LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>( this ), &rhs, sizeof( LayerProperties ) );
+      return *this;
+    }
+
+    LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    LayerProperties& operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerProperties const*>( &rhs );
+      return *this;
+    }
+
+    operator VkLayerProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkLayerProperties*>( this );
+    }
+
+    operator
VkLayerProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( LayerProperties const& ) const = default; +#else + bool operator==( LayerProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( memcmp( layerName, rhs.layerName, VK_MAX_EXTENSION_NAME_SIZE * sizeof( char ) ) == 0 ) + && ( specVersion == rhs.specVersion ) + && ( implementationVersion == rhs.implementationVersion ) + && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 ); + } + + bool operator!=( LayerProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + char layerName[VK_MAX_EXTENSION_NAME_SIZE] = {}; + uint32_t specVersion = {}; + uint32_t implementationVersion = {}; + char description[VK_MAX_DESCRIPTION_SIZE] = {}; + }; + static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkMac.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkMac.hpp new file mode 100644 index 00000000..39fd49fd --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkMac.hpp @@ -0,0 +1,177 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
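// A hedged sketch of the MacOSSurfaceCreateInfoMVK wrapper defined below, which
// is only compiled under VK_USE_PLATFORM_MACOS_MVK (illustrative only; `instance`
// is a valid vk::Instance and `nsView` a hypothetical NSView pointer obtained
// from the windowing layer):
//
//   vk::MacOSSurfaceCreateInfoMVK surfaceCreateInfo = vk::MacOSSurfaceCreateInfoMVK()
//       .setPView( nsView );
//   vk::SurfaceKHR surface = instance.createMacOSSurfaceMVK( surfaceCreateInfo );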
+ +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkExtension.hpp" +#include "VkCoarse.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkConditional.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkFence.hpp" +#include "VkDedicated.hpp" +#include "VkDraw.hpp" +#include "VkDispatch.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkExtent.hpp" +#include "VkFilter.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ +#ifdef VK_USE_PLATFORM_MACOS_MVK + struct MacOSSurfaceCreateInfoMVK + { + VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {}, + const void* pView_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pView( pView_ ) + {} + + VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateInfoMVK const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , flags( rhs.flags ) + , pView( rhs.pView ) + {} + + MacOSSurfaceCreateInfoMVK & operator=( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( MacOSSurfaceCreateInfoMVK ) - offsetof( MacOSSurfaceCreateInfoMVK, pNext ) ); + return *this; + } + + MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + MacOSSurfaceCreateInfoMVK& operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + MacOSSurfaceCreateInfoMVK & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MacOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + MacOSSurfaceCreateInfoMVK & setPView( const void* pView_ ) VULKAN_HPP_NOEXCEPT + { + pView = pView_; + return *this; + } + + operator VkMacOSSurfaceCreateInfoMVK const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MacOSSurfaceCreateInfoMVK const& ) const = default; +#else + bool operator==( MacOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( pView == rhs.pView ); + } + + bool operator!=( MacOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {}; + const void* pView = {}; + }; + static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkMapped.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkMapped.hpp new file mode 100644 index 00000000..58065439 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkMapped.hpp @@ -0,0 +1,187 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
+ +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkExtension.hpp" +#include "VkCoarse.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkConditional.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkFence.hpp" +#include "VkDedicated.hpp" +#include "VkDraw.hpp" +#include "VkDispatch.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkExtent.hpp" +#include "VkFilter.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct MappedMemoryRange + { + VULKAN_HPP_CONSTEXPR MappedMemoryRange( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT + : memory( memory_ ) + , offset( offset_ ) + , size( size_ ) + {} + + VULKAN_HPP_CONSTEXPR MappedMemoryRange( MappedMemoryRange const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , memory( rhs.memory ) + , offset( rhs.offset ) + , size( rhs.size ) + {} + + MappedMemoryRange & operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( MappedMemoryRange ) - offsetof( MappedMemoryRange, pNext ) ); + return *this; + } + + MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + MappedMemoryRange& operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + MappedMemoryRange & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MappedMemoryRange & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + MappedMemoryRange & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + MappedMemoryRange & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + operator VkMappedMemoryRange const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MappedMemoryRange const& ) const = default; +#else + bool operator==( MappedMemoryRange const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memory == rhs.memory ) + && ( offset == rhs.offset ) + && ( size == rhs.size ); + } + + bool operator!=( MappedMemoryRange const& rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::DeviceSize offset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + }; + static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkMemory.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkMemory.hpp new file mode 100644 index 00000000..8b4be207 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkMemory.hpp @@ -0,0 +1,1263 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkExtension.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkFence.hpp"
+#include "VkDedicated.hpp"
+#include "VkDraw.hpp"
+#include "VkDispatch.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+#include "VkFilter.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct MemoryAllocateFlagsInfo
+  {
+    VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {},
+                                                  uint32_t deviceMask_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , deviceMask( deviceMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlagsInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , deviceMask( rhs.deviceMask )
+    {}
+
+    MemoryAllocateFlagsInfo & operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( MemoryAllocateFlagsInfo ) - offsetof( MemoryAllocateFlagsInfo, pNext ) );
+      return *this;
+    }
+
+    MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryAllocateFlagsInfo& operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const*>( &rhs );
+      return *this;
+    }
+
+    MemoryAllocateFlagsInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    MemoryAllocateFlagsInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceMask = deviceMask_;
+      return *this;
+    }
+
+    operator VkMemoryAllocateFlagsInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryAllocateFlagsInfo*>( this );
+    }
+
+    operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryAllocateFlagsInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryAllocateFlagsInfo const& ) const = default;
+#else
+    bool operator==( MemoryAllocateFlagsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( deviceMask == rhs.deviceMask );
+    }
+
+    bool operator!=( MemoryAllocateFlagsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+    public:
const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {}; + uint32_t deviceMask = {}; + }; + static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct MemoryAllocateInfo + { + VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, + uint32_t memoryTypeIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : allocationSize( allocationSize_ ) + , memoryTypeIndex( memoryTypeIndex_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( MemoryAllocateInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , allocationSize( rhs.allocationSize ) + , memoryTypeIndex( rhs.memoryTypeIndex ) + {} + + MemoryAllocateInfo & operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( MemoryAllocateInfo ) - offsetof( MemoryAllocateInfo, pNext ) ); + return *this; + } + + MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + MemoryAllocateInfo& operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + MemoryAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT + { + allocationSize = allocationSize_; + return *this; + } + + MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT + { + memoryTypeIndex = memoryTypeIndex_; + return *this; + } + + operator VkMemoryAllocateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryAllocateInfo const& ) const = default; +#else + bool operator==( MemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( allocationSize == rhs.allocationSize ) + && ( memoryTypeIndex == rhs.memoryTypeIndex ); + } + + bool operator!=( MemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {}; + uint32_t memoryTypeIndex = {}; + }; + static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct MemoryBarrier + { + VULKAN_HPP_CONSTEXPR MemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {} ) VULKAN_HPP_NOEXCEPT + : srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , srcAccessMask( rhs.srcAccessMask ) + , dstAccessMask( rhs.dstAccessMask ) + {} + + MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( MemoryBarrier ) - offsetof( MemoryBarrier, pNext ) ); + return *this; + } + + MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + MemoryBarrier& operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + MemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + operator VkMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryBarrier const& ) const = default; +#else + bool operator==( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcAccessMask == rhs.srcAccessMask ) + && ( dstAccessMask == rhs.dstAccessMask ); + } + + bool operator!=( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + }; + static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct MemoryDedicatedAllocateInfo + { + VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT + : image( image_ ) + , buffer( buffer_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( MemoryDedicatedAllocateInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , image( rhs.image ) + , buffer( rhs.buffer ) + {} + + MemoryDedicatedAllocateInfo & operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( MemoryDedicatedAllocateInfo ) - offsetof( MemoryDedicatedAllocateInfo, pNext ) ); + return *this; + } + + MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + MemoryDedicatedAllocateInfo& operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + MemoryDedicatedAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MemoryDedicatedAllocateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + { + image = image_; + return *this; + } + + MemoryDedicatedAllocateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + operator VkMemoryDedicatedAllocateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryDedicatedAllocateInfo const& ) const = default; +#else + bool operator==( MemoryDedicatedAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( image == rhs.image ) + && ( buffer == rhs.buffer ); + } + + bool operator!=( MemoryDedicatedAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Image image = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct MemoryDedicatedRequirements + { + VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {} ) VULKAN_HPP_NOEXCEPT + : prefersDedicatedAllocation( prefersDedicatedAllocation_ ) + , requiresDedicatedAllocation( requiresDedicatedAllocation_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( MemoryDedicatedRequirements const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , prefersDedicatedAllocation( rhs.prefersDedicatedAllocation ) + , requiresDedicatedAllocation( rhs.requiresDedicatedAllocation ) + {} + + MemoryDedicatedRequirements & operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( MemoryDedicatedRequirements ) - offsetof( MemoryDedicatedRequirements, pNext ) ); + return *this; + } + + MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + MemoryDedicatedRequirements& operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkMemoryDedicatedRequirements const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryDedicatedRequirements const& ) const = default; +#else + bool operator==( MemoryDedicatedRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation ) + && ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation ); + } + + bool operator!=( MemoryDedicatedRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {}; + VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {}; + }; + static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct MemoryFdPropertiesKHR + { + VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryTypeBits( memoryTypeBits_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , memoryTypeBits( rhs.memoryTypeBits ) + {} + + MemoryFdPropertiesKHR & operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( MemoryFdPropertiesKHR ) - offsetof( MemoryFdPropertiesKHR, pNext ) ); + return *this; + } + + MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + MemoryFdPropertiesKHR& operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkMemoryFdPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryFdPropertiesKHR const& ) const = default; +#else + bool operator==( MemoryFdPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memoryTypeBits == rhs.memoryTypeBits ); + } + + bool operator!=( MemoryFdPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR; + void* pNext = {}; + uint32_t memoryTypeBits = {}; + }; + static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + +#ifdef VK_USE_PLATFORM_ANDROID_KHR + struct MemoryGetAndroidHardwareBufferInfoANDROID + { + VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {} ) VULKAN_HPP_NOEXCEPT + : memory( memory_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , memory( rhs.memory ) + {} + + MemoryGetAndroidHardwareBufferInfoANDROID & operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) - offsetof( MemoryGetAndroidHardwareBufferInfoANDROID, pNext ) ); + return *this; + } + + MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + MemoryGetAndroidHardwareBufferInfoANDROID& operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + operator VkMemoryGetAndroidHardwareBufferInfoANDROID const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const& ) const = default; +#else + bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memory == rhs.memory ); + } + + bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + }; + static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + + struct MemoryGetFdInfoKHR + { + VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : memory( memory_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( MemoryGetFdInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , memory( rhs.memory ) + , handleType( rhs.handleType ) + {} + + MemoryGetFdInfoKHR & operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( MemoryGetFdInfoKHR ) - offsetof( MemoryGetFdInfoKHR, pNext ) ); + return *this; + } + + MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + MemoryGetFdInfoKHR& operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + MemoryGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MemoryGetFdInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + MemoryGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + operator VkMemoryGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryGetFdInfoKHR const& ) const = default; +#else + bool operator==( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memory == rhs.memory ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct MemoryGetWin32HandleInfoKHR + { + VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : memory( memory_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( MemoryGetWin32HandleInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , memory( rhs.memory ) + , handleType( rhs.handleType ) + {} + + MemoryGetWin32HandleInfoKHR & operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( MemoryGetWin32HandleInfoKHR ) - offsetof( MemoryGetWin32HandleInfoKHR, pNext ) ); + return *this; + } + + MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + MemoryGetWin32HandleInfoKHR& operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + MemoryGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MemoryGetWin32HandleInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + { + memory = memory_; + return *this; + } + + MemoryGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + operator VkMemoryGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryGetWin32HandleInfoKHR const& ) const = default; +#else + bool operator==( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memory == rhs.memory ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd; + }; + static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + struct MemoryHeap + { + VULKAN_HPP_CONSTEXPR MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : size( size_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryHeap( MemoryHeap const& rhs ) VULKAN_HPP_NOEXCEPT + : size( rhs.size ) + , flags( rhs.flags ) + {} + + MemoryHeap & operator=( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( MemoryHeap ) ); + return *this; + } + + MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + MemoryHeap& operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkMemoryHeap const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryHeap const& ) const = default; +#else + bool operator==( MemoryHeap const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( size == rhs.size ) + && ( flags == rhs.flags ); + } + + bool operator!=( MemoryHeap const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {}; + }; + static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct MemoryHostPointerPropertiesEXT + { + VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryTypeBits( memoryTypeBits_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , memoryTypeBits( rhs.memoryTypeBits ) + {} + + MemoryHostPointerPropertiesEXT & operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( MemoryHostPointerPropertiesEXT ) - offsetof( MemoryHostPointerPropertiesEXT, pNext ) ); + return *this; + } + + MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + MemoryHostPointerPropertiesEXT& operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkMemoryHostPointerPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryHostPointerPropertiesEXT const& ) const = default; +#else + bool operator==( MemoryHostPointerPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memoryTypeBits == rhs.memoryTypeBits ); + } + + bool operator!=( MemoryHostPointerPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT; + void* pNext = {}; + uint32_t memoryTypeBits = {}; + }; + static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( 
VkMemoryHostPointerPropertiesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct MemoryPriorityAllocateInfoEXT + { + VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = {} ) VULKAN_HPP_NOEXCEPT + : priority( priority_ ) + {} + + VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( MemoryPriorityAllocateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , priority( rhs.priority ) + {} + + MemoryPriorityAllocateInfoEXT & operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( MemoryPriorityAllocateInfoEXT ) - offsetof( MemoryPriorityAllocateInfoEXT, pNext ) ); + return *this; + } + + MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + MemoryPriorityAllocateInfoEXT& operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + MemoryPriorityAllocateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) VULKAN_HPP_NOEXCEPT + { + priority = priority_; + return *this; + } + + operator VkMemoryPriorityAllocateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MemoryPriorityAllocateInfoEXT const& ) const = default; +#else + bool operator==( MemoryPriorityAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( priority == rhs.priority ); + } + + bool operator!=( MemoryPriorityAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT; + const void* pNext = {}; + float priority = {}; + }; + static_assert( sizeof( MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  struct MemoryRequirements
+  {
+    VULKAN_HPP_CONSTEXPR MemoryRequirements( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {},
+                                             VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {},
+                                             uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
+      : size( size_ )
+      , alignment( alignment_ )
+      , memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const& rhs ) VULKAN_HPP_NOEXCEPT
+      : size( rhs.size )
+      , alignment( rhs.alignment )
+      , memoryTypeBits( rhs.memoryTypeBits )
+    {}
+
+    MemoryRequirements & operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( MemoryRequirements ) );
+      return *this;
+    }
+
+    MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryRequirements& operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements const *>(&rhs);
+      return *this;
+    }
+
+    operator VkMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryRequirements*>( this );
+    }
+
+    operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryRequirements*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryRequirements const& ) const = default;
+#else
+    bool operator==( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( size == rhs.size )
+          && ( alignment == rhs.alignment )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::DeviceSize size = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize alignment = {};
+    uint32_t memoryTypeBits = {};
+  };
+  static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryRequirements>::value, "struct wrapper is not a standard layout!" );
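+  // Usage sketch (illustrative): memoryTypeBits from MemoryRequirements is matched against the
+  // MemoryType array of PhysicalDeviceMemoryProperties (declared in another header) to select an
+  // allocation's memory type index; the conventional search, with hypothetical names:
+  //
+  //   uint32_t findMemoryTypeIndex( vk::PhysicalDeviceMemoryProperties const & memoryProperties,
+  //                                 vk::MemoryRequirements const & requirements,
+  //                                 vk::MemoryPropertyFlags required )
+  //   {
+  //     for ( uint32_t i = 0; i < memoryProperties.memoryTypeCount; ++i )
+  //       if ( ( requirements.memoryTypeBits & ( 1u << i ) ) &&  // type allowed by the resource
+  //            ( memoryProperties.memoryTypes[i].propertyFlags & required ) == required )
+  //         return i;
+  //     throw std::runtime_error( "no suitable memory type" );
+  //   }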
+
+  struct MemoryRequirements2
+  {
+    VULKAN_HPP_CONSTEXPR MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT
+      : memoryRequirements( memoryRequirements_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , memoryRequirements( rhs.memoryRequirements )
+    {}
+
+    MemoryRequirements2 & operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( MemoryRequirements2 ) - offsetof( MemoryRequirements2, pNext ) );
+      return *this;
+    }
+
+    MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryRequirements2& operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements2 const *>(&rhs);
+      return *this;
+    }
+
+    operator VkMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryRequirements2*>( this );
+    }
+
+    operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryRequirements2*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryRequirements2 const& ) const = default;
+#else
+    bool operator==( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryRequirements == rhs.memoryRequirements );
+    }
+
+    bool operator!=( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
+  };
+  static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
+
+  struct MemoryType
+  {
+    VULKAN_HPP_CONSTEXPR MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {},
+                                     uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : propertyFlags( propertyFlags_ )
+      , heapIndex( heapIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const& rhs ) VULKAN_HPP_NOEXCEPT
+      : propertyFlags( rhs.propertyFlags )
+      , heapIndex( rhs.heapIndex )
+    {}
+
+    MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( MemoryType ) );
+      return *this;
+    }
+
+    MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryType& operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryType const *>(&rhs);
+      return *this;
+    }
+
+    operator VkMemoryType const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryType*>( this );
+    }
+
+    operator VkMemoryType &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryType*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryType const& ) const = default;
+#else
+    bool operator==( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( propertyFlags == rhs.propertyFlags )
+          && ( heapIndex == rhs.heapIndex );
+    }
+
+    bool operator!=( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {};
+    uint32_t heapIndex = {};
+  };
+  static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryType>::value, "struct wrapper is not a standard layout!" );
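+  // Usage sketch (illustrative; assumes Vulkan 1.1 and a valid `buffer`): MemoryRequirements2
+  // above is the pNext-extensible variant returned by the *MemoryRequirements2 queries, e.g.:
+  //
+  //   vk::BufferMemoryRequirementsInfo2 info{ buffer };
+  //   vk::MemoryRequirements2 reqs2 = device.getBufferMemoryRequirements2( info );
+  //   vk::DeviceSize size = reqs2.memoryRequirements.size;  // plain MemoryRequirements embedded inside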
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct MemoryWin32HandlePropertiesKHR
+  {
+    VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT
+      : memoryTypeBits( memoryTypeBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , memoryTypeBits( rhs.memoryTypeBits )
+    {}
+
+    MemoryWin32HandlePropertiesKHR & operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( MemoryWin32HandlePropertiesKHR ) - offsetof( MemoryWin32HandlePropertiesKHR, pNext ) );
+      return *this;
+    }
+
+    MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    MemoryWin32HandlePropertiesKHR& operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkMemoryWin32HandlePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkMemoryWin32HandlePropertiesKHR*>( this );
+    }
+
+    operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( MemoryWin32HandlePropertiesKHR const& ) const = default;
+#else
+    bool operator==( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryTypeBits == rhs.memoryTypeBits );
+    }
+
+    bool operator!=( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR;
+    void* pNext = {};
+    uint32_t memoryTypeBits = {};
+  };
+  static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<MemoryWin32HandlePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkMetal.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkMetal.hpp
new file mode 100644
index 00000000..6c332601
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkMetal.hpp
@@ -0,0 +1,180 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkExtension.hpp" +#include "VkCoarse.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkFence.hpp" +#include "VkDedicated.hpp" +#include "VkDraw.hpp" +#include "VkDispatch.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkExtent.hpp" +#include "VkFilter.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ +#ifdef VK_USE_PLATFORM_METAL_EXT + struct MetalSurfaceCreateInfoEXT + { + VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {}, + const CAMetalLayer* pLayer_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pLayer( pLayer_ ) + {} + + VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , flags( rhs.flags ) + , pLayer( rhs.pLayer ) + {} + + MetalSurfaceCreateInfoEXT & operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( MetalSurfaceCreateInfoEXT ) - offsetof( MetalSurfaceCreateInfoEXT, pNext ) ); + return *this; + } + + MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + MetalSurfaceCreateInfoEXT& operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + MetalSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer* pLayer_ ) VULKAN_HPP_NOEXCEPT + 
{ + pLayer = pLayer_; + return *this; + } + + operator VkMetalSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MetalSurfaceCreateInfoEXT const& ) const = default; +#else + bool operator==( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( pLayer == rhs.pLayer ); + } + + bool operator!=( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {}; + const CAMetalLayer* pLayer = {}; + }; + static_assert( sizeof( MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkMultisample.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkMultisample.hpp new file mode 100644 index 00000000..739e476c --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkMultisample.hpp @@ -0,0 +1,156 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
+ +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkExtension.hpp" +#include "VkCoarse.hpp" +#include "VkCommand.hpp" +#include "VkMetal.hpp" +#include "VkFormat.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkFence.hpp" +#include "VkDedicated.hpp" +#include "VkDraw.hpp" +#include "VkDispatch.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkExtent.hpp" +#include "VkFilter.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct MultisamplePropertiesEXT + { + VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {} ) VULKAN_HPP_NOEXCEPT + : maxSampleLocationGridSize( maxSampleLocationGridSize_ ) + {} + + VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , maxSampleLocationGridSize( rhs.maxSampleLocationGridSize ) + {} + + MultisamplePropertiesEXT & operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( MultisamplePropertiesEXT ) - offsetof( MultisamplePropertiesEXT, pNext ) ); + return *this; + } + + MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + MultisamplePropertiesEXT& operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkMultisamplePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( MultisamplePropertiesEXT const& ) const = default; +#else + bool operator==( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ); + } + + bool operator!=( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; + }; + static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" 
);
+  static_assert( std::is_standard_layout<MultisamplePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkObject.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkObject.hpp
new file mode 100644
index 00000000..03e081f0
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkObject.hpp
@@ -0,0 +1,831 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
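+// The ObjectTable*EntryNVX structs defined below all share the (type, flags) header of
+// ObjectTableEntryNVX, and each provides an explicit constructor that copies that header; an
+// illustrative sketch for VK_NVX_device_generated_commands (handles assumed valid) before passing
+// entries to Device::registerObjectsNVX:
+//
+//   vk::ObjectTableEntryNVX base{ vk::ObjectEntryTypeNVX::ePipeline,
+//                                 vk::ObjectEntryUsageFlagBitsNVX::eGraphics };
+//   vk::ObjectTablePipelineEntryNVX entry{ base, pipeline };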
+ +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkExtension.hpp" +#include "VkCoarse.hpp" +#include "VkCommand.hpp" +#include "VkMetal.hpp" +#include "VkFormat.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkFence.hpp" +#include "VkDedicated.hpp" +#include "VkDraw.hpp" +#include "VkDispatch.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkExtent.hpp" +#include "VkFilter.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkMultisample.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" +#include "VkObject.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct ObjectTableCreateInfoNVX + { + VULKAN_HPP_CONSTEXPR ObjectTableCreateInfoNVX( uint32_t objectCount_ = {}, + const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes_ = {}, + const uint32_t* pObjectEntryCounts_ = {}, + const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ = {}, + uint32_t maxUniformBuffersPerDescriptor_ = {}, + uint32_t maxStorageBuffersPerDescriptor_ = {}, + uint32_t maxStorageImagesPerDescriptor_ = {}, + uint32_t maxSampledImagesPerDescriptor_ = {}, + uint32_t maxPipelineLayouts_ = {} ) VULKAN_HPP_NOEXCEPT + : objectCount( objectCount_ ) + , pObjectEntryTypes( pObjectEntryTypes_ ) + , pObjectEntryCounts( pObjectEntryCounts_ ) + , pObjectEntryUsageFlags( pObjectEntryUsageFlags_ ) + , maxUniformBuffersPerDescriptor( maxUniformBuffersPerDescriptor_ ) + , maxStorageBuffersPerDescriptor( maxStorageBuffersPerDescriptor_ ) + , maxStorageImagesPerDescriptor( maxStorageImagesPerDescriptor_ ) + , maxSampledImagesPerDescriptor( maxSampledImagesPerDescriptor_ ) + , maxPipelineLayouts( maxPipelineLayouts_ ) + {} + + VULKAN_HPP_CONSTEXPR ObjectTableCreateInfoNVX( ObjectTableCreateInfoNVX const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , objectCount( rhs.objectCount ) + , pObjectEntryTypes( rhs.pObjectEntryTypes ) + , pObjectEntryCounts( rhs.pObjectEntryCounts ) + , pObjectEntryUsageFlags( rhs.pObjectEntryUsageFlags ) + , maxUniformBuffersPerDescriptor( rhs.maxUniformBuffersPerDescriptor ) + , maxStorageBuffersPerDescriptor( rhs.maxStorageBuffersPerDescriptor ) + , maxStorageImagesPerDescriptor( rhs.maxStorageImagesPerDescriptor ) + , maxSampledImagesPerDescriptor( rhs.maxSampledImagesPerDescriptor ) + , maxPipelineLayouts( rhs.maxPipelineLayouts ) + {} + + ObjectTableCreateInfoNVX & operator=( ObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ObjectTableCreateInfoNVX ) - offsetof( ObjectTableCreateInfoNVX, pNext ) ); + return *this; + } + + 
ObjectTableCreateInfoNVX( VkObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ObjectTableCreateInfoNVX& operator=( VkObjectTableCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ObjectTableCreateInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ObjectTableCreateInfoNVX & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT + { + objectCount = objectCount_; + return *this; + } + + ObjectTableCreateInfoNVX & setPObjectEntryTypes( const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes_ ) VULKAN_HPP_NOEXCEPT + { + pObjectEntryTypes = pObjectEntryTypes_; + return *this; + } + + ObjectTableCreateInfoNVX & setPObjectEntryCounts( const uint32_t* pObjectEntryCounts_ ) VULKAN_HPP_NOEXCEPT + { + pObjectEntryCounts = pObjectEntryCounts_; + return *this; + } + + ObjectTableCreateInfoNVX & setPObjectEntryUsageFlags( const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags_ ) VULKAN_HPP_NOEXCEPT + { + pObjectEntryUsageFlags = pObjectEntryUsageFlags_; + return *this; + } + + ObjectTableCreateInfoNVX & setMaxUniformBuffersPerDescriptor( uint32_t maxUniformBuffersPerDescriptor_ ) VULKAN_HPP_NOEXCEPT + { + maxUniformBuffersPerDescriptor = maxUniformBuffersPerDescriptor_; + return *this; + } + + ObjectTableCreateInfoNVX & setMaxStorageBuffersPerDescriptor( uint32_t maxStorageBuffersPerDescriptor_ ) VULKAN_HPP_NOEXCEPT + { + maxStorageBuffersPerDescriptor = maxStorageBuffersPerDescriptor_; + return *this; + } + + ObjectTableCreateInfoNVX & setMaxStorageImagesPerDescriptor( uint32_t maxStorageImagesPerDescriptor_ ) VULKAN_HPP_NOEXCEPT + { + maxStorageImagesPerDescriptor = maxStorageImagesPerDescriptor_; + return *this; + } + + ObjectTableCreateInfoNVX & setMaxSampledImagesPerDescriptor( uint32_t maxSampledImagesPerDescriptor_ ) VULKAN_HPP_NOEXCEPT + { + maxSampledImagesPerDescriptor = maxSampledImagesPerDescriptor_; + return *this; + } + + ObjectTableCreateInfoNVX & setMaxPipelineLayouts( uint32_t maxPipelineLayouts_ ) VULKAN_HPP_NOEXCEPT + { + maxPipelineLayouts = maxPipelineLayouts_; + return *this; + } + + operator VkObjectTableCreateInfoNVX const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkObjectTableCreateInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ObjectTableCreateInfoNVX const& ) const = default; +#else + bool operator==( ObjectTableCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( objectCount == rhs.objectCount ) + && ( pObjectEntryTypes == rhs.pObjectEntryTypes ) + && ( pObjectEntryCounts == rhs.pObjectEntryCounts ) + && ( pObjectEntryUsageFlags == rhs.pObjectEntryUsageFlags ) + && ( maxUniformBuffersPerDescriptor == rhs.maxUniformBuffersPerDescriptor ) + && ( maxStorageBuffersPerDescriptor == rhs.maxStorageBuffersPerDescriptor ) + && ( maxStorageImagesPerDescriptor == rhs.maxStorageImagesPerDescriptor ) + && ( maxSampledImagesPerDescriptor == rhs.maxSampledImagesPerDescriptor ) + && ( maxPipelineLayouts == rhs.maxPipelineLayouts ); + } + + bool operator!=( ObjectTableCreateInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eObjectTableCreateInfoNVX; + const void* pNext = {}; + 
uint32_t objectCount = {}; + const VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX* pObjectEntryTypes = {}; + const uint32_t* pObjectEntryCounts = {}; + const VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX* pObjectEntryUsageFlags = {}; + uint32_t maxUniformBuffersPerDescriptor = {}; + uint32_t maxStorageBuffersPerDescriptor = {}; + uint32_t maxStorageImagesPerDescriptor = {}; + uint32_t maxSampledImagesPerDescriptor = {}; + uint32_t maxPipelineLayouts = {}; + }; + static_assert( sizeof( ObjectTableCreateInfoNVX ) == sizeof( VkObjectTableCreateInfoNVX ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct ObjectTableEntryNVX + { + VULKAN_HPP_CONSTEXPR ObjectTableEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet, + VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR ObjectTableEntryNVX( ObjectTableEntryNVX const& rhs ) VULKAN_HPP_NOEXCEPT + : type( rhs.type ) + , flags( rhs.flags ) + {} + + ObjectTableEntryNVX & operator=( ObjectTableEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( ObjectTableEntryNVX ) ); + return *this; + } + + ObjectTableEntryNVX( VkObjectTableEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ObjectTableEntryNVX& operator=( VkObjectTableEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ObjectTableEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + ObjectTableEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkObjectTableEntryNVX const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkObjectTableEntryNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ObjectTableEntryNVX const& ) const = default; +#else + bool operator==( ObjectTableEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( type == rhs.type ) + && ( flags == rhs.flags ); + } + + bool operator!=( ObjectTableEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet; + VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {}; + }; + static_assert( sizeof( ObjectTableEntryNVX ) == sizeof( VkObjectTableEntryNVX ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct ObjectTableDescriptorSetEntryNVX + { + VULKAN_HPP_CONSTEXPR ObjectTableDescriptorSetEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet, + VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , flags( flags_ ) + , pipelineLayout( pipelineLayout_ ) + , descriptorSet( descriptorSet_ ) + {} + + VULKAN_HPP_CONSTEXPR ObjectTableDescriptorSetEntryNVX( ObjectTableDescriptorSetEntryNVX const& rhs ) VULKAN_HPP_NOEXCEPT + : type( rhs.type ) + , flags( rhs.flags ) + , pipelineLayout( rhs.pipelineLayout ) + , descriptorSet( rhs.descriptorSet ) + {} + + explicit ObjectTableDescriptorSetEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, + VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ = {} ) + : type( objectTableEntryNVX.type ) + , flags( objectTableEntryNVX.flags ) + , pipelineLayout( pipelineLayout_ ) + , descriptorSet( descriptorSet_ ) + {} + + ObjectTableDescriptorSetEntryNVX & operator=( ObjectTableDescriptorSetEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( ObjectTableDescriptorSetEntryNVX ) ); + return *this; + } + + ObjectTableDescriptorSetEntryNVX( VkObjectTableDescriptorSetEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ObjectTableDescriptorSetEntryNVX& operator=( VkObjectTableDescriptorSetEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ObjectTableDescriptorSetEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + ObjectTableDescriptorSetEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ObjectTableDescriptorSetEntryNVX & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT + { + pipelineLayout = pipelineLayout_; + return *this; + } + + ObjectTableDescriptorSetEntryNVX & setDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet_ ) VULKAN_HPP_NOEXCEPT + { + descriptorSet = descriptorSet_; + return *this; + } + + operator VkObjectTableDescriptorSetEntryNVX const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkObjectTableDescriptorSetEntryNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ObjectTableDescriptorSetEntryNVX const& ) const = default; +#else + bool operator==( ObjectTableDescriptorSetEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( type == rhs.type ) + && ( flags == rhs.flags ) + && ( pipelineLayout == rhs.pipelineLayout ) + && ( descriptorSet == rhs.descriptorSet ); + } + + bool operator!=( ObjectTableDescriptorSetEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet; + VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet = {}; + }; + static_assert( sizeof( ObjectTableDescriptorSetEntryNVX ) == sizeof( 
VkObjectTableDescriptorSetEntryNVX ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct ObjectTableIndexBufferEntryNVX + { + VULKAN_HPP_CONSTEXPR ObjectTableIndexBufferEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet, + VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , flags( flags_ ) + , buffer( buffer_ ) + , indexType( indexType_ ) + {} + + VULKAN_HPP_CONSTEXPR ObjectTableIndexBufferEntryNVX( ObjectTableIndexBufferEntryNVX const& rhs ) VULKAN_HPP_NOEXCEPT + : type( rhs.type ) + , flags( rhs.flags ) + , buffer( rhs.buffer ) + , indexType( rhs.indexType ) + {} + + explicit ObjectTableIndexBufferEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) + : type( objectTableEntryNVX.type ) + , flags( objectTableEntryNVX.flags ) + , buffer( buffer_ ) + , indexType( indexType_ ) + {} + + ObjectTableIndexBufferEntryNVX & operator=( ObjectTableIndexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( ObjectTableIndexBufferEntryNVX ) ); + return *this; + } + + ObjectTableIndexBufferEntryNVX( VkObjectTableIndexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ObjectTableIndexBufferEntryNVX& operator=( VkObjectTableIndexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ObjectTableIndexBufferEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + ObjectTableIndexBufferEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ObjectTableIndexBufferEntryNVX & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + ObjectTableIndexBufferEntryNVX & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } + + operator VkObjectTableIndexBufferEntryNVX const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkObjectTableIndexBufferEntryNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ObjectTableIndexBufferEntryNVX const& ) const = default; +#else + bool operator==( ObjectTableIndexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( type == rhs.type ) + && ( flags == rhs.flags ) + && ( buffer == rhs.buffer ) + && ( indexType == rhs.indexType ); + } + + bool operator!=( ObjectTableIndexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet; + VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + }; + static_assert( sizeof( ObjectTableIndexBufferEntryNVX ) == sizeof( 
VkObjectTableIndexBufferEntryNVX ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct ObjectTablePipelineEntryNVX + { + VULKAN_HPP_CONSTEXPR ObjectTablePipelineEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet, + VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , flags( flags_ ) + , pipeline( pipeline_ ) + {} + + VULKAN_HPP_CONSTEXPR ObjectTablePipelineEntryNVX( ObjectTablePipelineEntryNVX const& rhs ) VULKAN_HPP_NOEXCEPT + : type( rhs.type ) + , flags( rhs.flags ) + , pipeline( rhs.pipeline ) + {} + + explicit ObjectTablePipelineEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} ) + : type( objectTableEntryNVX.type ) + , flags( objectTableEntryNVX.flags ) + , pipeline( pipeline_ ) + {} + + ObjectTablePipelineEntryNVX & operator=( ObjectTablePipelineEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( ObjectTablePipelineEntryNVX ) ); + return *this; + } + + ObjectTablePipelineEntryNVX( VkObjectTablePipelineEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ObjectTablePipelineEntryNVX& operator=( VkObjectTablePipelineEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ObjectTablePipelineEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + ObjectTablePipelineEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ObjectTablePipelineEntryNVX & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + operator VkObjectTablePipelineEntryNVX const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkObjectTablePipelineEntryNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ObjectTablePipelineEntryNVX const& ) const = default; +#else + bool operator==( ObjectTablePipelineEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( type == rhs.type ) + && ( flags == rhs.flags ) + && ( pipeline == rhs.pipeline ); + } + + bool operator!=( ObjectTablePipelineEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet; + VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + }; + static_assert( sizeof( ObjectTablePipelineEntryNVX ) == sizeof( VkObjectTablePipelineEntryNVX ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct ObjectTablePushConstantEntryNVX + { + VULKAN_HPP_CONSTEXPR ObjectTablePushConstantEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet, + VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , flags( flags_ ) + , pipelineLayout( pipelineLayout_ ) + , stageFlags( stageFlags_ ) + {} + + VULKAN_HPP_CONSTEXPR ObjectTablePushConstantEntryNVX( ObjectTablePushConstantEntryNVX const& rhs ) VULKAN_HPP_NOEXCEPT + : type( rhs.type ) + , flags( rhs.flags ) + , pipelineLayout( rhs.pipelineLayout ) + , stageFlags( rhs.stageFlags ) + {} + + explicit ObjectTablePushConstantEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {} ) + : type( objectTableEntryNVX.type ) + , flags( objectTableEntryNVX.flags ) + , pipelineLayout( pipelineLayout_ ) + , stageFlags( stageFlags_ ) + {} + + ObjectTablePushConstantEntryNVX & operator=( ObjectTablePushConstantEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( ObjectTablePushConstantEntryNVX ) ); + return *this; + } + + ObjectTablePushConstantEntryNVX( VkObjectTablePushConstantEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ObjectTablePushConstantEntryNVX& operator=( VkObjectTablePushConstantEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ObjectTablePushConstantEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + ObjectTablePushConstantEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ObjectTablePushConstantEntryNVX & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT + { + pipelineLayout = pipelineLayout_; + return *this; + } + + ObjectTablePushConstantEntryNVX & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + { + stageFlags = stageFlags_; + return *this; + } + + operator VkObjectTablePushConstantEntryNVX const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkObjectTablePushConstantEntryNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ObjectTablePushConstantEntryNVX const& ) const = default; +#else + bool operator==( ObjectTablePushConstantEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( type == rhs.type ) + && ( flags == rhs.flags ) + && ( pipelineLayout == rhs.pipelineLayout ) + && ( stageFlags == rhs.stageFlags ); + } + + bool operator!=( ObjectTablePushConstantEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet; + VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + }; + static_assert( sizeof( ObjectTablePushConstantEntryNVX ) == sizeof( VkObjectTablePushConstantEntryNVX ), "struct and wrapper have different 
size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct ObjectTableVertexBufferEntryNVX + { + VULKAN_HPP_CONSTEXPR ObjectTableVertexBufferEntryNVX( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet, + VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ = {}, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) VULKAN_HPP_NOEXCEPT + : type( type_ ) + , flags( flags_ ) + , buffer( buffer_ ) + {} + + VULKAN_HPP_CONSTEXPR ObjectTableVertexBufferEntryNVX( ObjectTableVertexBufferEntryNVX const& rhs ) VULKAN_HPP_NOEXCEPT + : type( rhs.type ) + , flags( rhs.flags ) + , buffer( rhs.buffer ) + {} + + explicit ObjectTableVertexBufferEntryNVX( ObjectTableEntryNVX const& objectTableEntryNVX, + VULKAN_HPP_NAMESPACE::Buffer buffer_ = {} ) + : type( objectTableEntryNVX.type ) + , flags( objectTableEntryNVX.flags ) + , buffer( buffer_ ) + {} + + ObjectTableVertexBufferEntryNVX & operator=( ObjectTableVertexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( ObjectTableVertexBufferEntryNVX ) ); + return *this; + } + + ObjectTableVertexBufferEntryNVX( VkObjectTableVertexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ObjectTableVertexBufferEntryNVX& operator=( VkObjectTableVertexBufferEntryNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ObjectTableVertexBufferEntryNVX & setType( VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + ObjectTableVertexBufferEntryNVX & setFlags( VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ObjectTableVertexBufferEntryNVX & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT + { + buffer = buffer_; + return *this; + } + + operator VkObjectTableVertexBufferEntryNVX const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkObjectTableVertexBufferEntryNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( ObjectTableVertexBufferEntryNVX const& ) const = default; +#else + bool operator==( ObjectTableVertexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( type == rhs.type ) + && ( flags == rhs.flags ) + && ( buffer == rhs.buffer ); + } + + bool operator!=( ObjectTableVertexBufferEntryNVX const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX type = VULKAN_HPP_NAMESPACE::ObjectEntryTypeNVX::eDescriptorSet; + VULKAN_HPP_NAMESPACE::ObjectEntryUsageFlagsNVX flags = {}; + VULKAN_HPP_NAMESPACE::Buffer buffer = {}; + }; + static_assert( sizeof( ObjectTableVertexBufferEntryNVX ) == sizeof( VkObjectTableVertexBufferEntryNVX ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkOffset.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkOffset.hpp new file mode 100644 index 00000000..1392fc4d --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkOffset.hpp @@ -0,0 +1,97 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkObject.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkExtension.hpp" +#include "VkCoarse.hpp" +#include "VkCommand.hpp" +#include "VkMetal.hpp" +#include "VkFormat.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkFence.hpp" +#include "VkDedicated.hpp" +#include "VkDraw.hpp" +#include "VkDispatch.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkExtent.hpp" +#include "VkFilter.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkMultisample.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" +#include "VkOffset.hpp" + +namespace VULKAN_HPP_NAMESPACE +{} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkPast.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkPast.hpp new file mode 100644 index 00000000..74684c7b --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkPast.hpp @@ -0,0 +1,174 @@ +// Copyright (c) 2015-2019 The Khronos 
Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkObject.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkExtension.hpp" +#include "VkCoarse.hpp" +#include "VkCommand.hpp" +#include "VkMetal.hpp" +#include "VkFormat.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkFence.hpp" +#include "VkDedicated.hpp" +#include "VkDraw.hpp" +#include "VkDispatch.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkExtent.hpp" +#include "VkFilter.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkMultisample.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkOffset.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct PastPresentationTimingGOOGLE + { + VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( uint32_t presentID_ = {}, + uint64_t desiredPresentTime_ = {}, + uint64_t actualPresentTime_ = {}, + uint64_t earliestPresentTime_ = {}, + uint64_t presentMargin_ = {} ) VULKAN_HPP_NOEXCEPT + : presentID( presentID_ ) + , desiredPresentTime( 
desiredPresentTime_ ) + , actualPresentTime( actualPresentTime_ ) + , earliestPresentTime( earliestPresentTime_ ) + , presentMargin( presentMargin_ ) + {} + + VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( PastPresentationTimingGOOGLE const& rhs ) VULKAN_HPP_NOEXCEPT + : presentID( rhs.presentID ) + , desiredPresentTime( rhs.desiredPresentTime ) + , actualPresentTime( rhs.actualPresentTime ) + , earliestPresentTime( rhs.earliestPresentTime ) + , presentMargin( rhs.presentMargin ) + {} + + PastPresentationTimingGOOGLE & operator=( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( PastPresentationTimingGOOGLE ) ); + return *this; + } + + PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PastPresentationTimingGOOGLE& operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkPastPresentationTimingGOOGLE const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PastPresentationTimingGOOGLE const& ) const = default; +#else + bool operator==( PastPresentationTimingGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( presentID == rhs.presentID ) + && ( desiredPresentTime == rhs.desiredPresentTime ) + && ( actualPresentTime == rhs.actualPresentTime ) + && ( earliestPresentTime == rhs.earliestPresentTime ) + && ( presentMargin == rhs.presentMargin ); + } + + bool operator!=( PastPresentationTimingGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t presentID = {}; + uint64_t desiredPresentTime = {}; + uint64_t actualPresentTime = {}; + uint64_t earliestPresentTime = {}; + uint64_t presentMargin = {}; + }; + static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkPerformance.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkPerformance.hpp new file mode 100644 index 00000000..349ccc0d --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkPerformance.hpp @@ -0,0 +1,564 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkExtension.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkMetal.hpp"
+#include "VkFormat.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkFence.hpp"
+#include "VkDedicated.hpp"
+#include "VkDraw.hpp"
+#include "VkDispatch.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+#include "VkFilter.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPerformance.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct PerformanceConfigurationAcquireInfoINTEL
+  {
+    VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( PerformanceConfigurationAcquireInfoINTEL const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , type( rhs.type )
+    {}
+
+    PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PerformanceConfigurationAcquireInfoINTEL ) - offsetof( PerformanceConfigurationAcquireInfoINTEL, pNext ) );
+      return *this;
+    }
+
+    PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PerformanceConfigurationAcquireInfoINTEL& operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const *>(&rhs);
+      return *this;
+    }
+
+    PerformanceConfigurationAcquireInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PerformanceConfigurationAcquireInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    operator VkPerformanceConfigurationAcquireInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( this );
+    }
+
+    operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const& ) const = default;
+#else
+    bool operator==( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type );
+    }
+
+    bool operator!=( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated;
+  };
+  static_assert( sizeof( PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceConfigurationAcquireInfoINTEL>::value, "struct wrapper is not a standard layout!" );
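+
+  // The struct above belongs to the VK_INTEL_performance_query extension. A
+  // usage sketch, under the assumption that the Vulkan-Hpp wrapper for
+  // vkAcquirePerformanceConfigurationINTEL is exposed as
+  // Device::acquirePerformanceConfigurationINTEL (illustrative comment, not
+  // generator output):
+  //
+  //   vk::PerformanceConfigurationAcquireInfoINTEL acquireInfo;  // type defaults to eCommandQueueMetricsDiscoveryActivated
+  //   vk::PerformanceConfigurationINTEL configuration = device.acquirePerformanceConfigurationINTEL( acquireInfo );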
+
+  struct PerformanceMarkerInfoINTEL
+  {
+    VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = {} ) VULKAN_HPP_NOEXCEPT
+      : marker( marker_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , marker( rhs.marker )
+    {}
+
+    PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PerformanceMarkerInfoINTEL ) - offsetof( PerformanceMarkerInfoINTEL, pNext ) );
+      return *this;
+    }
+
+    PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PerformanceMarkerInfoINTEL& operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const *>(&rhs);
+      return *this;
+    }
+
+    PerformanceMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT
+    {
+      marker = marker_;
+      return *this;
+    }
+
+    operator VkPerformanceMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( this );
+    }
+
+    operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceMarkerInfoINTEL*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PerformanceMarkerInfoINTEL const& ) const = default;
+#else
+    bool operator==( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( marker == rhs.marker );
+    }
+
+    bool operator!=( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL;
+    const void* pNext = {};
+    uint64_t marker = {};
+  };
+  static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
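+
+  // Usage sketch for the marker struct above (illustrative comment, not
+  // generator output; assumes CommandBuffer::setPerformanceMarkerINTEL wraps
+  // vkCmdSetPerformanceMarkerINTEL):
+  //
+  //   commandBuffer.setPerformanceMarkerINTEL( vk::PerformanceMarkerInfoINTEL( 0xC0FFEEu ) );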
+
+  struct PerformanceOverrideInfoINTEL
+  {
+    VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware,
+                                                       VULKAN_HPP_NAMESPACE::Bool32 enable_ = {},
+                                                       uint64_t parameter_ = {} ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+      , enable( enable_ )
+      , parameter( parameter_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , type( rhs.type )
+      , enable( rhs.enable )
+      , parameter( rhs.parameter )
+    {}
+
+    PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PerformanceOverrideInfoINTEL ) - offsetof( PerformanceOverrideInfoINTEL, pNext ) );
+      return *this;
+    }
+
+    PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PerformanceOverrideInfoINTEL& operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const *>(&rhs);
+      return *this;
+    }
+
+    PerformanceOverrideInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enable = enable_;
+      return *this;
+    }
+
+    PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      parameter = parameter_;
+      return *this;
+    }
+
+    operator VkPerformanceOverrideInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( this );
+    }
+
+    operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceOverrideInfoINTEL*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PerformanceOverrideInfoINTEL const& ) const = default;
+#else
+    bool operator==( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type )
+          && ( enable == rhs.enable )
+          && ( parameter == rhs.parameter );
+    }
+
+    bool operator!=( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware;
+    VULKAN_HPP_NAMESPACE::Bool32 enable = {};
+    uint64_t parameter = {};
+  };
+  static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceOverrideInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+
+  struct PerformanceStreamMarkerInfoINTEL
+  {
+    VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = {} ) VULKAN_HPP_NOEXCEPT
+      : marker( marker_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , marker( rhs.marker )
+    {}
+
+    PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PerformanceStreamMarkerInfoINTEL ) - offsetof( PerformanceStreamMarkerInfoINTEL, pNext ) );
+      return *this;
+    }
+
+    PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PerformanceStreamMarkerInfoINTEL& operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const *>(&rhs);
+      return *this;
+    }
+
+    PerformanceStreamMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT
+    {
+      marker = marker_;
+      return *this;
+    }
+
+    operator VkPerformanceStreamMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( this );
+    }
+
+    operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PerformanceStreamMarkerInfoINTEL const& ) const = default;
+#else
+    bool operator==( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( marker == rhs.marker );
+    }
+
+    bool operator!=( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL;
+    const void* pNext = {};
+    uint32_t marker = {};
+  };
+  static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceStreamMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
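+
+  // Note (illustrative comment, not generator output): PerformanceValueDataINTEL
+  // below is an untagged union; the active member is identified by the
+  // PerformanceValueTypeINTEL value stored beside it in PerformanceValueINTEL,
+  // e.g. valueString is only meaningful when type == eString.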
+
+  union PerformanceValueDataINTEL
+  {
+    PerformanceValueDataINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const& rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) );
+    }
+
+    PerformanceValueDataINTEL( uint32_t value32_ = {} )
+    {
+      value32 = value32_;
+    }
+
+    PerformanceValueDataINTEL( uint64_t value64_ )
+    {
+      value64 = value64_;
+    }
+
+    PerformanceValueDataINTEL( float valueFloat_ )
+    {
+      valueFloat = valueFloat_;
+    }
+
+    PerformanceValueDataINTEL( const char* valueString_ )
+    {
+      valueString = valueString_;
+    }
+
+    PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      value32 = value32_;
+      return *this;
+    }
+
+    PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      value64 = value64_;
+      return *this;
+    }
+
+    PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      valueFloat = valueFloat_;
+      return *this;
+    }
+
+    PerformanceValueDataINTEL & setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT
+    {
+      valueBool = valueBool_;
+      return *this;
+    }
+
+    PerformanceValueDataINTEL & setValueString( const char* valueString_ ) VULKAN_HPP_NOEXCEPT
+    {
+      valueString = valueString_;
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) );
+      return *this;
+    }
+
+    operator VkPerformanceValueDataINTEL const&() const
+    {
+      return *reinterpret_cast<const VkPerformanceValueDataINTEL*>(this);
+    }
+
+    operator VkPerformanceValueDataINTEL &()
+    {
+      return *reinterpret_cast<VkPerformanceValueDataINTEL*>(this);
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    uint32_t value32;
+    uint64_t value64;
+    float valueFloat;
+    VULKAN_HPP_NAMESPACE::Bool32 valueBool;
+    const char* valueString;
+#else
+    uint32_t value32;
+    uint64_t value64;
+    float valueFloat;
+    VkBool32 valueBool;
+    const char* valueString;
+#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+  };
+
+  struct PerformanceValueINTEL
+  {
+    PerformanceValueINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32,
+                           VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {} ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+      , data( data_ )
+    {}
+
+    PerformanceValueINTEL( PerformanceValueINTEL const& rhs ) VULKAN_HPP_NOEXCEPT
+      : type( rhs.type )
+      , data( rhs.data )
+    {}
+
+    PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( PerformanceValueINTEL ) );
+      return *this;
+    }
+
+    PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PerformanceValueINTEL& operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL const *>(&rhs);
+      return *this;
+    }
+
+    PerformanceValueINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    PerformanceValueINTEL & setData( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ ) VULKAN_HPP_NOEXCEPT
+    {
+      data = data_;
+      return *this;
+    }
+
+    operator VkPerformanceValueINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPerformanceValueINTEL*>( this );
+    }
+
+    operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPerformanceValueINTEL*>( this );
+    }
+
+  public:
+    VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32;
+    VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {};
+  };
+  static_assert( sizeof( PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PerformanceValueINTEL>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkPhysical.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkPhysical.hpp
new file mode 100644
index 00000000..cfd6b576
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkPhysical.hpp
@@ -0,0 +1,8924 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
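+
+// Note (illustrative comment, not generator output): this header wraps the
+// VkPhysicalDevice* feature and property structs. Feature structs expose a
+// writable pNext plus setters so they can be chained into device creation;
+// property structs are filled in by queries and therefore omit the setters.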
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkExtension.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkMetal.hpp"
+#include "VkFormat.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkFence.hpp"
+#include "VkDedicated.hpp"
+#include "VkDraw.hpp"
+#include "VkDispatch.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+#include "VkPerformance.hpp"
+#include "VkFilter.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPhysical.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct PhysicalDevice16BitStorageFeatures
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {},
+                                                             VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {},
+                                                             VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {},
+                                                             VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {} ) VULKAN_HPP_NOEXCEPT
+      : storageBuffer16BitAccess( storageBuffer16BitAccess_ )
+      , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ )
+      , storagePushConstant16( storagePushConstant16_ )
+      , storageInputOutput16( storageInputOutput16_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , storageBuffer16BitAccess( rhs.storageBuffer16BitAccess )
+      , uniformAndStorageBuffer16BitAccess( rhs.uniformAndStorageBuffer16BitAccess )
+      , storagePushConstant16( rhs.storagePushConstant16 )
+      , storageInputOutput16( rhs.storageInputOutput16 )
+    {}
+
+    PhysicalDevice16BitStorageFeatures & operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevice16BitStorageFeatures ) - offsetof( PhysicalDevice16BitStorageFeatures, pNext ) );
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDevice16BitStorageFeatures& operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures & setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageBuffer16BitAccess = storageBuffer16BitAccess_;
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storagePushConstant16 = storagePushConstant16_;
+      return *this;
+    }
+
+    PhysicalDevice16BitStorageFeatures & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageInputOutput16 = storageInputOutput16_;
+      return *this;
+    }
+
+    operator VkPhysicalDevice16BitStorageFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures*>( this );
+    }
+
+    operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevice16BitStorageFeatures*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevice16BitStorageFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDevice16BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess )
+          && ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess )
+          && ( storagePushConstant16 == rhs.storagePushConstant16 )
+          && ( storageInputOutput16 == rhs.storageInputOutput16 );
+    }
+
+    bool operator!=( PhysicalDevice16BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
+  };
+  static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevice16BitStorageFeatures>::value, "struct wrapper is not a standard layout!" );
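+
+  // Usage sketch for the feature struct above (illustrative comment, not
+  // generator output; assumes the templated getFeatures2 overload of
+  // vulkan.hpp that returns a vk::StructureChain):
+  //
+  //   auto chain     = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDevice16BitStorageFeatures>();
+  //   auto& features = chain.get<vk::PhysicalDevice16BitStorageFeatures>();
+  //   vk::Bool32 ok  = features.storageBuffer16BitAccess;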
+
+  struct PhysicalDevice8BitStorageFeaturesKHR
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {},
+                                                               VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {},
+                                                               VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {} ) VULKAN_HPP_NOEXCEPT
+      : storageBuffer8BitAccess( storageBuffer8BitAccess_ )
+      , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ )
+      , storagePushConstant8( storagePushConstant8_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeaturesKHR( PhysicalDevice8BitStorageFeaturesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , storageBuffer8BitAccess( rhs.storageBuffer8BitAccess )
+      , uniformAndStorageBuffer8BitAccess( rhs.uniformAndStorageBuffer8BitAccess )
+      , storagePushConstant8( rhs.storagePushConstant8 )
+    {}
+
+    PhysicalDevice8BitStorageFeaturesKHR & operator=( PhysicalDevice8BitStorageFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevice8BitStorageFeaturesKHR ) - offsetof( PhysicalDevice8BitStorageFeaturesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDevice8BitStorageFeaturesKHR( VkPhysicalDevice8BitStorageFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDevice8BitStorageFeaturesKHR& operator=( VkPhysicalDevice8BitStorageFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeaturesKHR const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDevice8BitStorageFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevice8BitStorageFeaturesKHR & setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storageBuffer8BitAccess = storageBuffer8BitAccess_;
+      return *this;
+    }
+
+    PhysicalDevice8BitStorageFeaturesKHR & setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
+      return *this;
+    }
+
+    PhysicalDevice8BitStorageFeaturesKHR & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
+    {
+      storagePushConstant8 = storagePushConstant8_;
+      return *this;
+    }
+
+    operator VkPhysicalDevice8BitStorageFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevice8BitStorageFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDevice8BitStorageFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevice8BitStorageFeaturesKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevice8BitStorageFeaturesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDevice8BitStorageFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess )
+          && ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess )
+          && ( storagePushConstant8 == rhs.storagePushConstant8 );
+    }
+
+    bool operator!=( PhysicalDevice8BitStorageFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeaturesKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
+    VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
+  };
+  static_assert( sizeof( PhysicalDevice8BitStorageFeaturesKHR ) == sizeof( VkPhysicalDevice8BitStorageFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevice8BitStorageFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceASTCDecodeFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {} ) VULKAN_HPP_NOEXCEPT
+      : decodeModeSharedExponent( decodeModeSharedExponent_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , decodeModeSharedExponent( rhs.decodeModeSharedExponent )
+    {}
+
+    PhysicalDeviceASTCDecodeFeaturesEXT & operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) - offsetof( PhysicalDeviceASTCDecodeFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceASTCDecodeFeaturesEXT& operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceASTCDecodeFeaturesEXT & setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      decodeModeSharedExponent = decodeModeSharedExponent_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceASTCDecodeFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( decodeModeSharedExponent == rhs.decodeModeSharedExponent );
+    }
+
+    bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {};
+  };
+  static_assert( sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceASTCDecodeFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {} ) VULKAN_HPP_NOEXCEPT
+      : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , advancedBlendCoherentOperations( rhs.advancedBlendCoherentOperations )
+    {}
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) - offsetof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setAdvancedBlendCoherentOperations( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      advancedBlendCoherentOperations = advancedBlendCoherentOperations_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations );
+    }
+
+    bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {};
+  };
+  static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( uint32_t advancedBlendMaxColorAttachments_ = {},
+                                                                            VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {},
+                                                                            VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {},
+                                                                            VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {},
+                                                                            VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {},
+                                                                            VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {} ) VULKAN_HPP_NOEXCEPT
+      : advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ )
+      , advancedBlendIndependentBlend( advancedBlendIndependentBlend_ )
+      , advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ )
+      , advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ )
+      , advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ )
+      , advancedBlendAllOperations( advancedBlendAllOperations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , advancedBlendMaxColorAttachments( rhs.advancedBlendMaxColorAttachments )
+      , advancedBlendIndependentBlend( rhs.advancedBlendIndependentBlend )
+      , advancedBlendNonPremultipliedSrcColor( rhs.advancedBlendNonPremultipliedSrcColor )
+      , advancedBlendNonPremultipliedDstColor( rhs.advancedBlendNonPremultipliedDstColor )
+      , advancedBlendCorrelatedOverlap( rhs.advancedBlendCorrelatedOverlap )
+      , advancedBlendAllOperations( rhs.advancedBlendAllOperations )
+    {}
+
+    PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) - offsetof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceBlendOperationAdvancedPropertiesEXT& operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments )
+          && ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend )
+          && ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor )
+          && ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor )
+          && ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap )
+          && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations );
+    }
+
+    bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+    void* pNext = {};
+    uint32_t advancedBlendMaxColorAttachments = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {};
+    VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {};
+  };
+  static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceBufferDeviceAddressFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {},
+                                                                       VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {},
+                                                                       VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {} ) VULKAN_HPP_NOEXCEPT
+      : bufferDeviceAddress( bufferDeviceAddress_ )
+      , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ )
+      , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , bufferDeviceAddress( rhs.bufferDeviceAddress )
+      , bufferDeviceAddressCaptureReplay( rhs.bufferDeviceAddressCaptureReplay )
+      , bufferDeviceAddressMultiDevice( rhs.bufferDeviceAddressMultiDevice )
+    {}
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) - offsetof( PhysicalDeviceBufferDeviceAddressFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT& operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddress = bufferDeviceAddress_;
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
+      return *this;
+    }
+
+    PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( bufferDeviceAddress == rhs.bufferDeviceAddress )
+          && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
+          && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
+    }
+
+    bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
+  };
+  static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceCoherentMemoryFeaturesAMD
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {} ) VULKAN_HPP_NOEXCEPT
+      : deviceCoherentMemory( deviceCoherentMemory_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , deviceCoherentMemory( rhs.deviceCoherentMemory )
+    {}
+
+    PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) - offsetof( PhysicalDeviceCoherentMemoryFeaturesAMD, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceCoherentMemoryFeaturesAMD& operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceCoherentMemoryFeaturesAMD & setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      deviceCoherentMemory = deviceCoherentMemory_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
+    }
+
+    operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( deviceCoherentMemory == rhs.deviceCoherentMemory );
+    }
+
+    bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {};
+  };
+  static_assert( sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) == sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceCoherentMemoryFeaturesAMD>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceComputeShaderDerivativesFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {},
+                                                                           VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {} ) VULKAN_HPP_NOEXCEPT
+      : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ )
+      , computeDerivativeGroupLinear( computeDerivativeGroupLinear_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , computeDerivativeGroupQuads( rhs.computeDerivativeGroupQuads )
+      , computeDerivativeGroupLinear( rhs.computeDerivativeGroupLinear )
+    {}
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) - offsetof( PhysicalDeviceComputeShaderDerivativesFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesNV& operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT
+    {
+      computeDerivativeGroupQuads = computeDerivativeGroupQuads_;
+      return *this;
+    }
+
+    PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT
+    {
+      computeDerivativeGroupLinear = computeDerivativeGroupLinear_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads )
+          && ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear );
+    }
+
+    bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {};
+    VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {};
+  };
+  static_assert( sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceConditionalRenderingFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {},
+                                                                        VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {} ) VULKAN_HPP_NOEXCEPT
+      : conditionalRendering( conditionalRendering_ )
+      , inheritedConditionalRendering( inheritedConditionalRendering_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , conditionalRendering( rhs.conditionalRendering )
+      , inheritedConditionalRendering( rhs.inheritedConditionalRendering )
+    {}
+
+    PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) - offsetof( PhysicalDeviceConditionalRenderingFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceConditionalRenderingFeaturesEXT& operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceConditionalRenderingFeaturesEXT & setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT
+    {
+      conditionalRendering = conditionalRendering_;
+      return *this;
+    }
+
+    PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inheritedConditionalRendering = inheritedConditionalRendering_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( conditionalRendering == rhs.conditionalRendering )
+          && ( inheritedConditionalRendering == rhs.inheritedConditionalRendering );
+    }
+
+    bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {};
+    VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {};
+  };
+  static_assert( sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) == sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceConditionalRenderingFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceConservativeRasterizationPropertiesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( float primitiveOverestimationSize_ = {},
+                                                                               float maxExtraPrimitiveOverestimationSize_ = {},
+                                                                               float extraPrimitiveOverestimationSizeGranularity_ = {},
+                                                                               VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {},
+                                                                               VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {},
+                                                                               VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {},
+                                                                               VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {},
+                                                                               VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {},
+                                                                               VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {} ) VULKAN_HPP_NOEXCEPT
+      : primitiveOverestimationSize( primitiveOverestimationSize_ )
+      , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ )
+      , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ )
+      , primitiveUnderestimation( primitiveUnderestimation_ )
+      , conservativePointAndLineRasterization( conservativePointAndLineRasterization_ )
+      , degenerateTrianglesRasterized( degenerateTrianglesRasterized_ )
+      , degenerateLinesRasterized( degenerateLinesRasterized_ )
+      , fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ )
+      , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , primitiveOverestimationSize( rhs.primitiveOverestimationSize )
+      , maxExtraPrimitiveOverestimationSize( rhs.maxExtraPrimitiveOverestimationSize )
+      , extraPrimitiveOverestimationSizeGranularity( rhs.extraPrimitiveOverestimationSizeGranularity )
+      , primitiveUnderestimation( rhs.primitiveUnderestimation )
+      , conservativePointAndLineRasterization( rhs.conservativePointAndLineRasterization )
+      , degenerateTrianglesRasterized( rhs.degenerateTrianglesRasterized )
+      , degenerateLinesRasterized( rhs.degenerateLinesRasterized )
+      , fullyCoveredFragmentShaderInputVariable( rhs.fullyCoveredFragmentShaderInputVariable )
+      , conservativeRasterizationPostDepthCoverage( rhs.conservativeRasterizationPostDepthCoverage )
+    {}
+
+    PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) - offsetof( PhysicalDeviceConservativeRasterizationPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceConservativeRasterizationPropertiesEXT& operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize )
+          && ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize )
+          && ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity )
+          && ( primitiveUnderestimation == rhs.primitiveUnderestimation )
+          && ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization )
+          && ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized )
+          && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized )
+          && ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable )
+          && ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage );
+    }
+
+    bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
+    void* pNext = {};
+    float primitiveOverestimationSize = {};
+    float maxExtraPrimitiveOverestimationSize = {};
+    float extraPrimitiveOverestimationSizeGranularity = {};
+    VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation = {};
+    VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization = {};
+    VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized = {};
+    VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {};
+    VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {};
+  };
+  static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceConservativeRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceCooperativeMatrixFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {},
+                                                                    VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {} ) VULKAN_HPP_NOEXCEPT
+      : cooperativeMatrix( cooperativeMatrix_ )
+      , cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( PhysicalDeviceCooperativeMatrixFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , cooperativeMatrix( rhs.cooperativeMatrix )
+      , cooperativeMatrixRobustBufferAccess( rhs.cooperativeMatrixRobustBufferAccess )
+    {}
+
+    PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) - offsetof( PhysicalDeviceCooperativeMatrixFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceCooperativeMatrixFeaturesNV& operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cooperativeMatrix = cooperativeMatrix_;
+      return *this;
+    }
+
+    PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( cooperativeMatrix == rhs.cooperativeMatrix )
+          && ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess );
+    }
+
+    bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {};
+    VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {};
+  };
+  static_assert( sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceCooperativeMatrixFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceCooperativeMatrixPropertiesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {} ) VULKAN_HPP_NOEXCEPT
+      : cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , cooperativeMatrixSupportedStages( rhs.cooperativeMatrixSupportedStages )
+    {}
+
+    PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) - offsetof( PhysicalDeviceCooperativeMatrixPropertiesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceCooperativeMatrixPropertiesNV& operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages );
+    }
+
+    bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {};
+  };
+  static_assert( sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceCooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceCornerSampledImageFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {} ) VULKAN_HPP_NOEXCEPT
+      : cornerSampledImage( cornerSampledImage_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , cornerSampledImage( rhs.cornerSampledImage )
+    {}
+
+    PhysicalDeviceCornerSampledImageFeaturesNV & operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) - offsetof( PhysicalDeviceCornerSampledImageFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceCornerSampledImageFeaturesNV& operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceCornerSampledImageFeaturesNV & setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      cornerSampledImage = cornerSampledImage_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceCornerSampledImageFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( cornerSampledImage == rhs.cornerSampledImage );
+    }
+
+    bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {};
+  };
+  static_assert( sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceCornerSampledImageFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceCoverageReductionModeFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {} ) VULKAN_HPP_NOEXCEPT
+      : coverageReductionMode( coverageReductionMode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( PhysicalDeviceCoverageReductionModeFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , coverageReductionMode( rhs.coverageReductionMode )
+    {}
+
+    PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) - offsetof( PhysicalDeviceCoverageReductionModeFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceCoverageReductionModeFeaturesNV& operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceCoverageReductionModeFeaturesNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageReductionMode = coverageReductionMode_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( coverageReductionMode == rhs.coverageReductionMode );
+    }
+
+    bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {};
+  };
+  static_assert( sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) == sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceCoverageReductionModeFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {} ) VULKAN_HPP_NOEXCEPT
+      : dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , dedicatedAllocationImageAliasing( rhs.dedicatedAllocationImageAliasing )
+    {}
+
+    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) - offsetof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV& operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setDedicatedAllocationImageAliasing( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing );
+    }
+
+    bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {};
+  };
+  static_assert( sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceDepthClipEnableFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT
+      : depthClipEnable( depthClipEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( PhysicalDeviceDepthClipEnableFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , depthClipEnable( rhs.depthClipEnable )
+    {}
+
+    PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) - offsetof( PhysicalDeviceDepthClipEnableFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceDepthClipEnableFeaturesEXT& operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthClipEnable = depthClipEnable_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( depthClipEnable == rhs.depthClipEnable );
+    }
+
+    bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};
+  };
+  static_assert( sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDepthClipEnableFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceDepthStencilResolvePropertiesKHR
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolvePropertiesKHR( VULKAN_HPP_NAMESPACE::ResolveModeFlagsKHR supportedDepthResolveModes_ = {},
+                                                                         VULKAN_HPP_NAMESPACE::ResolveModeFlagsKHR supportedStencilResolveModes_ = {},
+                                                                         VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {},
+                                                                         VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {} ) VULKAN_HPP_NOEXCEPT
+      : supportedDepthResolveModes( supportedDepthResolveModes_ )
+      , supportedStencilResolveModes( supportedStencilResolveModes_ )
+      , independentResolveNone( independentResolveNone_ )
+      , independentResolve( independentResolve_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolvePropertiesKHR( PhysicalDeviceDepthStencilResolvePropertiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , supportedDepthResolveModes( rhs.supportedDepthResolveModes )
+      , supportedStencilResolveModes( rhs.supportedStencilResolveModes )
+      , independentResolveNone( rhs.independentResolveNone )
+      , independentResolve( rhs.independentResolve )
+    {}
+
+    PhysicalDeviceDepthStencilResolvePropertiesKHR & operator=( PhysicalDeviceDepthStencilResolvePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDepthStencilResolvePropertiesKHR ) - offsetof( PhysicalDeviceDepthStencilResolvePropertiesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceDepthStencilResolvePropertiesKHR( VkPhysicalDeviceDepthStencilResolvePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceDepthStencilResolvePropertiesKHR& operator=( VkPhysicalDeviceDepthStencilResolvePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolvePropertiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceDepthStencilResolvePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDepthStencilResolvePropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceDepthStencilResolvePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDepthStencilResolvePropertiesKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceDepthStencilResolvePropertiesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceDepthStencilResolvePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes )
+          && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes )
+          && ( independentResolveNone == rhs.independentResolveNone )
+          && ( independentResolve == rhs.independentResolve );
+    }
+
+    bool operator!=( PhysicalDeviceDepthStencilResolvePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolvePropertiesKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ResolveModeFlagsKHR supportedDepthResolveModes = {};
+    VULKAN_HPP_NAMESPACE::ResolveModeFlagsKHR supportedStencilResolveModes = {};
+    VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
+    VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
+  };
+  static_assert( sizeof( PhysicalDeviceDepthStencilResolvePropertiesKHR ) == sizeof( VkPhysicalDeviceDepthStencilResolvePropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDepthStencilResolvePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceDescriptorIndexingFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {} ) VULKAN_HPP_NOEXCEPT
+      : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ )
+      , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ )
+      , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ )
+      , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ )
+      , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ )
+      , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ )
+      , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ )
+      , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ )
+      , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ )
+      , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ )
+      , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ )
+      , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ )
+      , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ )
+      , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ )
+      , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ )
+      , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ )
+      , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ )
+      , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ )
+      , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ )
+      , runtimeDescriptorArray( runtimeDescriptorArray_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeaturesEXT( PhysicalDeviceDescriptorIndexingFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , shaderInputAttachmentArrayDynamicIndexing( rhs.shaderInputAttachmentArrayDynamicIndexing )
+      , shaderUniformTexelBufferArrayDynamicIndexing( rhs.shaderUniformTexelBufferArrayDynamicIndexing )
+      , shaderStorageTexelBufferArrayDynamicIndexing( rhs.shaderStorageTexelBufferArrayDynamicIndexing )
+      , shaderUniformBufferArrayNonUniformIndexing( rhs.shaderUniformBufferArrayNonUniformIndexing )
+      , shaderSampledImageArrayNonUniformIndexing( rhs.shaderSampledImageArrayNonUniformIndexing )
+      , shaderStorageBufferArrayNonUniformIndexing( rhs.shaderStorageBufferArrayNonUniformIndexing )
+      , shaderStorageImageArrayNonUniformIndexing( rhs.shaderStorageImageArrayNonUniformIndexing )
+      , shaderInputAttachmentArrayNonUniformIndexing( rhs.shaderInputAttachmentArrayNonUniformIndexing )
+      , shaderUniformTexelBufferArrayNonUniformIndexing( rhs.shaderUniformTexelBufferArrayNonUniformIndexing )
+      , shaderStorageTexelBufferArrayNonUniformIndexing( rhs.shaderStorageTexelBufferArrayNonUniformIndexing )
+      , descriptorBindingUniformBufferUpdateAfterBind( rhs.descriptorBindingUniformBufferUpdateAfterBind )
+      , descriptorBindingSampledImageUpdateAfterBind( rhs.descriptorBindingSampledImageUpdateAfterBind )
+      , descriptorBindingStorageImageUpdateAfterBind( rhs.descriptorBindingStorageImageUpdateAfterBind )
+      , descriptorBindingStorageBufferUpdateAfterBind( rhs.descriptorBindingStorageBufferUpdateAfterBind )
+      , descriptorBindingUniformTexelBufferUpdateAfterBind( rhs.descriptorBindingUniformTexelBufferUpdateAfterBind )
+      , descriptorBindingStorageTexelBufferUpdateAfterBind( rhs.descriptorBindingStorageTexelBufferUpdateAfterBind )
+      , descriptorBindingUpdateUnusedWhilePending( rhs.descriptorBindingUpdateUnusedWhilePending )
+      , descriptorBindingPartiallyBound( rhs.descriptorBindingPartiallyBound )
+      , descriptorBindingVariableDescriptorCount( rhs.descriptorBindingVariableDescriptorCount )
+      , runtimeDescriptorArray( rhs.runtimeDescriptorArray )
+    {}
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & operator=( PhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDescriptorIndexingFeaturesEXT ) - offsetof( PhysicalDeviceDescriptorIndexingFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT& operator=( VkPhysicalDeviceDescriptorIndexingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingFeaturesEXT & setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
+    {
+      runtimeDescriptorArray = runtimeDescriptorArray_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceDescriptorIndexingFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceDescriptorIndexingFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeaturesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceDescriptorIndexingFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceDescriptorIndexingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing )
+          && ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing )
+          && ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing )
+          && ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing )
+          && ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing )
+          && ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing )
+          && ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing )
+          && ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing )
+          && ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing )
+          && ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing )
+          && ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind )
+          && ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind )
+          && ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind )
+          && ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind )
+          && ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind )
+          && ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind )
+          && ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending )
+          && ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound )
+          && ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount )
+          && ( runtimeDescriptorArray == rhs.runtimeDescriptorArray );
+    }
+
+    bool operator!=( PhysicalDeviceDescriptorIndexingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
+    VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
+  };
+  static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeaturesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDescriptorIndexingFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceDescriptorIndexingPropertiesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingPropertiesEXT( uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {},
+                                                                        VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {},
+                                                                        VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {},
+                                                                        VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {},
+                                                                        VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {},
+                                                                        VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {},
+                                                                        VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {},
+                                                                        VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {},
+                                                                        uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {},
+                                                                        uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {},
+                                                                        uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {},
+                                                                        uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {},
+                                                                        uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {},
+                                                                        uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {},
+                                                                        uint32_t maxPerStageUpdateAfterBindResources_ = {},
+                                                                        uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {},
+                                                                        uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {},
+                                                                        uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {},
+                                                                        uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {},
+                                                                        uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {},
+                                                                        uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {},
+                                                                        uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {},
+                                                                        uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ )
+      , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ )
+      , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ )
+      , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ )
+      , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ )
+      , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ )
+      , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ )
+      , quadDivergentImplicitLod( quadDivergentImplicitLod_ )
+      , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ )
+      , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ )
+      , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ )
+      , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ )
+      , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ )
+      , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ )
+      , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ )
+      , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ )
+      , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ )
+      , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ )
+      , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ )
+      , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ )
+      , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ )
+      , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ )
+      , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingPropertiesEXT( PhysicalDeviceDescriptorIndexingPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , maxUpdateAfterBindDescriptorsInAllPools( rhs.maxUpdateAfterBindDescriptorsInAllPools )
+      , shaderUniformBufferArrayNonUniformIndexingNative( rhs.shaderUniformBufferArrayNonUniformIndexingNative )
+      , shaderSampledImageArrayNonUniformIndexingNative( rhs.shaderSampledImageArrayNonUniformIndexingNative )
+      , shaderStorageBufferArrayNonUniformIndexingNative( rhs.shaderStorageBufferArrayNonUniformIndexingNative )
+      , shaderStorageImageArrayNonUniformIndexingNative( rhs.shaderStorageImageArrayNonUniformIndexingNative )
+      , shaderInputAttachmentArrayNonUniformIndexingNative( rhs.shaderInputAttachmentArrayNonUniformIndexingNative )
+      , robustBufferAccessUpdateAfterBind( rhs.robustBufferAccessUpdateAfterBind )
+      , quadDivergentImplicitLod( rhs.quadDivergentImplicitLod )
+      , maxPerStageDescriptorUpdateAfterBindSamplers( rhs.maxPerStageDescriptorUpdateAfterBindSamplers )
+      , maxPerStageDescriptorUpdateAfterBindUniformBuffers( rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers )
+      , maxPerStageDescriptorUpdateAfterBindStorageBuffers( rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers )
+      , maxPerStageDescriptorUpdateAfterBindSampledImages( rhs.maxPerStageDescriptorUpdateAfterBindSampledImages )
+      , maxPerStageDescriptorUpdateAfterBindStorageImages( rhs.maxPerStageDescriptorUpdateAfterBindStorageImages )
+      , maxPerStageDescriptorUpdateAfterBindInputAttachments( rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments )
+      , maxPerStageUpdateAfterBindResources( rhs.maxPerStageUpdateAfterBindResources )
+      , maxDescriptorSetUpdateAfterBindSamplers( rhs.maxDescriptorSetUpdateAfterBindSamplers )
+      , maxDescriptorSetUpdateAfterBindUniformBuffers( rhs.maxDescriptorSetUpdateAfterBindUniformBuffers )
+      , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic )
+      , maxDescriptorSetUpdateAfterBindStorageBuffers( rhs.maxDescriptorSetUpdateAfterBindStorageBuffers )
+      , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic )
+      , maxDescriptorSetUpdateAfterBindSampledImages( rhs.maxDescriptorSetUpdateAfterBindSampledImages )
+      , maxDescriptorSetUpdateAfterBindStorageImages( rhs.maxDescriptorSetUpdateAfterBindStorageImages )
+      , maxDescriptorSetUpdateAfterBindInputAttachments( rhs.maxDescriptorSetUpdateAfterBindInputAttachments )
+    {}
+
+    PhysicalDeviceDescriptorIndexingPropertiesEXT & operator=( PhysicalDeviceDescriptorIndexingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDescriptorIndexingPropertiesEXT ) - offsetof( PhysicalDeviceDescriptorIndexingPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceDescriptorIndexingPropertiesEXT( VkPhysicalDeviceDescriptorIndexingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceDescriptorIndexingPropertiesEXT& operator=( VkPhysicalDeviceDescriptorIndexingPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceDescriptorIndexingPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceDescriptorIndexingPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingPropertiesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceDescriptorIndexingPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceDescriptorIndexingPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools )
+          && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative )
+          && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative )
+          && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative )
+          && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative )
+          && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative )
+          && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind )
+          && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod )
+          && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers )
+          && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers )
+          && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers )
+          && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages )
+          && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages )
+          && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments )
+          && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources )
+          && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers )
+          && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers )
+          && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic )
+          && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers )
+          && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic )
+          && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages )
+          && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages )
+          && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments );
+    }
+
+    bool operator!=( PhysicalDeviceDescriptorIndexingPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingPropertiesEXT;
+    void* pNext = {};
+    uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {};
+    VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {};
+    VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
+    uint32_t maxPerStageUpdateAfterBindResources = {};
+    uint32_t maxDescriptorSetUpdateAfterBindSamplers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
+    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {};
+    uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
+    uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
+    uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
+    uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
+  };
+  static_assert( sizeof( PhysicalDeviceDescriptorIndexingPropertiesEXT ) == sizeof( VkPhysicalDeviceDescriptorIndexingPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDescriptorIndexingPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceDiscardRectanglePropertiesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxDiscardRectangles( maxDiscardRectangles_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , maxDiscardRectangles( rhs.maxDiscardRectangles )
+    {}
+
+    PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) - offsetof( PhysicalDeviceDiscardRectanglePropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceDiscardRectanglePropertiesEXT& operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxDiscardRectangles == rhs.maxDiscardRectangles );
+    }
+
+    bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
+    void* pNext = {};
+    uint32_t maxDiscardRectangles = {};
+  };
+  static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDiscardRectanglePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceDriverPropertiesKHR
+  {
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverPropertiesKHR( VULKAN_HPP_NAMESPACE::DriverIdKHR driverID_ = VULKAN_HPP_NAMESPACE::DriverIdKHR::eAmdProprietary,
+                                                               std::array<char,VK_MAX_DRIVER_NAME_SIZE_KHR> const& driverName_ = {},
+                                                               std::array<char,VK_MAX_DRIVER_INFO_SIZE_KHR> const& driverInfo_ = {},
+                                                               VULKAN_HPP_NAMESPACE::ConformanceVersionKHR conformanceVersion_ = {} ) VULKAN_HPP_NOEXCEPT
+      : driverID( driverID_ )
+      , driverName{}
+      , driverInfo{}
+      , conformanceVersion( conformanceVersion_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DRIVER_NAME_SIZE_KHR,VK_MAX_DRIVER_NAME_SIZE_KHR>::copy( driverName, driverName_ );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DRIVER_INFO_SIZE_KHR,VK_MAX_DRIVER_INFO_SIZE_KHR>::copy( driverInfo, driverInfo_ );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverPropertiesKHR( PhysicalDeviceDriverPropertiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , driverID( rhs.driverID )
+      , driverName{}
+      , driverInfo{}
+      , conformanceVersion( rhs.conformanceVersion )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DRIVER_NAME_SIZE_KHR,VK_MAX_DRIVER_NAME_SIZE_KHR>::copy( driverName, rhs.driverName );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DRIVER_INFO_SIZE_KHR,VK_MAX_DRIVER_INFO_SIZE_KHR>::copy( driverInfo, rhs.driverInfo );
+    }
+
+    PhysicalDeviceDriverPropertiesKHR & operator=( PhysicalDeviceDriverPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceDriverPropertiesKHR ) - offsetof( PhysicalDeviceDriverPropertiesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceDriverPropertiesKHR( VkPhysicalDeviceDriverPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceDriverPropertiesKHR& operator=( VkPhysicalDeviceDriverPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverPropertiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceDriverPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceDriverPropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceDriverPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceDriverPropertiesKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceDriverPropertiesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceDriverPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( driverID == rhs.driverID )
+          && ( memcmp( driverName, rhs.driverName, VK_MAX_DRIVER_NAME_SIZE_KHR * sizeof( char ) ) == 0 )
+          && ( memcmp( driverInfo, rhs.driverInfo, VK_MAX_DRIVER_INFO_SIZE_KHR * sizeof( char ) ) == 0 )
+          && ( conformanceVersion == rhs.conformanceVersion );
+    }
+
+    bool operator!=( PhysicalDeviceDriverPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverPropertiesKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DriverIdKHR driverID = VULKAN_HPP_NAMESPACE::DriverIdKHR::eAmdProprietary;
+    char driverName[VK_MAX_DRIVER_NAME_SIZE_KHR] = {};
+    char driverInfo[VK_MAX_DRIVER_INFO_SIZE_KHR] = {};
+    VULKAN_HPP_NAMESPACE::ConformanceVersionKHR conformanceVersion = {};
+  };
+  static_assert( sizeof( PhysicalDeviceDriverPropertiesKHR ) == sizeof( VkPhysicalDeviceDriverPropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceDriverPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceExclusiveScissorFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {} ) VULKAN_HPP_NOEXCEPT
+      : exclusiveScissor( exclusiveScissor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , exclusiveScissor( rhs.exclusiveScissor )
+    {}
+
+    PhysicalDeviceExclusiveScissorFeaturesNV & operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) - offsetof( PhysicalDeviceExclusiveScissorFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceExclusiveScissorFeaturesNV& operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      exclusiveScissor = exclusiveScissor_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceExclusiveScissorFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( exclusiveScissor == rhs.exclusiveScissor );
+    }
+
+    bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {};
+  };
+  static_assert( sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceExclusiveScissorFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceExternalBufferInfo
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {},
+                                                           VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {},
+                                                           VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , usage( usage_ )
+      , handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , usage( rhs.usage )
+      , handleType( rhs.handleType )
+    {}
+
+    PhysicalDeviceExternalBufferInfo & operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExternalBufferInfo ) - offsetof( PhysicalDeviceExternalBufferInfo, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceExternalBufferInfo& operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceExternalBufferInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalBufferInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceExternalBufferInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalBufferInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceExternalBufferInfo const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( usage == rhs.usage )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+  };
+  static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceExternalBufferInfo>::value, "struct wrapper is not a standard layout!" );
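Unlike the properties structs, PhysicalDeviceExternalBufferInfo is an input struct: it describes a prospective buffer so the implementation can report how that buffer could be exported. A sketch under the same assumptions as above (hypothetical physicalDevice handle, Vulkan 1.1):

    vk::PhysicalDeviceExternalBufferInfo externalBufferInfo;
    externalBufferInfo.usage = vk::BufferUsageFlagBits::eTransferSrc;
    externalBufferInfo.handleType = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
    vk::ExternalBufferProperties externalBufferProperties =
        physicalDevice.getExternalBufferProperties( externalBufferInfo );
    // externalBufferProperties.externalMemoryProperties.externalMemoryFeatures indicates
    // whether such a buffer could be exported to / imported from an opaque fd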
+
+  struct PhysicalDeviceExternalFenceInfo
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+      : handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , handleType( rhs.handleType )
+    {}
+
+    PhysicalDeviceExternalFenceInfo & operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExternalFenceInfo ) - offsetof( PhysicalDeviceExternalFenceInfo, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceExternalFenceInfo& operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceExternalFenceInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalFenceInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceExternalFenceInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalFenceInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceExternalFenceInfo const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
+  };
+  static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceExternalFenceInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceExternalImageFormatInfo
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+      : handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , handleType( rhs.handleType )
+    {}
+
+    PhysicalDeviceExternalImageFormatInfo & operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExternalImageFormatInfo ) - offsetof( PhysicalDeviceExternalImageFormatInfo, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceExternalImageFormatInfo& operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceExternalImageFormatInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalImageFormatInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceExternalImageFormatInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceExternalImageFormatInfo const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceExternalImageFormatInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( PhysicalDeviceExternalImageFormatInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
+  };
+  static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceExternalImageFormatInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceExternalMemoryHostPropertiesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {} ) VULKAN_HPP_NOEXCEPT
+      : minImportedHostPointerAlignment( minImportedHostPointerAlignment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , minImportedHostPointerAlignment( rhs.minImportedHostPointerAlignment )
+    {}
+
+    PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) - offsetof( PhysicalDeviceExternalMemoryHostPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceExternalMemoryHostPropertiesEXT& operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment );
+    }
+
+    bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {};
+  };
+  static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceExternalMemoryHostPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceExternalSemaphoreInfo
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT
+      : handleType( handleType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , handleType( rhs.handleType )
+    {}
+
+    PhysicalDeviceExternalSemaphoreInfo & operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceExternalSemaphoreInfo ) - offsetof( PhysicalDeviceExternalSemaphoreInfo, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceExternalSemaphoreInfo& operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceExternalSemaphoreInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceExternalSemaphoreInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      handleType = handleType_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceExternalSemaphoreInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( this );
+    }
+
+    operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( handleType == rhs.handleType );
+    }
+
+    bool operator!=( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
+  };
+  static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceExternalSemaphoreInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceFeatures2
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {} ) VULKAN_HPP_NOEXCEPT
+      : features( features_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2 const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , features( rhs.features )
+    {}
+
+    PhysicalDeviceFeatures2 & operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFeatures2 ) - offsetof( PhysicalDeviceFeatures2, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceFeatures2& operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceFeatures2 & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ ) VULKAN_HPP_NOEXCEPT
+    {
+      features = features_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceFeatures2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFeatures2*>( this );
+    }
+
+    operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFeatures2*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceFeatures2 const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( features == rhs.features );
+    }
+
+    bool operator!=( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {};
+  };
+  static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFeatures2>::value, "struct wrapper is not a standard layout!" );
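PhysicalDeviceFeatures2 is the extensible counterpart of PhysicalDeviceFeatures: extension feature structs such as the NV/EXT ones in this file are chained through its pNext and filled by a single vkGetPhysicalDeviceFeatures2 call. A minimal sketch (assumes a hypothetical vk::PhysicalDevice named physicalDevice and a Vulkan 1.1 instance; illustrative only, not part of this patch):

    vk::PhysicalDeviceExclusiveScissorFeaturesNV exclusiveScissorFeatures;
    vk::PhysicalDeviceFeatures2 features2;
    features2.pNext = &exclusiveScissorFeatures;    // query core + extension features in one call
    physicalDevice.getFeatures2( &features2 );
    bool hasAnisotropy      = features2.features.samplerAnisotropy;
    bool hasExclusiveScissor = exclusiveScissorFeatures.exclusiveScissor;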
+
+  struct PhysicalDeviceFloatControlsPropertiesKHR
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsPropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR::e32BitOnly,
+                                                                   VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR::e32BitOnly,
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {} ) VULKAN_HPP_NOEXCEPT
+      : denormBehaviorIndependence( denormBehaviorIndependence_ )
+      , roundingModeIndependence( roundingModeIndependence_ )
+      , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ )
+      , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ )
+      , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ )
+      , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ )
+      , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ )
+      , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ )
+      , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ )
+      , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ )
+      , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ )
+      , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ )
+      , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ )
+      , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ )
+      , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ )
+      , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ )
+      , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsPropertiesKHR( PhysicalDeviceFloatControlsPropertiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , denormBehaviorIndependence( rhs.denormBehaviorIndependence )
+      , roundingModeIndependence( rhs.roundingModeIndependence )
+      , shaderSignedZeroInfNanPreserveFloat16( rhs.shaderSignedZeroInfNanPreserveFloat16 )
+      , shaderSignedZeroInfNanPreserveFloat32( rhs.shaderSignedZeroInfNanPreserveFloat32 )
+      , shaderSignedZeroInfNanPreserveFloat64( rhs.shaderSignedZeroInfNanPreserveFloat64 )
+      , shaderDenormPreserveFloat16( rhs.shaderDenormPreserveFloat16 )
+      , shaderDenormPreserveFloat32( rhs.shaderDenormPreserveFloat32 )
+      , shaderDenormPreserveFloat64( rhs.shaderDenormPreserveFloat64 )
+      , shaderDenormFlushToZeroFloat16( rhs.shaderDenormFlushToZeroFloat16 )
+      , shaderDenormFlushToZeroFloat32( rhs.shaderDenormFlushToZeroFloat32 )
+      , shaderDenormFlushToZeroFloat64( rhs.shaderDenormFlushToZeroFloat64 )
+      , shaderRoundingModeRTEFloat16( rhs.shaderRoundingModeRTEFloat16 )
+      , shaderRoundingModeRTEFloat32( rhs.shaderRoundingModeRTEFloat32 )
+      , shaderRoundingModeRTEFloat64( rhs.shaderRoundingModeRTEFloat64 )
+      , shaderRoundingModeRTZFloat16( rhs.shaderRoundingModeRTZFloat16 )
+      , shaderRoundingModeRTZFloat32( rhs.shaderRoundingModeRTZFloat32 )
+      , shaderRoundingModeRTZFloat64( rhs.shaderRoundingModeRTZFloat64 )
+    {}
+
+    PhysicalDeviceFloatControlsPropertiesKHR & operator=( PhysicalDeviceFloatControlsPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFloatControlsPropertiesKHR ) - offsetof( PhysicalDeviceFloatControlsPropertiesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceFloatControlsPropertiesKHR( VkPhysicalDeviceFloatControlsPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceFloatControlsPropertiesKHR& operator=( VkPhysicalDeviceFloatControlsPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsPropertiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceFloatControlsPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFloatControlsPropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceFloatControlsPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFloatControlsPropertiesKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceFloatControlsPropertiesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceFloatControlsPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence )
+          && ( roundingModeIndependence == rhs.roundingModeIndependence )
+          && ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 )
+          && ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 )
+          && ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 )
+          && ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 )
+          && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 )
+          && ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 )
+          && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 )
+          && ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 )
+          && ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 )
+          && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 )
+          && ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 )
+          && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 )
+          && ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 )
+          && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 )
+          && ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 );
+    }
+
+    bool operator!=( PhysicalDeviceFloatControlsPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsPropertiesKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR::e32BitOnly;
+    VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR::e32BitOnly;
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
+  };
+  static_assert( sizeof( PhysicalDeviceFloatControlsPropertiesKHR ) == sizeof( VkPhysicalDeviceFloatControlsPropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFloatControlsPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceFragmentDensityMapFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {} ) VULKAN_HPP_NOEXCEPT
+      : fragmentDensityMap( fragmentDensityMap_ )
+      , fragmentDensityMapDynamic( fragmentDensityMapDynamic_ )
+      , fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( PhysicalDeviceFragmentDensityMapFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , fragmentDensityMap( rhs.fragmentDensityMap )
+      , fragmentDensityMapDynamic( rhs.fragmentDensityMapDynamic )
+      , fragmentDensityMapNonSubsampledImages( rhs.fragmentDensityMapNonSubsampledImages )
+    {}
+
+    PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) - offsetof( PhysicalDeviceFragmentDensityMapFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceFragmentDensityMapFeaturesEXT& operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentDensityMap == rhs.fragmentDensityMap )
+          && ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic )
+          && ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages );
+    }
+
+    bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {};
+  };
+  static_assert( sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMapFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceFragmentDensityMapPropertiesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {},
+                                                                        VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {},
+                                                                        VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {} ) VULKAN_HPP_NOEXCEPT
+      : minFragmentDensityTexelSize( minFragmentDensityTexelSize_ )
+      , maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ )
+      , fragmentDensityInvocations( fragmentDensityInvocations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( PhysicalDeviceFragmentDensityMapPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , minFragmentDensityTexelSize( rhs.minFragmentDensityTexelSize )
+      , maxFragmentDensityTexelSize( rhs.maxFragmentDensityTexelSize )
+      , fragmentDensityInvocations( rhs.fragmentDensityInvocations )
+    {}
+
+    PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) - offsetof( PhysicalDeviceFragmentDensityMapPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceFragmentDensityMapPropertiesEXT& operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize )
+          && ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize )
+          && ( fragmentDensityInvocations == rhs.fragmentDensityInvocations );
+    }
+
+    bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {};
+  };
+  static_assert( sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMapPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {} ) VULKAN_HPP_NOEXCEPT
+      : fragmentShaderBarycentric( fragmentShaderBarycentric_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , fragmentShaderBarycentric( rhs.fragmentShaderBarycentric )
+    {}
+
+    PhysicalDeviceFragmentShaderBarycentricFeaturesNV & operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) - offsetof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceFragmentShaderBarycentricFeaturesNV& operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentShaderBarycentric = fragmentShaderBarycentric_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric );
+    }
+
+    bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {};
+  };
+  static_assert( sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) == sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFragmentShaderBarycentricFeaturesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {},
+                                                                           VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {},
+                                                                           VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {} ) VULKAN_HPP_NOEXCEPT
+      : fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ )
+      , fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ )
+      , fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , fragmentShaderSampleInterlock( rhs.fragmentShaderSampleInterlock )
+      , fragmentShaderPixelInterlock( rhs.fragmentShaderPixelInterlock )
+      , fragmentShaderShadingRateInterlock( rhs.fragmentShaderShadingRateInterlock )
+    {}
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) - offsetof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT& operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderSampleInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_;
+      return *this;
+    }
+
+    PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock )
+          && ( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock )
+          && ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock );
+    }
+
+    bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {};
+  };
+  static_assert( sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceFragmentShaderInterlockFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceGroupProperties
+  {
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( uint32_t physicalDeviceCount_ = {},
+                                                           std::array<VULKAN_HPP_NAMESPACE::PhysicalDevice,VK_MAX_DEVICE_GROUP_SIZE> const& physicalDevices_ = {},
+                                                           VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {} ) VULKAN_HPP_NOEXCEPT
+      : physicalDeviceCount( physicalDeviceCount_ )
+      , physicalDevices{}
+      , subsetAllocation( subsetAllocation_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::PhysicalDevice,VK_MAX_DEVICE_GROUP_SIZE>::copy( physicalDevices, physicalDevices_ );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , physicalDeviceCount( rhs.physicalDeviceCount )
+      , physicalDevices{}
+      , subsetAllocation( rhs.subsetAllocation )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::PhysicalDevice,VK_MAX_DEVICE_GROUP_SIZE>::copy( physicalDevices, rhs.physicalDevices );
+    }
+
+    PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceGroupProperties ) - offsetof( PhysicalDeviceGroupProperties, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceGroupProperties& operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceGroupProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceGroupProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceGroupProperties*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceGroupProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceGroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( physicalDeviceCount == rhs.physicalDeviceCount )
+          && ( memcmp( physicalDevices, rhs.physicalDevices, std::min<uint32_t>( VK_MAX_DEVICE_GROUP_SIZE, physicalDeviceCount ) * sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) ) == 0 )
+          && ( subsetAllocation == rhs.subsetAllocation );
+    }
+
+    bool operator!=( PhysicalDeviceGroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties;
+    void* pNext = {};
+    uint32_t physicalDeviceCount = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE] = {};
+    VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {};
+  };
+  static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceGroupProperties>::value, "struct wrapper is not a standard layout!" );
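PhysicalDeviceGroupProperties entries are returned by vkEnumeratePhysicalDeviceGroups; all devices in one group can back a single logical device for multi-GPU use. A sketch (assumes a hypothetical vk::Instance named instance, Vulkan 1.1, and the default exception-based vulkan-hpp mode):

    std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroups();
    for ( auto const& group : groups )
      for ( uint32_t i = 0; i < group.physicalDeviceCount; ++i )
      {
        vk::PhysicalDevice member = group.physicalDevices[i];  // usable like any other physical device
        (void)member;
      }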
+
+  struct PhysicalDeviceHostQueryResetFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {} ) VULKAN_HPP_NOEXCEPT
+      : hostQueryReset( hostQueryReset_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeaturesEXT( PhysicalDeviceHostQueryResetFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , hostQueryReset( rhs.hostQueryReset )
+    {}
+
+    PhysicalDeviceHostQueryResetFeaturesEXT & operator=( PhysicalDeviceHostQueryResetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceHostQueryResetFeaturesEXT ) - offsetof( PhysicalDeviceHostQueryResetFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceHostQueryResetFeaturesEXT( VkPhysicalDeviceHostQueryResetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceHostQueryResetFeaturesEXT& operator=( VkPhysicalDeviceHostQueryResetFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceHostQueryResetFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceHostQueryResetFeaturesEXT & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hostQueryReset = hostQueryReset_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceHostQueryResetFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceHostQueryResetFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceHostQueryResetFeaturesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceHostQueryResetFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceHostQueryResetFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( hostQueryReset == rhs.hostQueryReset );
+    }
+
+    bool operator!=( PhysicalDeviceHostQueryResetFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {};
+  };
+  static_assert( sizeof( PhysicalDeviceHostQueryResetFeaturesEXT ) == sizeof( VkPhysicalDeviceHostQueryResetFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceHostQueryResetFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceIDProperties
+  {
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( std::array<uint8_t,VK_UUID_SIZE> const& deviceUUID_ = {},
+                                                        std::array<uint8_t,VK_UUID_SIZE> const& driverUUID_ = {},
+                                                        std::array<uint8_t,VK_LUID_SIZE> const& deviceLUID_ = {},
+                                                        uint32_t deviceNodeMask_ = {},
+                                                        VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {} ) VULKAN_HPP_NOEXCEPT
+      : deviceUUID{}
+      , driverUUID{}
+      , deviceLUID{}
+      , deviceNodeMask( deviceNodeMask_ )
+      , deviceLUIDValid( deviceLUIDValid_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE>::copy( deviceUUID, deviceUUID_ );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE>::copy( driverUUID, driverUUID_ );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_LUID_SIZE>::copy( deviceLUID, deviceLUID_ );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , deviceUUID{}
+      , driverUUID{}
+      , deviceLUID{}
+      , deviceNodeMask( rhs.deviceNodeMask )
+      , deviceLUIDValid( rhs.deviceLUIDValid )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE>::copy( deviceUUID, rhs.deviceUUID );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_UUID_SIZE>::copy( driverUUID, rhs.driverUUID );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t,VK_LUID_SIZE>::copy( deviceLUID, rhs.deviceLUID );
+    }
+
+    PhysicalDeviceIDProperties & operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceIDProperties ) - offsetof( PhysicalDeviceIDProperties, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceIDProperties& operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceIDProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceIDProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceIDProperties*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceIDProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceIDProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memcmp( deviceUUID, rhs.deviceUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
+          && ( memcmp( driverUUID, rhs.driverUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
+          && ( memcmp( deviceLUID, rhs.deviceLUID, VK_LUID_SIZE * sizeof( uint8_t ) ) == 0 )
+          && ( deviceNodeMask == rhs.deviceNodeMask )
+          && ( deviceLUIDValid == rhs.deviceLUIDValid );
+    }
+
+    bool operator!=( PhysicalDeviceIDProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties;
+    void* pNext = {};
+    uint8_t deviceUUID[VK_UUID_SIZE] = {};
+    uint8_t driverUUID[VK_UUID_SIZE] = {};
+    uint8_t deviceLUID[VK_LUID_SIZE] = {};
+    uint32_t deviceNodeMask = {};
+    VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {};
+  };
+  static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceIDProperties>::value, "struct wrapper is not a standard layout!" );
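The UUID/LUID arrays in PhysicalDeviceIDProperties identify a device consistently across processes and APIs, which is what makes external memory and semaphore sharing safe. A sketch under the same assumptions as the earlier examples (hypothetical physicalDevice handle, Vulkan 1.1):

    vk::PhysicalDeviceIDProperties idProperties;
    vk::PhysicalDeviceProperties2 properties2;
    properties2.pNext = &idProperties;
    physicalDevice.getProperties2( &properties2 );
    // idProperties.deviceUUID should match on both sides before importing external handles;
    // deviceLUID is only meaningful when deviceLUIDValid is VK_TRUE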
+
+  struct PhysicalDeviceImageDrmFormatModifierInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_ = {},
+                                                                      VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
+                                                                      uint32_t queueFamilyIndexCount_ = {},
+                                                                      const uint32_t* pQueueFamilyIndices_ = {} ) VULKAN_HPP_NOEXCEPT
+      : drmFormatModifier( drmFormatModifier_ )
+      , sharingMode( sharingMode_ )
+      , queueFamilyIndexCount( queueFamilyIndexCount_ )
+      , pQueueFamilyIndices( pQueueFamilyIndices_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , drmFormatModifier( rhs.drmFormatModifier )
+      , sharingMode( rhs.sharingMode )
+      , queueFamilyIndexCount( rhs.queueFamilyIndexCount )
+      , pQueueFamilyIndices( rhs.pQueueFamilyIndices )
+    {}
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) - offsetof( PhysicalDeviceImageDrmFormatModifierInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT& operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
+    {
+      drmFormatModifier = drmFormatModifier_;
+      return *this;
+    }
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sharingMode = sharingMode_;
+      return *this;
+    }
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    PhysicalDeviceImageDrmFormatModifierInfoEXT & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( drmFormatModifier == rhs.drmFormatModifier )
+          && ( sharingMode == rhs.sharingMode )
+          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
+    }
+
+    bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
+    const void* pNext = {};
+    uint64_t drmFormatModifier = {};
+    VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+    uint32_t queueFamilyIndexCount = {};
+    const uint32_t* pQueueFamilyIndices = {};
+  };
+  static_assert( sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) == sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceImageDrmFormatModifierInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceImageFormatInfo2
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                                         VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D,
+                                                         VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal,
+                                                         VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {},
+                                                         VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT
+      : format( format_ )
+      , type( type_ )
+      , tiling( tiling_ )
+      , usage( usage_ )
+      , flags( flags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , format( rhs.format )
+      , type( rhs.type )
+      , tiling( rhs.tiling )
+      , usage( rhs.usage )
+      , flags( rhs.flags )
+    {}
+
+    PhysicalDeviceImageFormatInfo2 & operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceImageFormatInfo2 ) - offsetof( PhysicalDeviceImageFormatInfo2, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceImageFormatInfo2& operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      tiling = tiling_;
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      usage = usage_;
+      return *this;
+    }
+
+    PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( this );
+    }
+
+    operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageFormatInfo2*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceImageFormatInfo2 const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( format == rhs.format )
+          && ( type == rhs.type )
+          && ( tiling == rhs.tiling )
+          && ( usage == rhs.usage )
+          && ( flags == rhs.flags );
+    }
+
+    bool operator!=( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
+    VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
+    VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
+  };
+  static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
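PhysicalDeviceImageFormatInfo2 describes a prospective image so its capabilities (maximum extent, sample counts, and so on) can be queried before creation. A sketch, assuming the same hypothetical physicalDevice handle and the default exception-based vulkan-hpp mode, in which the wrapper is expected to throw if the combination is unsupported:

    vk::PhysicalDeviceImageFormatInfo2 imageFormatInfo( vk::Format::eR8G8B8A8Unorm,
                                                        vk::ImageType::e2D,
                                                        vk::ImageTiling::eOptimal,
                                                        vk::ImageUsageFlagBits::eSampled );
    vk::ImageFormatProperties2 imageFormatProperties =
        physicalDevice.getImageFormatProperties2( imageFormatInfo );
    // imageFormatProperties.imageFormatProperties.maxExtent bounds the creatable image size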
+
+  struct PhysicalDeviceImageViewImageFormatInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D ) VULKAN_HPP_NOEXCEPT
+      : imageViewType( imageViewType_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( PhysicalDeviceImageViewImageFormatInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , imageViewType( rhs.imageViewType )
+    {}
+
+    PhysicalDeviceImageViewImageFormatInfoEXT & operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) - offsetof( PhysicalDeviceImageViewImageFormatInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceImageViewImageFormatInfoEXT& operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceImageViewImageFormatInfoEXT & setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageViewType = imageViewType_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceImageViewImageFormatInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imageViewType == rhs.imageViewType );
+    }
+
+    bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
+  };
+  static_assert( sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) == sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceImageViewImageFormatInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceImagelessFramebufferFeaturesKHR
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {} ) VULKAN_HPP_NOEXCEPT
+      : imagelessFramebuffer( imagelessFramebuffer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeaturesKHR( PhysicalDeviceImagelessFramebufferFeaturesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , imagelessFramebuffer( rhs.imagelessFramebuffer )
+    {}
+
+    PhysicalDeviceImagelessFramebufferFeaturesKHR & operator=( PhysicalDeviceImagelessFramebufferFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceImagelessFramebufferFeaturesKHR ) - offsetof( PhysicalDeviceImagelessFramebufferFeaturesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceImagelessFramebufferFeaturesKHR( VkPhysicalDeviceImagelessFramebufferFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceImagelessFramebufferFeaturesKHR& operator=( VkPhysicalDeviceImagelessFramebufferFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeaturesKHR const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceImagelessFramebufferFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceImagelessFramebufferFeaturesKHR & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imagelessFramebuffer = imagelessFramebuffer_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceImagelessFramebufferFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDeviceImagelessFramebufferFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeaturesKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceImagelessFramebufferFeaturesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceImagelessFramebufferFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( imagelessFramebuffer == rhs.imagelessFramebuffer );
+    }
+
+    bool operator!=( PhysicalDeviceImagelessFramebufferFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeaturesKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {};
+  };
+  static_assert( sizeof( PhysicalDeviceImagelessFramebufferFeaturesKHR ) == sizeof( VkPhysicalDeviceImagelessFramebufferFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceImagelessFramebufferFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceIndexTypeUint8FeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {} ) VULKAN_HPP_NOEXCEPT
+      : indexTypeUint8( indexTypeUint8_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( PhysicalDeviceIndexTypeUint8FeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , indexTypeUint8( rhs.indexTypeUint8 )
+    {}
+
+    PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) - offsetof( PhysicalDeviceIndexTypeUint8FeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceIndexTypeUint8FeaturesEXT& operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceIndexTypeUint8FeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceIndexTypeUint8FeaturesEXT & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT
+    {
+      indexTypeUint8 = indexTypeUint8_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceIndexTypeUint8FeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceIndexTypeUint8FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( indexTypeUint8 == rhs.indexTypeUint8 );
+    }
+
+    bool operator!=( PhysicalDeviceIndexTypeUint8FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {};
+  };
+  static_assert( sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) == sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceIndexTypeUint8FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
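+
+  // Usage sketch (illustrative only; `deviceCreateInfo` is a hypothetical
+  // local): feature structs such as the one above are enabled by chaining them
+  // into DeviceCreateInfo::pNext at device creation:
+  //   vk::PhysicalDeviceIndexTypeUint8FeaturesEXT indexTypeUint8Features( VK_TRUE );
+  //   vk::DeviceCreateInfo deviceCreateInfo;
+  //   deviceCreateInfo.setPNext( &indexTypeUint8Features );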
+
+  struct PhysicalDeviceInlineUniformBlockFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {} ) VULKAN_HPP_NOEXCEPT
+      : inlineUniformBlock( inlineUniformBlock_ )
+      , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , inlineUniformBlock( rhs.inlineUniformBlock )
+      , descriptorBindingInlineUniformBlockUpdateAfterBind( rhs.descriptorBindingInlineUniformBlockUpdateAfterBind )
+    {}
+
+    PhysicalDeviceInlineUniformBlockFeaturesEXT & operator=( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) - offsetof( PhysicalDeviceInlineUniformBlockFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceInlineUniformBlockFeaturesEXT( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceInlineUniformBlockFeaturesEXT& operator=( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceInlineUniformBlockFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceInlineUniformBlockFeaturesEXT & setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inlineUniformBlock = inlineUniformBlock_;
+      return *this;
+    }
+
+    PhysicalDeviceInlineUniformBlockFeaturesEXT & setDescriptorBindingInlineUniformBlockUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeaturesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceInlineUniformBlockFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( inlineUniformBlock == rhs.inlineUniformBlock )
+          && ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind );
+    }
+
+    bool operator!=( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
+    VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {};
+  };
+  static_assert( sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceInlineUniformBlockFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceInlineUniformBlockPropertiesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT( uint32_t maxInlineUniformBlockSize_ = {},
+      uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {},
+      uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {},
+      uint32_t maxDescriptorSetInlineUniformBlocks_ = {},
+      uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxInlineUniformBlockSize( maxInlineUniformBlockSize_ )
+      , maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ )
+      , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ )
+      , maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ )
+      , maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , maxInlineUniformBlockSize( rhs.maxInlineUniformBlockSize )
+      , maxPerStageDescriptorInlineUniformBlocks( rhs.maxPerStageDescriptorInlineUniformBlocks )
+      , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks )
+      , maxDescriptorSetInlineUniformBlocks( rhs.maxDescriptorSetInlineUniformBlocks )
+      , maxDescriptorSetUpdateAfterBindInlineUniformBlocks( rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks )
+    {}
+
+    PhysicalDeviceInlineUniformBlockPropertiesEXT & operator=( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) - offsetof( PhysicalDeviceInlineUniformBlockPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceInlineUniformBlockPropertiesEXT( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceInlineUniformBlockPropertiesEXT& operator=( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockPropertiesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceInlineUniformBlockPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize )
+          && ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks )
+          && ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks )
+          && ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks )
+          && ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
+    }
+
+    bool operator!=( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT;
+    void* pNext = {};
+    uint32_t maxInlineUniformBlockSize = {};
+    uint32_t maxPerStageDescriptorInlineUniformBlocks = {};
+    uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {};
+    uint32_t maxDescriptorSetInlineUniformBlocks = {};
+    uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {};
+  };
+  static_assert( sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceInlineUniformBlockPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceLimits
+  {
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( uint32_t maxImageDimension1D_ = {},
+      uint32_t maxImageDimension2D_ = {},
+      uint32_t maxImageDimension3D_ = {},
+      uint32_t maxImageDimensionCube_ = {},
+      uint32_t maxImageArrayLayers_ = {},
+      uint32_t maxTexelBufferElements_ = {},
+      uint32_t maxUniformBufferRange_ = {},
+      uint32_t maxStorageBufferRange_ = {},
+      uint32_t maxPushConstantsSize_ = {},
+      uint32_t maxMemoryAllocationCount_ = {},
+      uint32_t maxSamplerAllocationCount_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {},
+      uint32_t maxBoundDescriptorSets_ = {},
+      uint32_t maxPerStageDescriptorSamplers_ = {},
+      uint32_t maxPerStageDescriptorUniformBuffers_ = {},
+      uint32_t maxPerStageDescriptorStorageBuffers_ = {},
+      uint32_t maxPerStageDescriptorSampledImages_ = {},
+      uint32_t maxPerStageDescriptorStorageImages_ = {},
+      uint32_t maxPerStageDescriptorInputAttachments_ = {},
+      uint32_t maxPerStageResources_ = {},
+      uint32_t maxDescriptorSetSamplers_ = {},
+      uint32_t maxDescriptorSetUniformBuffers_ = {},
+      uint32_t maxDescriptorSetUniformBuffersDynamic_ = {},
+      uint32_t maxDescriptorSetStorageBuffers_ = {},
+      uint32_t maxDescriptorSetStorageBuffersDynamic_ = {},
+      uint32_t maxDescriptorSetSampledImages_ = {},
+      uint32_t maxDescriptorSetStorageImages_ = {},
+      uint32_t maxDescriptorSetInputAttachments_ = {},
+      uint32_t maxVertexInputAttributes_ = {},
+      uint32_t maxVertexInputBindings_ = {},
+      uint32_t maxVertexInputAttributeOffset_ = {},
+      uint32_t maxVertexInputBindingStride_ = {},
+      uint32_t maxVertexOutputComponents_ = {},
+      uint32_t maxTessellationGenerationLevel_ = {},
+      uint32_t maxTessellationPatchSize_ = {},
+      uint32_t maxTessellationControlPerVertexInputComponents_ = {},
+      uint32_t maxTessellationControlPerVertexOutputComponents_ = {},
+      uint32_t maxTessellationControlPerPatchOutputComponents_ = {},
+      uint32_t maxTessellationControlTotalOutputComponents_ = {},
+      uint32_t maxTessellationEvaluationInputComponents_ = {},
+      uint32_t maxTessellationEvaluationOutputComponents_ = {},
+      uint32_t maxGeometryShaderInvocations_ = {},
+      uint32_t maxGeometryInputComponents_ = {},
+      uint32_t maxGeometryOutputComponents_ = {},
+      uint32_t maxGeometryOutputVertices_ = {},
+      uint32_t maxGeometryTotalOutputComponents_ = {},
+      uint32_t maxFragmentInputComponents_ = {},
+      uint32_t maxFragmentOutputAttachments_ = {},
+      uint32_t maxFragmentDualSrcAttachments_ = {},
+      uint32_t maxFragmentCombinedOutputResources_ = {},
+      uint32_t maxComputeSharedMemorySize_ = {},
+      std::array<uint32_t,3> const& maxComputeWorkGroupCount_ = {},
+      uint32_t maxComputeWorkGroupInvocations_ = {},
+      std::array<uint32_t,3> const& maxComputeWorkGroupSize_ = {},
+      uint32_t subPixelPrecisionBits_ = {},
+      uint32_t subTexelPrecisionBits_ = {},
+      uint32_t mipmapPrecisionBits_ = {},
+      uint32_t maxDrawIndexedIndexValue_ = {},
+      uint32_t maxDrawIndirectCount_ = {},
+      float maxSamplerLodBias_ = {},
+      float maxSamplerAnisotropy_ = {},
+      uint32_t maxViewports_ = {},
+      std::array<uint32_t,2> const& maxViewportDimensions_ = {},
+      std::array<float,2> const& viewportBoundsRange_ = {},
+      uint32_t viewportSubPixelBits_ = {},
+      size_t minMemoryMapAlignment_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {},
+      int32_t minTexelOffset_ = {},
+      uint32_t maxTexelOffset_ = {},
+      int32_t minTexelGatherOffset_ = {},
+      uint32_t maxTexelGatherOffset_ = {},
+      float minInterpolationOffset_ = {},
+      float maxInterpolationOffset_ = {},
+      uint32_t subPixelInterpolationOffsetBits_ = {},
+      uint32_t maxFramebufferWidth_ = {},
+      uint32_t maxFramebufferHeight_ = {},
+      uint32_t maxFramebufferLayers_ = {},
+      VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {},
+      VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {},
+      VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {},
+      VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {},
+      uint32_t maxColorAttachments_ = {},
+      VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {},
+      VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {},
+      VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {},
+      VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {},
+      VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {},
+      uint32_t maxSampleMaskWords_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {},
+      float timestampPeriod_ = {},
+      uint32_t maxClipDistances_ = {},
+      uint32_t maxCullDistances_ = {},
+      uint32_t maxCombinedClipAndCullDistances_ = {},
+      uint32_t discreteQueuePriorities_ = {},
+      std::array<float,2> const& pointSizeRange_ = {},
+      std::array<float,2> const& lineWidthRange_ = {},
+      float pointSizeGranularity_ = {},
+      float lineWidthGranularity_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxImageDimension1D( maxImageDimension1D_ )
+      , maxImageDimension2D( maxImageDimension2D_ )
+      , maxImageDimension3D( maxImageDimension3D_ )
+      , maxImageDimensionCube( maxImageDimensionCube_ )
+      , maxImageArrayLayers( maxImageArrayLayers_ )
+      , maxTexelBufferElements( maxTexelBufferElements_ )
+      , maxUniformBufferRange( maxUniformBufferRange_ )
+      , maxStorageBufferRange( maxStorageBufferRange_ )
+      , maxPushConstantsSize( maxPushConstantsSize_ )
+      , maxMemoryAllocationCount( maxMemoryAllocationCount_ )
+      , maxSamplerAllocationCount( maxSamplerAllocationCount_ )
+      , bufferImageGranularity( bufferImageGranularity_ )
+      , sparseAddressSpaceSize( sparseAddressSpaceSize_ )
+      , maxBoundDescriptorSets( maxBoundDescriptorSets_ )
+      , maxPerStageDescriptorSamplers( maxPerStageDescriptorSamplers_ )
+      , maxPerStageDescriptorUniformBuffers( maxPerStageDescriptorUniformBuffers_ )
+      , maxPerStageDescriptorStorageBuffers( maxPerStageDescriptorStorageBuffers_ )
+      , maxPerStageDescriptorSampledImages( maxPerStageDescriptorSampledImages_ )
+      , maxPerStageDescriptorStorageImages( maxPerStageDescriptorStorageImages_ )
+      , maxPerStageDescriptorInputAttachments( maxPerStageDescriptorInputAttachments_ )
+      , maxPerStageResources( maxPerStageResources_ )
+      , maxDescriptorSetSamplers( maxDescriptorSetSamplers_ )
+      , maxDescriptorSetUniformBuffers( maxDescriptorSetUniformBuffers_ )
+      , maxDescriptorSetUniformBuffersDynamic( maxDescriptorSetUniformBuffersDynamic_ )
+      , maxDescriptorSetStorageBuffers( maxDescriptorSetStorageBuffers_ )
+      , maxDescriptorSetStorageBuffersDynamic( maxDescriptorSetStorageBuffersDynamic_ )
+      , maxDescriptorSetSampledImages( maxDescriptorSetSampledImages_ )
+      , maxDescriptorSetStorageImages( maxDescriptorSetStorageImages_ )
+      , maxDescriptorSetInputAttachments( maxDescriptorSetInputAttachments_ )
+      , maxVertexInputAttributes( maxVertexInputAttributes_ )
+      , maxVertexInputBindings( maxVertexInputBindings_ )
+      , maxVertexInputAttributeOffset( maxVertexInputAttributeOffset_ )
+      , maxVertexInputBindingStride( maxVertexInputBindingStride_ )
+      , maxVertexOutputComponents( maxVertexOutputComponents_ )
+      , maxTessellationGenerationLevel( maxTessellationGenerationLevel_ )
+      , maxTessellationPatchSize( maxTessellationPatchSize_ )
+      , maxTessellationControlPerVertexInputComponents( maxTessellationControlPerVertexInputComponents_ )
+      , maxTessellationControlPerVertexOutputComponents( maxTessellationControlPerVertexOutputComponents_ )
+      , maxTessellationControlPerPatchOutputComponents( maxTessellationControlPerPatchOutputComponents_ )
+      , maxTessellationControlTotalOutputComponents( maxTessellationControlTotalOutputComponents_ )
+      , maxTessellationEvaluationInputComponents( maxTessellationEvaluationInputComponents_ )
+      , maxTessellationEvaluationOutputComponents( maxTessellationEvaluationOutputComponents_ )
+      , maxGeometryShaderInvocations( maxGeometryShaderInvocations_ )
+      , maxGeometryInputComponents( maxGeometryInputComponents_ )
+      , maxGeometryOutputComponents( maxGeometryOutputComponents_ )
+      , maxGeometryOutputVertices( maxGeometryOutputVertices_ )
+      , maxGeometryTotalOutputComponents( maxGeometryTotalOutputComponents_ )
+      , maxFragmentInputComponents( maxFragmentInputComponents_ )
+      , maxFragmentOutputAttachments( maxFragmentOutputAttachments_ )
+      , maxFragmentDualSrcAttachments( maxFragmentDualSrcAttachments_ )
+      , maxFragmentCombinedOutputResources( maxFragmentCombinedOutputResources_ )
+      , maxComputeSharedMemorySize( maxComputeSharedMemorySize_ )
+      , maxComputeWorkGroupCount{}
+      , maxComputeWorkGroupInvocations( maxComputeWorkGroupInvocations_ )
+      , maxComputeWorkGroupSize{}
+      , subPixelPrecisionBits( subPixelPrecisionBits_ )
+      , subTexelPrecisionBits( subTexelPrecisionBits_ )
+      , mipmapPrecisionBits( mipmapPrecisionBits_ )
+      , maxDrawIndexedIndexValue( maxDrawIndexedIndexValue_ )
+      , maxDrawIndirectCount( maxDrawIndirectCount_ )
+      , maxSamplerLodBias( maxSamplerLodBias_ )
+      , maxSamplerAnisotropy( maxSamplerAnisotropy_ )
+      , maxViewports( maxViewports_ )
+      , maxViewportDimensions{}
+      , viewportBoundsRange{}
+      , viewportSubPixelBits( viewportSubPixelBits_ )
+      , minMemoryMapAlignment( minMemoryMapAlignment_ )
+      , minTexelBufferOffsetAlignment( minTexelBufferOffsetAlignment_ )
+      , minUniformBufferOffsetAlignment( minUniformBufferOffsetAlignment_ )
+      , minStorageBufferOffsetAlignment( minStorageBufferOffsetAlignment_ )
+      , minTexelOffset( minTexelOffset_ )
+      , maxTexelOffset( maxTexelOffset_ )
+      , minTexelGatherOffset( minTexelGatherOffset_ )
+      , maxTexelGatherOffset( maxTexelGatherOffset_ )
+      , minInterpolationOffset( minInterpolationOffset_ )
+      , maxInterpolationOffset( maxInterpolationOffset_ )
+      , subPixelInterpolationOffsetBits( subPixelInterpolationOffsetBits_ )
+      , maxFramebufferWidth( maxFramebufferWidth_ )
+      , maxFramebufferHeight( maxFramebufferHeight_ )
+      , maxFramebufferLayers( maxFramebufferLayers_ )
+      , framebufferColorSampleCounts( framebufferColorSampleCounts_ )
+      , framebufferDepthSampleCounts( framebufferDepthSampleCounts_ )
+      , framebufferStencilSampleCounts( framebufferStencilSampleCounts_ )
+      , framebufferNoAttachmentsSampleCounts( framebufferNoAttachmentsSampleCounts_ )
+      , maxColorAttachments( maxColorAttachments_ )
+      , sampledImageColorSampleCounts( sampledImageColorSampleCounts_ )
+      , sampledImageIntegerSampleCounts( sampledImageIntegerSampleCounts_ )
+      , sampledImageDepthSampleCounts( sampledImageDepthSampleCounts_ )
+      , sampledImageStencilSampleCounts( sampledImageStencilSampleCounts_ )
+      , storageImageSampleCounts( storageImageSampleCounts_ )
+      , maxSampleMaskWords( maxSampleMaskWords_ )
+      , timestampComputeAndGraphics( timestampComputeAndGraphics_ )
+      , timestampPeriod( timestampPeriod_ )
+      , maxClipDistances( maxClipDistances_ )
+      , maxCullDistances( maxCullDistances_ )
+      , maxCombinedClipAndCullDistances( maxCombinedClipAndCullDistances_ )
+      , discreteQueuePriorities( discreteQueuePriorities_ )
+      , pointSizeRange{}
+      , lineWidthRange{}
+      , pointSizeGranularity( pointSizeGranularity_ )
+      , lineWidthGranularity( lineWidthGranularity_ )
+      , strictLines( strictLines_ )
+      , standardSampleLocations( standardSampleLocations_ )
+      , optimalBufferCopyOffsetAlignment( optimalBufferCopyOffsetAlignment_ )
+      , optimalBufferCopyRowPitchAlignment( optimalBufferCopyRowPitchAlignment_ )
+      , nonCoherentAtomSize( nonCoherentAtomSize_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3,3>::copy( maxComputeWorkGroupCount, maxComputeWorkGroupCount_ );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3,3>::copy( maxComputeWorkGroupSize, maxComputeWorkGroupSize_ );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,2,2>::copy( maxViewportDimensions, maxViewportDimensions_ );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2,2>::copy( viewportBoundsRange, viewportBoundsRange_ );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2,2>::copy( pointSizeRange, pointSizeRange_ );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2,2>::copy( lineWidthRange, lineWidthRange_ );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const& rhs ) VULKAN_HPP_NOEXCEPT
+      : maxImageDimension1D( rhs.maxImageDimension1D )
+      , maxImageDimension2D( rhs.maxImageDimension2D )
+      , maxImageDimension3D( rhs.maxImageDimension3D )
+      , maxImageDimensionCube( rhs.maxImageDimensionCube )
+      , maxImageArrayLayers( rhs.maxImageArrayLayers )
+      , maxTexelBufferElements( rhs.maxTexelBufferElements )
+      , maxUniformBufferRange( rhs.maxUniformBufferRange )
+      , maxStorageBufferRange( rhs.maxStorageBufferRange )
+      , maxPushConstantsSize( rhs.maxPushConstantsSize )
+      , maxMemoryAllocationCount( rhs.maxMemoryAllocationCount )
+      , maxSamplerAllocationCount( rhs.maxSamplerAllocationCount )
+      , bufferImageGranularity( rhs.bufferImageGranularity )
+      , sparseAddressSpaceSize( rhs.sparseAddressSpaceSize )
+      , maxBoundDescriptorSets( rhs.maxBoundDescriptorSets )
+      , maxPerStageDescriptorSamplers( rhs.maxPerStageDescriptorSamplers )
+      , maxPerStageDescriptorUniformBuffers( rhs.maxPerStageDescriptorUniformBuffers )
+      , maxPerStageDescriptorStorageBuffers( rhs.maxPerStageDescriptorStorageBuffers )
+      , maxPerStageDescriptorSampledImages( rhs.maxPerStageDescriptorSampledImages )
+      , maxPerStageDescriptorStorageImages( rhs.maxPerStageDescriptorStorageImages )
+      , maxPerStageDescriptorInputAttachments( rhs.maxPerStageDescriptorInputAttachments )
+      , maxPerStageResources( rhs.maxPerStageResources )
+      , maxDescriptorSetSamplers( rhs.maxDescriptorSetSamplers )
+      , maxDescriptorSetUniformBuffers( rhs.maxDescriptorSetUniformBuffers )
+      , maxDescriptorSetUniformBuffersDynamic( rhs.maxDescriptorSetUniformBuffersDynamic )
+      , maxDescriptorSetStorageBuffers( rhs.maxDescriptorSetStorageBuffers )
+      , maxDescriptorSetStorageBuffersDynamic( rhs.maxDescriptorSetStorageBuffersDynamic )
+      , maxDescriptorSetSampledImages( rhs.maxDescriptorSetSampledImages )
+      , maxDescriptorSetStorageImages( rhs.maxDescriptorSetStorageImages )
+      , maxDescriptorSetInputAttachments( rhs.maxDescriptorSetInputAttachments )
+      , maxVertexInputAttributes( rhs.maxVertexInputAttributes )
+      , maxVertexInputBindings( rhs.maxVertexInputBindings )
+      , maxVertexInputAttributeOffset( rhs.maxVertexInputAttributeOffset )
+      , maxVertexInputBindingStride( rhs.maxVertexInputBindingStride )
+      , maxVertexOutputComponents( rhs.maxVertexOutputComponents )
+      , maxTessellationGenerationLevel( rhs.maxTessellationGenerationLevel )
+      , maxTessellationPatchSize( rhs.maxTessellationPatchSize )
+      , maxTessellationControlPerVertexInputComponents( rhs.maxTessellationControlPerVertexInputComponents )
+      , maxTessellationControlPerVertexOutputComponents( rhs.maxTessellationControlPerVertexOutputComponents )
+      , maxTessellationControlPerPatchOutputComponents( rhs.maxTessellationControlPerPatchOutputComponents )
+      , maxTessellationControlTotalOutputComponents( rhs.maxTessellationControlTotalOutputComponents )
+      , maxTessellationEvaluationInputComponents( rhs.maxTessellationEvaluationInputComponents )
+      , maxTessellationEvaluationOutputComponents( rhs.maxTessellationEvaluationOutputComponents )
+      , maxGeometryShaderInvocations( rhs.maxGeometryShaderInvocations )
+      , maxGeometryInputComponents( rhs.maxGeometryInputComponents )
+      , maxGeometryOutputComponents( rhs.maxGeometryOutputComponents )
+      , maxGeometryOutputVertices( rhs.maxGeometryOutputVertices )
+      , maxGeometryTotalOutputComponents( rhs.maxGeometryTotalOutputComponents )
+      , maxFragmentInputComponents( rhs.maxFragmentInputComponents )
+      , maxFragmentOutputAttachments( rhs.maxFragmentOutputAttachments )
+      , maxFragmentDualSrcAttachments( rhs.maxFragmentDualSrcAttachments )
+      , maxFragmentCombinedOutputResources( rhs.maxFragmentCombinedOutputResources )
+      , maxComputeSharedMemorySize( rhs.maxComputeSharedMemorySize )
+      , maxComputeWorkGroupCount{}
+      , maxComputeWorkGroupInvocations( rhs.maxComputeWorkGroupInvocations )
+      , maxComputeWorkGroupSize{}
+      , subPixelPrecisionBits( rhs.subPixelPrecisionBits )
+      , subTexelPrecisionBits( rhs.subTexelPrecisionBits )
+      , mipmapPrecisionBits( rhs.mipmapPrecisionBits )
+      , maxDrawIndexedIndexValue( rhs.maxDrawIndexedIndexValue )
+      , maxDrawIndirectCount( rhs.maxDrawIndirectCount )
+      , maxSamplerLodBias( rhs.maxSamplerLodBias )
+      , maxSamplerAnisotropy( rhs.maxSamplerAnisotropy )
+      , maxViewports( rhs.maxViewports )
+      , maxViewportDimensions{}
+      , viewportBoundsRange{}
+      , viewportSubPixelBits( rhs.viewportSubPixelBits )
+      , minMemoryMapAlignment( rhs.minMemoryMapAlignment )
+      , minTexelBufferOffsetAlignment( rhs.minTexelBufferOffsetAlignment )
+      , minUniformBufferOffsetAlignment( rhs.minUniformBufferOffsetAlignment )
+      , minStorageBufferOffsetAlignment( rhs.minStorageBufferOffsetAlignment )
+      , minTexelOffset( rhs.minTexelOffset )
+      , maxTexelOffset( rhs.maxTexelOffset )
+      , minTexelGatherOffset( rhs.minTexelGatherOffset )
+      , maxTexelGatherOffset( rhs.maxTexelGatherOffset )
+      , minInterpolationOffset( rhs.minInterpolationOffset )
+      , maxInterpolationOffset( rhs.maxInterpolationOffset )
+      , subPixelInterpolationOffsetBits( rhs.subPixelInterpolationOffsetBits )
+      , maxFramebufferWidth( rhs.maxFramebufferWidth )
+      , maxFramebufferHeight( rhs.maxFramebufferHeight )
+      , maxFramebufferLayers( rhs.maxFramebufferLayers )
+      , framebufferColorSampleCounts( rhs.framebufferColorSampleCounts )
+      , framebufferDepthSampleCounts( rhs.framebufferDepthSampleCounts )
+      , framebufferStencilSampleCounts( rhs.framebufferStencilSampleCounts )
+      , framebufferNoAttachmentsSampleCounts( rhs.framebufferNoAttachmentsSampleCounts )
+      , maxColorAttachments( rhs.maxColorAttachments )
+      , sampledImageColorSampleCounts( rhs.sampledImageColorSampleCounts )
+      , sampledImageIntegerSampleCounts( rhs.sampledImageIntegerSampleCounts )
+      , sampledImageDepthSampleCounts( rhs.sampledImageDepthSampleCounts )
+      , sampledImageStencilSampleCounts( rhs.sampledImageStencilSampleCounts )
+      , storageImageSampleCounts( rhs.storageImageSampleCounts )
+      , maxSampleMaskWords( rhs.maxSampleMaskWords )
+      , timestampComputeAndGraphics( rhs.timestampComputeAndGraphics )
+      , timestampPeriod( rhs.timestampPeriod )
+      , maxClipDistances( rhs.maxClipDistances )
+      , maxCullDistances( rhs.maxCullDistances )
+      , maxCombinedClipAndCullDistances( rhs.maxCombinedClipAndCullDistances )
+      , discreteQueuePriorities( rhs.discreteQueuePriorities )
+      , pointSizeRange{}
+      , lineWidthRange{}
+      , pointSizeGranularity( rhs.pointSizeGranularity )
+      , lineWidthGranularity( rhs.lineWidthGranularity )
+      , strictLines( rhs.strictLines )
+      , standardSampleLocations( rhs.standardSampleLocations )
+      , optimalBufferCopyOffsetAlignment( rhs.optimalBufferCopyOffsetAlignment )
+      , optimalBufferCopyRowPitchAlignment( rhs.optimalBufferCopyRowPitchAlignment )
+      , nonCoherentAtomSize( rhs.nonCoherentAtomSize )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3,3>::copy( maxComputeWorkGroupCount, rhs.maxComputeWorkGroupCount );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,3,3>::copy( maxComputeWorkGroupSize, rhs.maxComputeWorkGroupSize );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t,2,2>::copy( maxViewportDimensions, rhs.maxViewportDimensions );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2,2>::copy( viewportBoundsRange, rhs.viewportBoundsRange );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2,2>::copy( pointSizeRange, rhs.pointSizeRange );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<float,2,2>::copy( lineWidthRange, rhs.lineWidthRange );
+    }
+
+    PhysicalDeviceLimits & operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( PhysicalDeviceLimits ) );
+      return *this;
+    }
+
+    PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceLimits& operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceLimits const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceLimits*>( this );
+    }
+
+    operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceLimits*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceLimits const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( maxImageDimension1D == rhs.maxImageDimension1D )
+          && ( maxImageDimension2D == rhs.maxImageDimension2D )
+          && ( maxImageDimension3D == rhs.maxImageDimension3D )
+          && ( maxImageDimensionCube == rhs.maxImageDimensionCube )
+          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+          && ( maxTexelBufferElements == rhs.maxTexelBufferElements )
+          && ( maxUniformBufferRange == rhs.maxUniformBufferRange )
+          && ( maxStorageBufferRange == rhs.maxStorageBufferRange )
+          && ( maxPushConstantsSize == rhs.maxPushConstantsSize )
+          && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount )
+          && ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount )
+          && ( bufferImageGranularity == rhs.bufferImageGranularity )
+          && ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize )
+          && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets )
+          && ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers )
+          && ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers )
+          && ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers )
+          && ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages )
+          && ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages )
+          && ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments )
+          && ( maxPerStageResources == rhs.maxPerStageResources )
+          && ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers )
+          && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers )
+          && ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic )
+          && ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers )
+          && ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic )
+          && ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages )
+          && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages )
+          && ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments )
+          && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes )
+          && ( maxVertexInputBindings == rhs.maxVertexInputBindings )
+          && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset )
+          && ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride )
+          && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents )
+          && ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel )
+          && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize )
+          && ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents )
+          && ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents )
+          && ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents )
+          && ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents )
+          && ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents )
+          && ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents )
+          && ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations )
+          && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents )
+          && ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents )
+          && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices )
+          && ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents )
+          && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents )
+          && ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments )
+          && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments )
+          && ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources )
+          && ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize )
+          && ( memcmp( maxComputeWorkGroupCount, rhs.maxComputeWorkGroupCount, 3 * sizeof( uint32_t ) ) == 0 )
+          && ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations )
+          && ( memcmp( maxComputeWorkGroupSize, rhs.maxComputeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 )
+          && ( subPixelPrecisionBits == rhs.subPixelPrecisionBits )
+          && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits )
+          && ( mipmapPrecisionBits == rhs.mipmapPrecisionBits )
+          && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue )
+          && ( maxDrawIndirectCount == rhs.maxDrawIndirectCount )
+          && ( maxSamplerLodBias == rhs.maxSamplerLodBias )
+          && ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy )
+          && ( maxViewports == rhs.maxViewports )
+          && ( memcmp( maxViewportDimensions, rhs.maxViewportDimensions, 2 * sizeof( uint32_t ) ) == 0 )
+          && ( memcmp( viewportBoundsRange, rhs.viewportBoundsRange, 2 * sizeof( float ) ) == 0 )
+          && ( viewportSubPixelBits == rhs.viewportSubPixelBits )
+          && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment )
+          && ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment )
+          && ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment )
+          && ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment )
+          && ( minTexelOffset == rhs.minTexelOffset )
+          && ( maxTexelOffset == rhs.maxTexelOffset )
+          && ( minTexelGatherOffset == rhs.minTexelGatherOffset )
+          && ( maxTexelGatherOffset == rhs.maxTexelGatherOffset )
+          && ( minInterpolationOffset == rhs.minInterpolationOffset )
+          && ( maxInterpolationOffset == rhs.maxInterpolationOffset )
+          && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits )
+          && ( maxFramebufferWidth == rhs.maxFramebufferWidth )
+          && ( maxFramebufferHeight == rhs.maxFramebufferHeight )
+          && ( maxFramebufferLayers == rhs.maxFramebufferLayers )
+          && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts )
+          && ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts )
+          && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts )
+          && ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts )
+          && ( maxColorAttachments == rhs.maxColorAttachments )
+          && ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts )
+          && ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts )
+          && ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts )
+          && ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts )
+          && ( storageImageSampleCounts == rhs.storageImageSampleCounts )
+          && ( maxSampleMaskWords == rhs.maxSampleMaskWords )
+          && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics )
+          && ( timestampPeriod == rhs.timestampPeriod )
+          && ( maxClipDistances == rhs.maxClipDistances )
+          && ( maxCullDistances == rhs.maxCullDistances )
+          && ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances )
+          && ( discreteQueuePriorities == rhs.discreteQueuePriorities )
+          && ( memcmp( pointSizeRange, rhs.pointSizeRange, 2 * sizeof( float ) ) == 0 )
+          && ( memcmp( lineWidthRange, rhs.lineWidthRange, 2 * sizeof( float ) ) == 0 )
+          && ( pointSizeGranularity == rhs.pointSizeGranularity )
+          && ( lineWidthGranularity == rhs.lineWidthGranularity )
+          && ( strictLines == rhs.strictLines )
+          && ( standardSampleLocations == rhs.standardSampleLocations )
+          && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment )
+          && ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment )
+          && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
+    }
+
+    bool operator!=( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t maxImageDimension1D = {};
+    uint32_t maxImageDimension2D = {};
+    uint32_t maxImageDimension3D = {};
+    uint32_t maxImageDimensionCube = {};
+    uint32_t maxImageArrayLayers = {};
+    uint32_t maxTexelBufferElements = {};
+    uint32_t maxUniformBufferRange = {};
+    uint32_t maxStorageBufferRange = {};
+    uint32_t maxPushConstantsSize = {};
+    uint32_t maxMemoryAllocationCount = {};
+    uint32_t maxSamplerAllocationCount = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {};
+    uint32_t maxBoundDescriptorSets = {};
+    uint32_t maxPerStageDescriptorSamplers = {};
+    uint32_t maxPerStageDescriptorUniformBuffers = {};
+    uint32_t maxPerStageDescriptorStorageBuffers = {};
+    uint32_t maxPerStageDescriptorSampledImages = {};
+    uint32_t maxPerStageDescriptorStorageImages = {};
+    uint32_t maxPerStageDescriptorInputAttachments = {};
+    uint32_t maxPerStageResources = {};
+    uint32_t maxDescriptorSetSamplers = {};
+    uint32_t maxDescriptorSetUniformBuffers = {};
+    uint32_t maxDescriptorSetUniformBuffersDynamic = {};
+    uint32_t maxDescriptorSetStorageBuffers = {};
+    uint32_t maxDescriptorSetStorageBuffersDynamic = {};
+    uint32_t maxDescriptorSetSampledImages = {};
+    uint32_t maxDescriptorSetStorageImages = {};
+    uint32_t maxDescriptorSetInputAttachments = {};
+    uint32_t maxVertexInputAttributes = {};
+    uint32_t maxVertexInputBindings = {};
+    uint32_t maxVertexInputAttributeOffset = {};
+    uint32_t maxVertexInputBindingStride = {};
+    uint32_t maxVertexOutputComponents = {};
+    uint32_t maxTessellationGenerationLevel = {};
+    uint32_t maxTessellationPatchSize = {};
+    uint32_t maxTessellationControlPerVertexInputComponents = {};
+    uint32_t maxTessellationControlPerVertexOutputComponents = {};
+    uint32_t maxTessellationControlPerPatchOutputComponents = {};
+    uint32_t maxTessellationControlTotalOutputComponents = {};
+    uint32_t maxTessellationEvaluationInputComponents = {};
+    uint32_t maxTessellationEvaluationOutputComponents = {};
+    uint32_t maxGeometryShaderInvocations = {};
+    uint32_t maxGeometryInputComponents = {};
+    uint32_t maxGeometryOutputComponents = {};
+    uint32_t maxGeometryOutputVertices = {};
+    uint32_t maxGeometryTotalOutputComponents = {};
+    uint32_t maxFragmentInputComponents = {};
+    uint32_t maxFragmentOutputAttachments = {};
+    uint32_t maxFragmentDualSrcAttachments = {};
+    uint32_t maxFragmentCombinedOutputResources = {};
+    uint32_t maxComputeSharedMemorySize = {};
+    uint32_t maxComputeWorkGroupCount[3] = {};
+    uint32_t maxComputeWorkGroupInvocations = {};
+    uint32_t maxComputeWorkGroupSize[3] = {};
+    uint32_t subPixelPrecisionBits = {};
+    uint32_t subTexelPrecisionBits = {};
+    uint32_t mipmapPrecisionBits = {};
+    uint32_t maxDrawIndexedIndexValue = {};
+    uint32_t maxDrawIndirectCount = {};
+    float maxSamplerLodBias = {};
+    float maxSamplerAnisotropy = {};
+    uint32_t maxViewports = {};
+    uint32_t maxViewportDimensions[2] = {};
+    float viewportBoundsRange[2] = {};
+    uint32_t viewportSubPixelBits = {};
+    size_t minMemoryMapAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {};
+    int32_t minTexelOffset = {};
+    uint32_t maxTexelOffset = {};
+    int32_t minTexelGatherOffset = {};
+    uint32_t maxTexelGatherOffset = {};
+    float minInterpolationOffset = {};
+    float maxInterpolationOffset = {};
+    uint32_t subPixelInterpolationOffsetBits = {};
+    uint32_t maxFramebufferWidth = {};
+    uint32_t maxFramebufferHeight = {};
+    uint32_t maxFramebufferLayers = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {};
+    uint32_t maxColorAttachments = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {};
+    VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {};
+    uint32_t maxSampleMaskWords = {};
+    VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {};
+    float timestampPeriod = {};
+    uint32_t maxClipDistances = {};
+    uint32_t maxCullDistances = {};
+    uint32_t maxCombinedClipAndCullDistances = {};
+    uint32_t discreteQueuePriorities = {};
+    float pointSizeRange[2] = {};
+    float lineWidthRange[2] = {};
+    float pointSizeGranularity = {};
+    float lineWidthGranularity = {};
+    VULKAN_HPP_NAMESPACE::Bool32 strictLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {};
+  };
+  static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceLimits>::value, "struct wrapper is not a standard layout!" );
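+
+  // Usage sketch (illustrative only; assumes a valid vk::PhysicalDevice):
+  // PhysicalDeviceLimits carries no sType/pNext and is read back as part of
+  // PhysicalDeviceProperties rather than filled in by the application:
+  //   vk::PhysicalDeviceProperties properties = physicalDevice.getProperties();
+  //   uint32_t maxDimension2D = properties.limits.maxImageDimension2D;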
+
+  struct PhysicalDeviceLineRasterizationFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {},
+      VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {} ) VULKAN_HPP_NOEXCEPT
+      : rectangularLines( rectangularLines_ )
+      , bresenhamLines( bresenhamLines_ )
+      , smoothLines( smoothLines_ )
+      , stippledRectangularLines( stippledRectangularLines_ )
+      , stippledBresenhamLines( stippledBresenhamLines_ )
+      , stippledSmoothLines( stippledSmoothLines_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( PhysicalDeviceLineRasterizationFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , rectangularLines( rhs.rectangularLines )
+      , bresenhamLines( rhs.bresenhamLines )
+      , smoothLines( rhs.smoothLines )
+      , stippledRectangularLines( rhs.stippledRectangularLines )
+      , stippledBresenhamLines( rhs.stippledBresenhamLines )
+      , stippledSmoothLines( rhs.stippledSmoothLines )
+    {}
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & operator=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) - offsetof( PhysicalDeviceLineRasterizationFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT& operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rectangularLines = rectangularLines_;
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      bresenhamLines = bresenhamLines_;
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      smoothLines = smoothLines_;
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stippledRectangularLines = stippledRectangularLines_;
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stippledBresenhamLines = stippledBresenhamLines_;
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationFeaturesEXT & setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stippledSmoothLines = stippledSmoothLines_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceLineRasterizationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceLineRasterizationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceLineRasterizationFeaturesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceLineRasterizationFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceLineRasterizationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rectangularLines == rhs.rectangularLines )
+          && ( bresenhamLines == rhs.bresenhamLines )
+          && ( smoothLines == rhs.smoothLines )
+          && ( stippledRectangularLines == rhs.stippledRectangularLines )
+          && ( stippledBresenhamLines == rhs.stippledBresenhamLines )
+          && ( stippledSmoothLines == rhs.stippledSmoothLines );
+    }
+
+    bool operator!=( PhysicalDeviceLineRasterizationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {};
+    VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {};
+  };
+  static_assert( sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceLineRasterizationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceLineRasterizationPropertiesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT( uint32_t lineSubPixelPrecisionBits_ = {} ) VULKAN_HPP_NOEXCEPT
+      : lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT( PhysicalDeviceLineRasterizationPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , lineSubPixelPrecisionBits( rhs.lineSubPixelPrecisionBits )
+    {}
+
+    PhysicalDeviceLineRasterizationPropertiesEXT & operator=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) - offsetof( PhysicalDeviceLineRasterizationPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceLineRasterizationPropertiesEXT& operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceLineRasterizationPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceLineRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceLineRasterizationPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceLineRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits );
+    }
+
+    bool operator!=( PhysicalDeviceLineRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
+    void* pNext = {};
+    uint32_t lineSubPixelPrecisionBits = {};
+  };
+  static_assert( sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceLineRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceMaintenance3Properties
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( uint32_t maxPerSetDescriptors_ = {},
+      VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxPerSetDescriptors( maxPerSetDescriptors_ )
+      , maxMemoryAllocationSize( maxMemoryAllocationSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , maxPerSetDescriptors( rhs.maxPerSetDescriptors )
+      , maxMemoryAllocationSize( rhs.maxMemoryAllocationSize )
+    {}
+
+    PhysicalDeviceMaintenance3Properties & operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMaintenance3Properties ) - offsetof( PhysicalDeviceMaintenance3Properties, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceMaintenance3Properties& operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMaintenance3Properties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties*>( this );
+    }
+
+    operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMaintenance3Properties*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMaintenance3Properties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMaintenance3Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors )
+          && ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
+    }
+
+    bool operator!=( PhysicalDeviceMaintenance3Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties;
+    void* pNext = {};
+    uint32_t maxPerSetDescriptors = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
+  };
+  static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMaintenance3Properties>::value, "struct wrapper is not a standard layout!" );
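+
+  // Usage sketch (illustrative only; relies on the StructureChain helpers
+  // generated elsewhere in these headers): extension feature/property structs
+  // are typically queried through the pNext chain of getFeatures2/getProperties2:
+  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
+  //                                            vk::PhysicalDeviceLineRasterizationFeaturesEXT>();
+  //   vk::Bool32 smooth = chain.get<vk::PhysicalDeviceLineRasterizationFeaturesEXT>().smoothLines;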
+
+  struct PhysicalDeviceMemoryBudgetPropertiesEXT
+  {
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( std::array<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const& heapBudget_ = {},
+                                                                     std::array<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> const& heapUsage_ = {} ) VULKAN_HPP_NOEXCEPT
+      : heapBudget{}
+      , heapUsage{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS>::copy( heapBudget, heapBudget_ );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS>::copy( heapUsage, heapUsage_ );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , heapBudget{}
+      , heapUsage{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS>::copy( heapBudget, rhs.heapBudget );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS>::copy( heapUsage, rhs.heapUsage );
+    }
+
+    PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) - offsetof( PhysicalDeviceMemoryBudgetPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceMemoryBudgetPropertiesEXT& operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memcmp( heapBudget, rhs.heapBudget, VK_MAX_MEMORY_HEAPS * sizeof( VULKAN_HPP_NAMESPACE::DeviceSize ) ) == 0 )
+          && ( memcmp( heapUsage, rhs.heapUsage, VK_MAX_MEMORY_HEAPS * sizeof( VULKAN_HPP_NAMESPACE::DeviceSize ) ) == 0 );
+    }
+
+    bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize heapBudget[VK_MAX_MEMORY_HEAPS] = {};
+    VULKAN_HPP_NAMESPACE::DeviceSize heapUsage[VK_MAX_MEMORY_HEAPS] = {};
+  };
+  static_assert( sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) == sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMemoryBudgetPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
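+
+  // --- Illustrative usage sketch (editor's note, not generated code) ---
+  // Querying per-heap budgets by chaining the struct above into a
+  // vk::PhysicalDeviceMemoryProperties2 query; assumes a vk::PhysicalDevice
+  // `physicalDevice` with VK_EXT_memory_budget available, and uses the
+  // pointer-style overload generated for vkGetPhysicalDeviceMemoryProperties2.
+  //
+  //   vk::PhysicalDeviceMemoryBudgetPropertiesEXT budget;
+  //   vk::PhysicalDeviceMemoryProperties2 memoryProperties2;
+  //   memoryProperties2.pNext = &budget;
+  //   physicalDevice.getMemoryProperties2( &memoryProperties2 );
+  //   // budget.heapBudget[i] / budget.heapUsage[i] are valid for each heap i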
+
+  struct PhysicalDeviceMemoryPriorityFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {} ) VULKAN_HPP_NOEXCEPT
+      : memoryPriority( memoryPriority_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , memoryPriority( rhs.memoryPriority )
+    {}
+
+    PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) - offsetof( PhysicalDeviceMemoryPriorityFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceMemoryPriorityFeaturesEXT& operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memoryPriority = memoryPriority_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryPriority == rhs.memoryPriority );
+    }
+
+    bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {};
+  };
+  static_assert( sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) == sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMemoryPriorityFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceMemoryProperties
+  {
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( uint32_t memoryTypeCount_ = {},
+                                                            std::array<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> const& memoryTypes_ = {},
+                                                            uint32_t memoryHeapCount_ = {},
+                                                            std::array<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> const& memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT
+      : memoryTypeCount( memoryTypeCount_ )
+      , memoryTypes{}
+      , memoryHeapCount( memoryHeapCount_ )
+      , memoryHeaps{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES>::copy( memoryTypes, memoryTypes_ );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS>::copy( memoryHeaps, memoryHeaps_ );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const& rhs ) VULKAN_HPP_NOEXCEPT
+      : memoryTypeCount( rhs.memoryTypeCount )
+      , memoryTypes{}
+      , memoryHeapCount( rhs.memoryHeapCount )
+      , memoryHeaps{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES>::copy( memoryTypes, rhs.memoryTypes );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS>::copy( memoryHeaps, rhs.memoryHeaps );
+    }
+
+    PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( PhysicalDeviceMemoryProperties ) );
+      return *this;
+    }
+
+    PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceMemoryProperties& operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMemoryProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( memoryTypeCount == rhs.memoryTypeCount )
+          && ( memcmp( memoryTypes, rhs.memoryTypes, std::min<uint32_t>( VK_MAX_MEMORY_TYPES, memoryTypeCount ) * sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) ) == 0 )
+          && ( memoryHeapCount == rhs.memoryHeapCount )
+          && ( memcmp( memoryHeaps, rhs.memoryHeaps, std::min<uint32_t>( VK_MAX_MEMORY_HEAPS, memoryHeapCount ) * sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) ) == 0 );
+    }
+
+    bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t memoryTypeCount = {};
+    VULKAN_HPP_NAMESPACE::MemoryType memoryTypes[VK_MAX_MEMORY_TYPES] = {};
+    uint32_t memoryHeapCount = {};
+    VULKAN_HPP_NAMESPACE::MemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS] = {};
+  };
+  static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties>::value, "struct wrapper is not a standard layout!" );
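+
+  // --- Illustrative usage sketch (editor's note, not generated code) ---
+  // Only the first memoryTypeCount / memoryHeapCount array entries are
+  // meaningful (note that operator== above compares only that prefix). A
+  // typical allocation helper scans them for a compatible memory type:
+  //
+  //   uint32_t findMemoryType( vk::PhysicalDeviceMemoryProperties const& memoryProperties,
+  //                            uint32_t typeBits, vk::MemoryPropertyFlags required )
+  //   {
+  //     for ( uint32_t i = 0; i < memoryProperties.memoryTypeCount; ++i )
+  //       if ( ( typeBits & ( 1u << i ) ) &&
+  //            ( memoryProperties.memoryTypes[i].propertyFlags & required ) == required )
+  //         return i;
+  //     throw std::runtime_error( "no compatible memory type" );
+  //   }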
+
+  struct PhysicalDeviceMemoryProperties2
+  {
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : memoryProperties( memoryProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , memoryProperties( rhs.memoryProperties )
+    {}
+
+    PhysicalDeviceMemoryProperties2 & operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMemoryProperties2 ) - offsetof( PhysicalDeviceMemoryProperties2, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceMemoryProperties2& operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMemoryProperties2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2*>( this );
+    }
+
+    operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMemoryProperties2 const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memoryProperties == rhs.memoryProperties );
+    }
+
+    bool operator!=( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {};
+  };
+  static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties2>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceMeshShaderFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {},
+                                                             VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {} ) VULKAN_HPP_NOEXCEPT
+      : taskShader( taskShader_ )
+      , meshShader( meshShader_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , taskShader( rhs.taskShader )
+      , meshShader( rhs.meshShader )
+    {}
+
+    PhysicalDeviceMeshShaderFeaturesNV & operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMeshShaderFeaturesNV ) - offsetof( PhysicalDeviceMeshShaderFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceMeshShaderFeaturesNV& operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceMeshShaderFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      taskShader = taskShader_;
+      return *this;
+    }
+
+    PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      meshShader = meshShader_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMeshShaderFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( taskShader == rhs.taskShader )
+          && ( meshShader == rhs.meshShader );
+    }
+
+    bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 taskShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 meshShader = {};
+  };
+  static_assert( sizeof( PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMeshShaderFeaturesNV>::value, "struct wrapper is not a standard layout!" );
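+
+  // --- Illustrative usage sketch (editor's note, not generated code) ---
+  // Checking task/mesh shader support before enabling VK_NV_mesh_shader;
+  // assumes a vk::PhysicalDevice `physicalDevice` and uses the pointer-style
+  // overload generated for vkGetPhysicalDeviceFeatures2.
+  //
+  //   vk::PhysicalDeviceMeshShaderFeaturesNV meshFeatures;
+  //   vk::PhysicalDeviceFeatures2 features2;
+  //   features2.pNext = &meshFeatures;
+  //   physicalDevice.getFeatures2( &features2 );
+  //   bool meshShadersUsable = meshFeatures.taskShader && meshFeatures.meshShader;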
+
+  struct PhysicalDeviceMeshShaderPropertiesNV
+  {
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( uint32_t maxDrawMeshTasksCount_ = {},
+                                                                  uint32_t maxTaskWorkGroupInvocations_ = {},
+                                                                  std::array<uint32_t, 3> const& maxTaskWorkGroupSize_ = {},
+                                                                  uint32_t maxTaskTotalMemorySize_ = {},
+                                                                  uint32_t maxTaskOutputCount_ = {},
+                                                                  uint32_t maxMeshWorkGroupInvocations_ = {},
+                                                                  std::array<uint32_t, 3> const& maxMeshWorkGroupSize_ = {},
+                                                                  uint32_t maxMeshTotalMemorySize_ = {},
+                                                                  uint32_t maxMeshOutputVertices_ = {},
+                                                                  uint32_t maxMeshOutputPrimitives_ = {},
+                                                                  uint32_t maxMeshMultiviewViewCount_ = {},
+                                                                  uint32_t meshOutputPerVertexGranularity_ = {},
+                                                                  uint32_t meshOutputPerPrimitiveGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxDrawMeshTasksCount( maxDrawMeshTasksCount_ )
+      , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ )
+      , maxTaskWorkGroupSize{}
+      , maxTaskTotalMemorySize( maxTaskTotalMemorySize_ )
+      , maxTaskOutputCount( maxTaskOutputCount_ )
+      , maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ )
+      , maxMeshWorkGroupSize{}
+      , maxMeshTotalMemorySize( maxMeshTotalMemorySize_ )
+      , maxMeshOutputVertices( maxMeshOutputVertices_ )
+      , maxMeshOutputPrimitives( maxMeshOutputPrimitives_ )
+      , maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ )
+      , meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ )
+      , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t, 3>::copy( maxTaskWorkGroupSize, maxTaskWorkGroupSize_ );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t, 3>::copy( maxMeshWorkGroupSize, maxMeshWorkGroupSize_ );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , maxDrawMeshTasksCount( rhs.maxDrawMeshTasksCount )
+      , maxTaskWorkGroupInvocations( rhs.maxTaskWorkGroupInvocations )
+      , maxTaskWorkGroupSize{}
+      , maxTaskTotalMemorySize( rhs.maxTaskTotalMemorySize )
+      , maxTaskOutputCount( rhs.maxTaskOutputCount )
+      , maxMeshWorkGroupInvocations( rhs.maxMeshWorkGroupInvocations )
+      , maxMeshWorkGroupSize{}
+      , maxMeshTotalMemorySize( rhs.maxMeshTotalMemorySize )
+      , maxMeshOutputVertices( rhs.maxMeshOutputVertices )
+      , maxMeshOutputPrimitives( rhs.maxMeshOutputPrimitives )
+      , maxMeshMultiviewViewCount( rhs.maxMeshMultiviewViewCount )
+      , meshOutputPerVertexGranularity( rhs.meshOutputPerVertexGranularity )
+      , meshOutputPerPrimitiveGranularity( rhs.meshOutputPerPrimitiveGranularity )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t, 3>::copy( maxTaskWorkGroupSize, rhs.maxTaskWorkGroupSize );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t, 3>::copy( maxMeshWorkGroupSize, rhs.maxMeshWorkGroupSize );
+    }
+
+    PhysicalDeviceMeshShaderPropertiesNV & operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMeshShaderPropertiesNV ) - offsetof( PhysicalDeviceMeshShaderPropertiesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceMeshShaderPropertiesNV& operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMeshShaderPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount )
+          && ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations )
+          && ( memcmp( maxTaskWorkGroupSize, rhs.maxTaskWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 )
+          && ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize )
+          && ( maxTaskOutputCount == rhs.maxTaskOutputCount )
+          && ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations )
+          && ( memcmp( maxMeshWorkGroupSize, rhs.maxMeshWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 )
+          && ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize )
+          && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices )
+          && ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives )
+          && ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount )
+          && ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity )
+          && ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity );
+    }
+
+    bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
+    void* pNext = {};
+    uint32_t maxDrawMeshTasksCount = {};
+    uint32_t maxTaskWorkGroupInvocations = {};
+    uint32_t maxTaskWorkGroupSize[3] = {};
+    uint32_t maxTaskTotalMemorySize = {};
+    uint32_t maxTaskOutputCount = {};
+    uint32_t maxMeshWorkGroupInvocations = {};
+    uint32_t maxMeshWorkGroupSize[3] = {};
+    uint32_t maxMeshTotalMemorySize = {};
+    uint32_t maxMeshOutputVertices = {};
+    uint32_t maxMeshOutputPrimitives = {};
+    uint32_t maxMeshMultiviewViewCount = {};
+    uint32_t meshOutputPerVertexGranularity = {};
+    uint32_t meshOutputPerPrimitiveGranularity = {};
+  };
+  static_assert( sizeof( PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMeshShaderPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceMultiviewFeatures
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {},
+                                                          VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {},
+                                                          VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {} ) VULKAN_HPP_NOEXCEPT
+      : multiview( multiview_ )
+      , multiviewGeometryShader( multiviewGeometryShader_ )
+      , multiviewTessellationShader( multiviewTessellationShader_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , multiview( rhs.multiview )
+      , multiviewGeometryShader( rhs.multiviewGeometryShader )
+      , multiviewTessellationShader( rhs.multiviewTessellationShader )
+    {}
+
+    PhysicalDeviceMultiviewFeatures & operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMultiviewFeatures ) - offsetof( PhysicalDeviceMultiviewFeatures, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceMultiviewFeatures& operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewFeatures & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiview = multiview_;
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewFeatures & setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiviewGeometryShader = multiviewGeometryShader_;
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewFeatures & setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      multiviewTessellationShader = multiviewTessellationShader_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMultiviewFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMultiviewFeatures*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMultiviewFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMultiviewFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( multiview == rhs.multiview )
+          && ( multiviewGeometryShader == rhs.multiviewGeometryShader )
+          && ( multiviewTessellationShader == rhs.multiviewTessellationShader );
+    }
+
+    bool operator!=( PhysicalDeviceMultiviewFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiview = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
+    VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
+  };
+  static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {} ) VULKAN_HPP_NOEXCEPT
+      : perViewPositionAllComponents( perViewPositionAllComponents_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , perViewPositionAllComponents( rhs.perViewPositionAllComponents )
+    {}
+
+    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) - offsetof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX& operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
+    }
+
+    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( perViewPositionAllComponents == rhs.perViewPositionAllComponents );
+    }
+
+    bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {};
+  };
+  static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceMultiviewProperties
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = {},
+                                                            uint32_t maxMultiviewInstanceIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxMultiviewViewCount( maxMultiviewViewCount_ )
+      , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , maxMultiviewViewCount( rhs.maxMultiviewViewCount )
+      , maxMultiviewInstanceIndex( rhs.maxMultiviewInstanceIndex )
+    {}
+
+    PhysicalDeviceMultiviewProperties & operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceMultiviewProperties ) - offsetof( PhysicalDeviceMultiviewProperties, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceMultiviewProperties& operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceMultiviewProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceMultiviewProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceMultiviewProperties*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceMultiviewProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceMultiviewProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount )
+          && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex );
+    }
+
+    bool operator!=( PhysicalDeviceMultiviewProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties;
+    void* pNext = {};
+    uint32_t maxMultiviewViewCount = {};
+    uint32_t maxMultiviewInstanceIndex = {};
+  };
+  static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDevicePCIBusInfoPropertiesEXT
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( uint32_t pciDomain_ = {},
+                                                                uint32_t pciBus_ = {},
+                                                                uint32_t pciDevice_ = {},
+                                                                uint32_t pciFunction_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pciDomain( pciDomain_ )
+      , pciBus( pciBus_ )
+      , pciDevice( pciDevice_ )
+      , pciFunction( pciFunction_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , pciDomain( rhs.pciDomain )
+      , pciBus( rhs.pciBus )
+      , pciDevice( rhs.pciDevice )
+      , pciFunction( rhs.pciFunction )
+    {}
+
+    PhysicalDevicePCIBusInfoPropertiesEXT & operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) - offsetof( PhysicalDevicePCIBusInfoPropertiesEXT, pNext ) );
+      return *this;
+    }
+
+    PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDevicePCIBusInfoPropertiesEXT& operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDevicePCIBusInfoPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
+    }
+
+    operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const& ) const = default;
+#else
+    bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pciDomain == rhs.pciDomain )
+          && ( pciBus == rhs.pciBus )
+          && ( pciDevice == rhs.pciDevice )
+          && ( pciFunction == rhs.pciFunction );
+    }
+
+    bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
+    void* pNext = {};
+    uint32_t pciDomain = {};
+    uint32_t pciBus = {};
+    uint32_t pciDevice = {};
+    uint32_t pciFunction = {};
+  };
+  static_assert( sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePCIBusInfoPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
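+
+  // --- Illustrative usage sketch (editor's note, not generated code) ---
+  // Identifying the physical adapter behind a vk::PhysicalDevice by chaining
+  // the struct above into a properties query; assumes VK_EXT_pci_bus_info is
+  // supported and uses the pointer-style getProperties2 overload.
+  //
+  //   vk::PhysicalDevicePCIBusInfoPropertiesEXT pciInfo;
+  //   vk::PhysicalDeviceProperties2 properties2;
+  //   properties2.pNext = &pciInfo;
+  //   physicalDevice.getProperties2( &properties2 );
+  //   // pciInfo.pciDomain / pciBus / pciDevice / pciFunction identify the adapter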
+
+  struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pipelineExecutableInfo( pipelineExecutableInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , pipelineExecutableInfo( rhs.pipelineExecutableInfo )
+    {}
+
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) - offsetof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR& operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineExecutableInfo = pipelineExecutableInfo_;
+      return *this;
+    }
+
+    operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
+    }
+
+    operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipelineExecutableInfo == rhs.pipelineExecutableInfo );
+    }
+
+    bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {};
+  };
+  static_assert( sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) == sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDevicePointClippingProperties
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes ) VULKAN_HPP_NOEXCEPT
+      : pointClippingBehavior( pointClippingBehavior_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , pointClippingBehavior( rhs.pointClippingBehavior )
+    {}
+
+    PhysicalDevicePointClippingProperties & operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePointClippingProperties ) - offsetof( PhysicalDevicePointClippingProperties, pNext ) );
+      return *this;
+    }
+
+    PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDevicePointClippingProperties& operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDevicePointClippingProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePointClippingProperties*>( this );
+    }
+
+    operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePointClippingProperties*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevicePointClippingProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDevicePointClippingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pointClippingBehavior == rhs.pointClippingBehavior );
+    }
+
+    bool operator!=( PhysicalDevicePointClippingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
+  };
+  static_assert( sizeof( PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePointClippingProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceSparseProperties
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {} ) VULKAN_HPP_NOEXCEPT
+      : residencyStandard2DBlockShape( residencyStandard2DBlockShape_ )
+      , residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ )
+      , residencyStandard3DBlockShape( residencyStandard3DBlockShape_ )
+      , residencyAlignedMipSize( residencyAlignedMipSize_ )
+      , residencyNonResidentStrict( residencyNonResidentStrict_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const& rhs ) VULKAN_HPP_NOEXCEPT
+      : residencyStandard2DBlockShape( rhs.residencyStandard2DBlockShape )
+      , residencyStandard2DMultisampleBlockShape( rhs.residencyStandard2DMultisampleBlockShape )
+      , residencyStandard3DBlockShape( rhs.residencyStandard3DBlockShape )
+      , residencyAlignedMipSize( rhs.residencyAlignedMipSize )
+      , residencyNonResidentStrict( rhs.residencyNonResidentStrict )
+    {}
+
+    PhysicalDeviceSparseProperties & operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( PhysicalDeviceSparseProperties ) );
+      return *this;
+    }
+
+    PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceSparseProperties& operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceSparseProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceSparseProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceSparseProperties*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceSparseProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape )
+          && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape )
+          && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape )
+          && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize )
+          && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
+    }
+
+    bool operator!=( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {};
+    VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {};
+  };
+  static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceSparseProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceProperties
+  {
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( uint32_t apiVersion_ = {},
+                                                      uint32_t driverVersion_ = {},
+                                                      uint32_t vendorID_ = {},
+                                                      uint32_t deviceID_ = {},
+                                                      VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther,
+                                                      std::array<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const& deviceName_ = {},
+                                                      std::array<uint8_t, VK_UUID_SIZE> const& pipelineCacheUUID_ = {},
+                                                      VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {},
+                                                      VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : apiVersion( apiVersion_ )
+      , driverVersion( driverVersion_ )
+      , vendorID( vendorID_ )
+      , deviceID( deviceID_ )
+      , deviceType( deviceType_ )
+      , deviceName{}
+      , pipelineCacheUUID{}
+      , limits( limits_ )
+      , sparseProperties( sparseProperties_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE>::copy( deviceName, deviceName_ );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t, VK_UUID_SIZE>::copy( pipelineCacheUUID, pipelineCacheUUID_ );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( PhysicalDeviceProperties const& rhs ) VULKAN_HPP_NOEXCEPT
+      : apiVersion( rhs.apiVersion )
+      , driverVersion( rhs.driverVersion )
+      , vendorID( rhs.vendorID )
+      , deviceID( rhs.deviceID )
+      , deviceType( rhs.deviceType )
+      , deviceName{}
+      , pipelineCacheUUID{}
+      , limits( rhs.limits )
+      , sparseProperties( rhs.sparseProperties )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE>::copy( deviceName, rhs.deviceName );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint8_t, VK_UUID_SIZE>::copy( pipelineCacheUUID, rhs.pipelineCacheUUID );
+    }
+
+    PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( PhysicalDeviceProperties ) );
+      return *this;
+    }
+
+    PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceProperties& operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProperties*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( apiVersion == rhs.apiVersion )
+          && ( driverVersion == rhs.driverVersion )
+          && ( vendorID == rhs.vendorID )
+          && ( deviceID == rhs.deviceID )
+          && ( deviceType == rhs.deviceType )
+          && ( memcmp( deviceName, rhs.deviceName, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE * sizeof( char ) ) == 0 )
+          && ( memcmp( pipelineCacheUUID, rhs.pipelineCacheUUID, VK_UUID_SIZE * sizeof( uint8_t ) ) == 0 )
+          && ( limits == rhs.limits )
+          && ( sparseProperties == rhs.sparseProperties );
+    }
+
+    bool operator!=( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t apiVersion = {};
+    uint32_t driverVersion = {};
+    uint32_t vendorID = {};
+    uint32_t deviceID = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther;
+    char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE] = {};
+    uint8_t pipelineCacheUUID[VK_UUID_SIZE] = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {};
+  };
+  static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceProperties2
+  {
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : properties( properties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties2 const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , properties( rhs.properties )
+    {}
+
+    PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceProperties2 ) - offsetof( PhysicalDeviceProperties2, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceProperties2& operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceProperties2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProperties2*>( this );
+    }
+
+    operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProperties2*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceProperties2 const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( properties == rhs.properties );
+    }
+
+    bool operator!=( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {};
+  };
+  static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceProperties2>::value, "struct wrapper is not a standard layout!" );
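+
+  // --- Illustrative usage sketch (editor's note, not generated code) ---
+  // In enhanced mode Vulkan-Hpp also generates a StructureChain overload of
+  // getProperties2, which links the chain automatically; whether that overload
+  // is present here depends on this modified generator's configuration.
+  //
+  //   auto chain = physicalDevice.getProperties2<
+  //     vk::PhysicalDeviceProperties2, vk::PhysicalDeviceMaintenance3Properties>();
+  //   auto const& properties = chain.get<vk::PhysicalDeviceProperties2>().properties;
+  //   auto maxPerSet = chain.get<vk::PhysicalDeviceMaintenance3Properties>().maxPerSetDescriptors;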
+
+  struct PhysicalDeviceProtectedMemoryFeatures
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {} ) VULKAN_HPP_NOEXCEPT
+      : protectedMemory( protectedMemory_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , protectedMemory( rhs.protectedMemory )
+    {}
+
+    PhysicalDeviceProtectedMemoryFeatures & operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceProtectedMemoryFeatures ) - offsetof( PhysicalDeviceProtectedMemoryFeatures, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceProtectedMemoryFeatures& operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceProtectedMemoryFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
+    {
+      protectedMemory = protectedMemory_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryFeatures const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures*>( this );
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( protectedMemory == rhs.protectedMemory );
+    }
+
+    bool operator!=( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
+  };
+  static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryFeatures>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceProtectedMemoryProperties
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {} ) VULKAN_HPP_NOEXCEPT
+      : protectedNoFault( protectedNoFault_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , protectedNoFault( rhs.protectedNoFault )
+    {}
+
+    PhysicalDeviceProtectedMemoryProperties & operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceProtectedMemoryProperties ) - offsetof( PhysicalDeviceProtectedMemoryProperties, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceProtectedMemoryProperties& operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties*>( this );
+    }
+
+    operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceProtectedMemoryProperties const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceProtectedMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( protectedNoFault == rhs.protectedNoFault );
+    }
+
+    bool operator!=( PhysicalDeviceProtectedMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
+  };
+  static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDevicePushDescriptorPropertiesKHR
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = {} ) VULKAN_HPP_NOEXCEPT
+      : maxPushDescriptors( maxPushDescriptors_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , maxPushDescriptors( rhs.maxPushDescriptors )
+    {}
+
+    PhysicalDevicePushDescriptorPropertiesKHR & operator=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) - offsetof( PhysicalDevicePushDescriptorPropertiesKHR, pNext ) );
+      return *this;
+    }
+
+    PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDevicePushDescriptorPropertiesKHR& operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDevicePushDescriptorPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDevicePushDescriptorPropertiesKHR*>( this );
+    }
+
+    operator VkPhysicalDevicePushDescriptorPropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDevicePushDescriptorPropertiesKHR const& ) const = default;
+#else
+    bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( maxPushDescriptors == rhs.maxPushDescriptors );
+    }
+
+    bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
+    void* pNext = {};
+    uint32_t maxPushDescriptors = {};
+  };
+  static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDevicePushDescriptorPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceRayTracingPropertiesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( uint32_t shaderGroupHandleSize_ = {},
+                                                               uint32_t maxRecursionDepth_ = {},
+                                                               uint32_t maxShaderGroupStride_ = {},
+                                                               uint32_t shaderGroupBaseAlignment_ = {},
+                                                               uint64_t maxGeometryCount_ = {},
+                                                               uint64_t maxInstanceCount_ = {},
+                                                               uint64_t maxTriangleCount_ = {},
+                                                               uint32_t maxDescriptorSetAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT
+      : shaderGroupHandleSize( shaderGroupHandleSize_ )
+      , maxRecursionDepth( maxRecursionDepth_ )
+      , maxShaderGroupStride( maxShaderGroupStride_ )
+      , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ )
+      , maxGeometryCount( maxGeometryCount_ )
+      , maxInstanceCount( maxInstanceCount_ )
+      , maxTriangleCount( maxTriangleCount_ )
+      , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( PhysicalDeviceRayTracingPropertiesNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , shaderGroupHandleSize( rhs.shaderGroupHandleSize )
+      , maxRecursionDepth( rhs.maxRecursionDepth )
+      , maxShaderGroupStride( rhs.maxShaderGroupStride )
+      , shaderGroupBaseAlignment( rhs.shaderGroupBaseAlignment )
+      , maxGeometryCount( rhs.maxGeometryCount )
+      , maxInstanceCount( rhs.maxInstanceCount )
+      , maxTriangleCount( rhs.maxTriangleCount )
+      , maxDescriptorSetAccelerationStructures( rhs.maxDescriptorSetAccelerationStructures )
+    {}
+
+    PhysicalDeviceRayTracingPropertiesNV & operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceRayTracingPropertiesNV ) - offsetof( PhysicalDeviceRayTracingPropertiesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceRayTracingPropertiesNV& operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPhysicalDeviceRayTracingPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize )
+          && ( maxRecursionDepth == rhs.maxRecursionDepth )
+          && ( maxShaderGroupStride == rhs.maxShaderGroupStride )
+          && ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment )
+          && ( maxGeometryCount == rhs.maxGeometryCount )
+          && ( maxInstanceCount == rhs.maxInstanceCount )
+          && ( maxTriangleCount == rhs.maxTriangleCount )
+          && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures );
+    }
+
+    bool operator!=( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
+    void* pNext = {};
+    uint32_t shaderGroupHandleSize = {};
+    uint32_t maxRecursionDepth = {};
+    uint32_t maxShaderGroupStride = {};
+    uint32_t shaderGroupBaseAlignment = {};
+    uint64_t maxGeometryCount = {};
+    uint64_t maxInstanceCount = {};
+    uint64_t maxTriangleCount = {};
+    uint32_t maxDescriptorSetAccelerationStructures = {};
+  };
+  static_assert( sizeof( PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceRayTracingPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV
+  {
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {} ) VULKAN_HPP_NOEXCEPT
+      : representativeFragmentTest( representativeFragmentTest_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , representativeFragmentTest( rhs.representativeFragmentTest )
+    {}
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) - offsetof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV, pNext ) );
+      return *this;
+    }
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV& operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>(&rhs);
+      return *this;
+    }
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT
+    {
+      representativeFragmentTest = representativeFragmentTest_;
+      return *this;
+    }
+
+    operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>( this );
+    }
+
+    operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& ) const = default;
+#else
+    bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( representativeFragmentTest == rhs.representativeFragmentTest );
+    }
+
+    bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {};
+  };
+  static_assert( sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) == sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value, "struct wrapper is not a standard layout!" );
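+
+  // --- Illustrative usage sketch (editor's note, not generated code) ---
+  // The PhysicalDeviceRayTracingPropertiesNV limits above drive shader binding
+  // table layout: each group record starts at a multiple of
+  // shaderGroupBaseAlignment (a power of two per the NV extension), so a common
+  // stride computation is:
+  //
+  //   vk::PhysicalDeviceRayTracingPropertiesNV rt;  // filled via getProperties2, as above
+  //   uint32_t alignedGroupStride =
+  //     ( rt.shaderGroupHandleSize + rt.shaderGroupBaseAlignment - 1 )
+  //     & ~( rt.shaderGroupBaseAlignment - 1 );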
); + + struct PhysicalDeviceSampleLocationsPropertiesEXT + { + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, + std::array const& sampleLocationCoordinateRange_ = {}, + uint32_t sampleLocationSubPixelBits_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT + : sampleLocationSampleCounts( sampleLocationSampleCounts_ ) + , maxSampleLocationGridSize( maxSampleLocationGridSize_ ) + , sampleLocationCoordinateRange{} + , sampleLocationSubPixelBits( sampleLocationSubPixelBits_ ) + , variableSampleLocations( variableSampleLocations_ ) + { + VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( sampleLocationCoordinateRange, sampleLocationCoordinateRange_ ); + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , sampleLocationSampleCounts( rhs.sampleLocationSampleCounts ) + , maxSampleLocationGridSize( rhs.maxSampleLocationGridSize ) + , sampleLocationCoordinateRange{} + , sampleLocationSubPixelBits( rhs.sampleLocationSubPixelBits ) + , variableSampleLocations( rhs.variableSampleLocations ) + { + VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy::copy( sampleLocationCoordinateRange, rhs.sampleLocationCoordinateRange ); + } + + PhysicalDeviceSampleLocationsPropertiesEXT & operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) - offsetof( PhysicalDeviceSampleLocationsPropertiesEXT, pNext ) ); + return *this; + } + + PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceSampleLocationsPropertiesEXT& operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkPhysicalDeviceSampleLocationsPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const& ) const = default; +#else + bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts ) + && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize ) + && ( memcmp( sampleLocationCoordinateRange, rhs.sampleLocationCoordinateRange, 2 * sizeof( float ) ) == 0 ) + && ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits ) + && ( variableSampleLocations == rhs.variableSampleLocations ); + } + + bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {}; + float sampleLocationCoordinateRange[2] = {}; + 
uint32_t sampleLocationSubPixelBits = {}; + VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {}; + }; + static_assert( sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PhysicalDeviceSamplerFilterMinmaxPropertiesEXT + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {} ) VULKAN_HPP_NOEXCEPT + : filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ) + , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxPropertiesEXT( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , filterMinmaxSingleComponentFormats( rhs.filterMinmaxSingleComponentFormats ) + , filterMinmaxImageComponentMapping( rhs.filterMinmaxImageComponentMapping ) + {} + + PhysicalDeviceSamplerFilterMinmaxPropertiesEXT & operator=( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT ) - offsetof( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT, pNext ) ); + return *this; + } + + PhysicalDeviceSamplerFilterMinmaxPropertiesEXT( VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceSamplerFilterMinmaxPropertiesEXT& operator=( VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& ) const = default; +#else + bool operator==( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats ) + && ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping ); + } + + bool operator!=( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {}; + VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {}; + }; + static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxPropertiesEXT ) == sizeof( VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceSamplerYcbcrConversionFeatures + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {} ) VULKAN_HPP_NOEXCEPT + : samplerYcbcrConversion( samplerYcbcrConversion_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , samplerYcbcrConversion( rhs.samplerYcbcrConversion ) + {} + + PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) - offsetof( PhysicalDeviceSamplerYcbcrConversionFeatures, pNext ) ); + return *this; + } + + PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceSamplerYcbcrConversionFeatures& operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceSamplerYcbcrConversionFeatures & setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT + { + samplerYcbcrConversion = samplerYcbcrConversion_; + return *this; + } + + operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const& ) const = default; +#else + bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion ); + } + + bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {}; + }; + static_assert( sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) == sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceScalarBlockLayoutFeaturesEXT + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {} ) VULKAN_HPP_NOEXCEPT + : scalarBlockLayout( scalarBlockLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeaturesEXT( PhysicalDeviceScalarBlockLayoutFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , scalarBlockLayout( rhs.scalarBlockLayout ) + {} + + PhysicalDeviceScalarBlockLayoutFeaturesEXT & operator=( PhysicalDeviceScalarBlockLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceScalarBlockLayoutFeaturesEXT ) - offsetof( PhysicalDeviceScalarBlockLayoutFeaturesEXT, pNext ) ); + return *this; + } + + PhysicalDeviceScalarBlockLayoutFeaturesEXT( VkPhysicalDeviceScalarBlockLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceScalarBlockLayoutFeaturesEXT& operator=( VkPhysicalDeviceScalarBlockLayoutFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceScalarBlockLayoutFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceScalarBlockLayoutFeaturesEXT & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT + { + scalarBlockLayout = scalarBlockLayout_; + return *this; + } + + operator VkPhysicalDeviceScalarBlockLayoutFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceScalarBlockLayoutFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceScalarBlockLayoutFeaturesEXT const& ) const = default; +#else + bool operator==( PhysicalDeviceScalarBlockLayoutFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( scalarBlockLayout == rhs.scalarBlockLayout ); + } + + bool operator!=( PhysicalDeviceScalarBlockLayoutFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeaturesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {}; + }; + static_assert( sizeof( PhysicalDeviceScalarBlockLayoutFeaturesEXT ) == sizeof( VkPhysicalDeviceScalarBlockLayoutFeaturesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceShaderAtomicInt64FeaturesKHR + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ) + , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64FeaturesKHR( PhysicalDeviceShaderAtomicInt64FeaturesKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , shaderBufferInt64Atomics( rhs.shaderBufferInt64Atomics ) + , shaderSharedInt64Atomics( rhs.shaderSharedInt64Atomics ) + {} + + PhysicalDeviceShaderAtomicInt64FeaturesKHR & operator=( PhysicalDeviceShaderAtomicInt64FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderAtomicInt64FeaturesKHR ) - offsetof( PhysicalDeviceShaderAtomicInt64FeaturesKHR, pNext ) ); + return *this; + } + + PhysicalDeviceShaderAtomicInt64FeaturesKHR( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceShaderAtomicInt64FeaturesKHR& operator=( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceShaderAtomicInt64FeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderAtomicInt64FeaturesKHR & setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderBufferInt64Atomics = shaderBufferInt64Atomics_; + return *this; + } + + PhysicalDeviceShaderAtomicInt64FeaturesKHR & setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT + { + shaderSharedInt64Atomics = shaderSharedInt64Atomics_; + return *this; + } + + operator VkPhysicalDeviceShaderAtomicInt64FeaturesKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderAtomicInt64FeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderAtomicInt64FeaturesKHR const& ) const = default; +#else + bool operator==( PhysicalDeviceShaderAtomicInt64FeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics ) + && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics ); + } + + bool operator!=( PhysicalDeviceShaderAtomicInt64FeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64FeaturesKHR; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderAtomicInt64FeaturesKHR ) == sizeof( VkPhysicalDeviceShaderAtomicInt64FeaturesKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceShaderCoreProperties2AMD + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {}, + uint32_t activeComputeUnitCount_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderCoreFeatures( shaderCoreFeatures_ ) + , activeComputeUnitCount( activeComputeUnitCount_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , shaderCoreFeatures( rhs.shaderCoreFeatures ) + , activeComputeUnitCount( rhs.activeComputeUnitCount ) + {} + + PhysicalDeviceShaderCoreProperties2AMD & operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderCoreProperties2AMD ) - offsetof( PhysicalDeviceShaderCoreProperties2AMD, pNext ) ); + return *this; + } + + PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceShaderCoreProperties2AMD& operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkPhysicalDeviceShaderCoreProperties2AMD const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const& ) const = default; +#else + bool operator==( PhysicalDeviceShaderCoreProperties2AMD const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shaderCoreFeatures == rhs.shaderCoreFeatures ) + && ( activeComputeUnitCount == rhs.activeComputeUnitCount ); + } + + bool operator!=( PhysicalDeviceShaderCoreProperties2AMD const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {}; + uint32_t activeComputeUnitCount = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceShaderCorePropertiesAMD + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( uint32_t shaderEngineCount_ = {}, + uint32_t shaderArraysPerEngineCount_ = {}, + uint32_t computeUnitsPerShaderArray_ = {}, + uint32_t simdPerComputeUnit_ = {}, + uint32_t wavefrontsPerSimd_ = {}, + uint32_t wavefrontSize_ = {}, + uint32_t sgprsPerSimd_ = {}, + uint32_t minSgprAllocation_ = {}, + uint32_t maxSgprAllocation_ = {}, + uint32_t sgprAllocationGranularity_ = {}, + uint32_t vgprsPerSimd_ = {}, + uint32_t minVgprAllocation_ = {}, + uint32_t maxVgprAllocation_ = {}, + uint32_t vgprAllocationGranularity_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderEngineCount( shaderEngineCount_ ) + , shaderArraysPerEngineCount( shaderArraysPerEngineCount_ ) + , computeUnitsPerShaderArray( computeUnitsPerShaderArray_ ) + , simdPerComputeUnit( simdPerComputeUnit_ ) + , wavefrontsPerSimd( wavefrontsPerSimd_ ) + , wavefrontSize( wavefrontSize_ ) + , sgprsPerSimd( sgprsPerSimd_ ) + , minSgprAllocation( minSgprAllocation_ ) + , maxSgprAllocation( maxSgprAllocation_ ) + , sgprAllocationGranularity( sgprAllocationGranularity_ ) + , vgprsPerSimd( vgprsPerSimd_ ) + , minVgprAllocation( minVgprAllocation_ ) + , maxVgprAllocation( maxVgprAllocation_ ) + , vgprAllocationGranularity( vgprAllocationGranularity_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , shaderEngineCount( rhs.shaderEngineCount ) + , shaderArraysPerEngineCount( rhs.shaderArraysPerEngineCount ) + , computeUnitsPerShaderArray( rhs.computeUnitsPerShaderArray ) + , simdPerComputeUnit( rhs.simdPerComputeUnit ) + , wavefrontsPerSimd( rhs.wavefrontsPerSimd ) + , wavefrontSize( rhs.wavefrontSize ) + , sgprsPerSimd( rhs.sgprsPerSimd ) + , minSgprAllocation( rhs.minSgprAllocation ) + , maxSgprAllocation( rhs.maxSgprAllocation ) + , sgprAllocationGranularity( rhs.sgprAllocationGranularity ) + , vgprsPerSimd( rhs.vgprsPerSimd ) + , minVgprAllocation( rhs.minVgprAllocation ) + , maxVgprAllocation( rhs.maxVgprAllocation ) + , vgprAllocationGranularity( rhs.vgprAllocationGranularity ) + {} + + PhysicalDeviceShaderCorePropertiesAMD & operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderCorePropertiesAMD ) - offsetof( PhysicalDeviceShaderCorePropertiesAMD, pNext ) ); + return *this; + } + + PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceShaderCorePropertiesAMD& operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkPhysicalDeviceShaderCorePropertiesAMD const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const& ) const = default; +#else + bool operator==( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shaderEngineCount == rhs.shaderEngineCount ) + && ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount ) + && ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray ) + 
&& ( simdPerComputeUnit == rhs.simdPerComputeUnit ) + && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd ) + && ( wavefrontSize == rhs.wavefrontSize ) + && ( sgprsPerSimd == rhs.sgprsPerSimd ) + && ( minSgprAllocation == rhs.minSgprAllocation ) + && ( maxSgprAllocation == rhs.maxSgprAllocation ) + && ( sgprAllocationGranularity == rhs.sgprAllocationGranularity ) + && ( vgprsPerSimd == rhs.vgprsPerSimd ) + && ( minVgprAllocation == rhs.minVgprAllocation ) + && ( maxVgprAllocation == rhs.maxVgprAllocation ) + && ( vgprAllocationGranularity == rhs.vgprAllocationGranularity ); + } + + bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD; + void* pNext = {}; + uint32_t shaderEngineCount = {}; + uint32_t shaderArraysPerEngineCount = {}; + uint32_t computeUnitsPerShaderArray = {}; + uint32_t simdPerComputeUnit = {}; + uint32_t wavefrontsPerSimd = {}; + uint32_t wavefrontSize = {}; + uint32_t sgprsPerSimd = {}; + uint32_t minSgprAllocation = {}; + uint32_t maxSgprAllocation = {}; + uint32_t sgprAllocationGranularity = {}; + uint32_t vgprsPerSimd = {}; + uint32_t minVgprAllocation = {}; + uint32_t maxVgprAllocation = {}; + uint32_t vgprAllocationGranularity = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , shaderDemoteToHelperInvocation( rhs.shaderDemoteToHelperInvocation ) + {} + + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) - offsetof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, pNext ) ); + return *this; + } + + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT& operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT + { + shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_; + return *this; + } + + operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + 
} + + operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& ) const = default; +#else + bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation ); + } + + bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PhysicalDeviceShaderDrawParametersFeatures + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderDrawParameters( shaderDrawParameters_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( PhysicalDeviceShaderDrawParametersFeatures const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , shaderDrawParameters( rhs.shaderDrawParameters ) + {} + + PhysicalDeviceShaderDrawParametersFeatures & operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderDrawParametersFeatures ) - offsetof( PhysicalDeviceShaderDrawParametersFeatures, pNext ) ); + return *this; + } + + PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceShaderDrawParametersFeatures& operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceShaderDrawParametersFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderDrawParametersFeatures & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT + { + shaderDrawParameters = shaderDrawParameters_; + return *this; + } + + operator VkPhysicalDeviceShaderDrawParametersFeatures const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const& ) const = default; +#else + bool operator==( PhysicalDeviceShaderDrawParametersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shaderDrawParameters == rhs.shaderDrawParameters ); + } + + bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderDrawParametersFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PhysicalDeviceShaderFloat16Int8FeaturesKHR + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderFloat16( shaderFloat16_ ) + , shaderInt8( shaderInt8_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8FeaturesKHR( PhysicalDeviceShaderFloat16Int8FeaturesKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , shaderFloat16( rhs.shaderFloat16 ) + , shaderInt8( rhs.shaderInt8 ) + {} + + PhysicalDeviceShaderFloat16Int8FeaturesKHR & operator=( PhysicalDeviceShaderFloat16Int8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderFloat16Int8FeaturesKHR ) - offsetof( PhysicalDeviceShaderFloat16Int8FeaturesKHR, pNext ) ); + return *this; + } + + PhysicalDeviceShaderFloat16Int8FeaturesKHR( VkPhysicalDeviceShaderFloat16Int8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceShaderFloat16Int8FeaturesKHR& operator=( VkPhysicalDeviceShaderFloat16Int8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceShaderFloat16Int8FeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderFloat16Int8FeaturesKHR & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT + { + shaderFloat16 = shaderFloat16_; + return *this; + } + + PhysicalDeviceShaderFloat16Int8FeaturesKHR & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT + { + shaderInt8 = shaderInt8_; + return *this; + } + + operator VkPhysicalDeviceShaderFloat16Int8FeaturesKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderFloat16Int8FeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderFloat16Int8FeaturesKHR const& ) const = default; +#else + bool operator==( PhysicalDeviceShaderFloat16Int8FeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shaderFloat16 == rhs.shaderFloat16 ) + && ( shaderInt8 == rhs.shaderInt8 ); + } + + bool operator!=( PhysicalDeviceShaderFloat16Int8FeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8FeaturesKHR; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderFloat16Int8FeaturesKHR ) == sizeof( VkPhysicalDeviceShaderFloat16Int8FeaturesKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceShaderImageFootprintFeaturesNV + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {} ) VULKAN_HPP_NOEXCEPT + : imageFootprint( imageFootprint_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , imageFootprint( rhs.imageFootprint ) + {} + + PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) - offsetof( PhysicalDeviceShaderImageFootprintFeaturesNV, pNext ) ); + return *this; + } + + PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceShaderImageFootprintFeaturesNV& operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceShaderImageFootprintFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderImageFootprintFeaturesNV & setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT + { + imageFootprint = imageFootprint_; + return *this; + } + + operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const& ) const = default; +#else + bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( imageFootprint == rhs.imageFootprint ); + } + + bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) == sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderIntegerFunctions2( shaderIntegerFunctions2_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , shaderIntegerFunctions2( rhs.shaderIntegerFunctions2 ) + {} + + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) - offsetof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, pNext ) ); + return *this; + } + + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL& operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT + { + shaderIntegerFunctions2 = shaderIntegerFunctions2_; + return *this; + } + + operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& ) const = default; +#else + bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 ); + } + + bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2 = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceShaderSMBuiltinsFeaturesNV + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderSMBuiltins( shaderSMBuiltins_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , shaderSMBuiltins( rhs.shaderSMBuiltins ) + {} + + PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) - offsetof( PhysicalDeviceShaderSMBuiltinsFeaturesNV, pNext ) ); + return *this; + } + + PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceShaderSMBuiltinsFeaturesNV& operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceShaderSMBuiltinsFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShaderSMBuiltinsFeaturesNV & setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT + { + shaderSMBuiltins = shaderSMBuiltins_; + return *this; + } + + operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& ) const = default; +#else + bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shaderSMBuiltins == rhs.shaderSMBuiltins ); + } + + bool operator!=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceShaderSMBuiltinsPropertiesNV + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_ = {}, + uint32_t shaderWarpsPerSM_ = {} ) VULKAN_HPP_NOEXCEPT + : shaderSMCount( shaderSMCount_ ) + , shaderWarpsPerSM( shaderWarpsPerSM_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , shaderSMCount( rhs.shaderSMCount ) + , shaderWarpsPerSM( rhs.shaderWarpsPerSM ) + {} + + PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) - offsetof( PhysicalDeviceShaderSMBuiltinsPropertiesNV, pNext ) ); + return *this; + } + + PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceShaderSMBuiltinsPropertiesNV& operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& ) const = default; +#else + bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shaderSMCount == rhs.shaderSMCount ) + && ( shaderWarpsPerSM == rhs.shaderWarpsPerSM ); + } + + bool operator!=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV; + void* pNext = {}; + uint32_t shaderSMCount = {}; + uint32_t shaderWarpsPerSM = {}; + }; + static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceShadingRateImageFeaturesNV + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {} ) VULKAN_HPP_NOEXCEPT + : shadingRateImage( shadingRateImage_ ) + , shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , shadingRateImage( rhs.shadingRateImage ) + , shadingRateCoarseSampleOrder( rhs.shadingRateCoarseSampleOrder ) + {} + + PhysicalDeviceShadingRateImageFeaturesNV & operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) - offsetof( PhysicalDeviceShadingRateImageFeaturesNV, pNext ) ); + return *this; + } + + PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceShadingRateImageFeaturesNV& operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceShadingRateImageFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateImage = shadingRateImage_; + return *this; + } + + PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT + { + shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_; + return *this; + } + + operator VkPhysicalDeviceShadingRateImageFeaturesNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const& ) const = default; +#else + bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shadingRateImage == rhs.shadingRateImage ) + && ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder ); + } + + bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {}; + VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {}; + }; + static_assert( sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) == sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceShadingRateImagePropertiesNV + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {}, + uint32_t shadingRatePaletteSize_ = {}, + uint32_t shadingRateMaxCoarseSamples_ = {} ) VULKAN_HPP_NOEXCEPT + : shadingRateTexelSize( shadingRateTexelSize_ ) + , shadingRatePaletteSize( shadingRatePaletteSize_ ) + , shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , shadingRateTexelSize( rhs.shadingRateTexelSize ) + , shadingRatePaletteSize( rhs.shadingRatePaletteSize ) + , shadingRateMaxCoarseSamples( rhs.shadingRateMaxCoarseSamples ) + {} + + PhysicalDeviceShadingRateImagePropertiesNV & operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) - offsetof( PhysicalDeviceShadingRateImagePropertiesNV, pNext ) ); + return *this; + } + + PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceShadingRateImagePropertiesNV& operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkPhysicalDeviceShadingRateImagePropertiesNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceShadingRateImagePropertiesNV const& ) const = default; +#else + bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( shadingRateTexelSize == rhs.shadingRateTexelSize ) + && ( shadingRatePaletteSize == rhs.shadingRatePaletteSize ) + && ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples ); + } + + bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {}; + uint32_t shadingRatePaletteSize = {}; + uint32_t shadingRateMaxCoarseSamples = {}; + }; + static_assert( sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) == sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceSparseImageFormatInfo2 + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, + VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal ) VULKAN_HPP_NOEXCEPT + : format( format_ ) + , type( type_ ) + , samples( samples_ ) + , usage( usage_ ) + , tiling( tiling_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , format( rhs.format ) + , type( rhs.type ) + , samples( rhs.samples ) + , usage( rhs.usage ) + , tiling( rhs.tiling ) + {} + + PhysicalDeviceSparseImageFormatInfo2 & operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSparseImageFormatInfo2 ) - offsetof( PhysicalDeviceSparseImageFormatInfo2, pNext ) ); + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceSparseImageFormatInfo2& operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT + { + format = format_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT + { + samples = samples_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT + { + usage = usage_; + return *this; + } + + PhysicalDeviceSparseImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT + { + tiling = tiling_; + return *this; + } + + operator VkPhysicalDeviceSparseImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const& ) const = default; +#else + bool operator==( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( format == rhs.format ) + && ( type == rhs.type ) + && ( samples == rhs.samples ) + && ( usage == rhs.usage ) + && ( tiling == rhs.tiling ); + } + + bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Format format = 
VULKAN_HPP_NAMESPACE::Format::eUndefined; + VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D; + VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1; + VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {}; + VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal; + }; + static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PhysicalDeviceSubgroupProperties + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( uint32_t subgroupSize_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {}, + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {} ) VULKAN_HPP_NOEXCEPT + : subgroupSize( subgroupSize_ ) + , supportedStages( supportedStages_ ) + , supportedOperations( supportedOperations_ ) + , quadOperationsInAllStages( quadOperationsInAllStages_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , subgroupSize( rhs.subgroupSize ) + , supportedStages( rhs.supportedStages ) + , supportedOperations( rhs.supportedOperations ) + , quadOperationsInAllStages( rhs.quadOperationsInAllStages ) + {} + + PhysicalDeviceSubgroupProperties & operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSubgroupProperties ) - offsetof( PhysicalDeviceSubgroupProperties, pNext ) ); + return *this; + } + + PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceSubgroupProperties& operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkPhysicalDeviceSubgroupProperties const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceSubgroupProperties const& ) const = default; +#else + bool operator==( PhysicalDeviceSubgroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( subgroupSize == rhs.subgroupSize ) + && ( supportedStages == rhs.supportedStages ) + && ( supportedOperations == rhs.supportedOperations ) + && ( quadOperationsInAllStages == rhs.quadOperationsInAllStages ); + } + + bool operator!=( PhysicalDeviceSubgroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties; + void* pNext = {}; + uint32_t subgroupSize = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {}; + VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {}; + VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {}; + }; + static_assert( sizeof( PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), "struct and wrapper have different size!" 
); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PhysicalDeviceSubgroupSizeControlFeaturesEXT + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {} ) VULKAN_HPP_NOEXCEPT + : subgroupSizeControl( subgroupSizeControl_ ) + , computeFullSubgroups( computeFullSubgroups_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , subgroupSizeControl( rhs.subgroupSizeControl ) + , computeFullSubgroups( rhs.computeFullSubgroups ) + {} + + PhysicalDeviceSubgroupSizeControlFeaturesEXT & operator=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) - offsetof( PhysicalDeviceSubgroupSizeControlFeaturesEXT, pNext ) ); + return *this; + } + + PhysicalDeviceSubgroupSizeControlFeaturesEXT( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceSubgroupSizeControlFeaturesEXT& operator=( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceSubgroupSizeControlFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceSubgroupSizeControlFeaturesEXT & setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT + { + subgroupSizeControl = subgroupSizeControl_; + return *this; + } + + PhysicalDeviceSubgroupSizeControlFeaturesEXT & setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT + { + computeFullSubgroups = computeFullSubgroups_; + return *this; + } + + operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& ) const = default; +#else + bool operator==( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( subgroupSizeControl == rhs.subgroupSizeControl ) + && ( computeFullSubgroups == rhs.computeFullSubgroups ); + } + + bool operator!=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {}; + VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {}; + }; + static_assert( sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceSubgroupSizeControlPropertiesEXT + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT( uint32_t minSubgroupSize_ = {}, + uint32_t maxSubgroupSize_ = {}, + uint32_t maxComputeWorkgroupSubgroups_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {} ) VULKAN_HPP_NOEXCEPT + : minSubgroupSize( minSubgroupSize_ ) + , maxSubgroupSize( maxSubgroupSize_ ) + , maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ ) + , requiredSubgroupSizeStages( requiredSubgroupSizeStages_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , minSubgroupSize( rhs.minSubgroupSize ) + , maxSubgroupSize( rhs.maxSubgroupSize ) + , maxComputeWorkgroupSubgroups( rhs.maxComputeWorkgroupSubgroups ) + , requiredSubgroupSizeStages( rhs.requiredSubgroupSizeStages ) + {} + + PhysicalDeviceSubgroupSizeControlPropertiesEXT & operator=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) - offsetof( PhysicalDeviceSubgroupSizeControlPropertiesEXT, pNext ) ); + return *this; + } + + PhysicalDeviceSubgroupSizeControlPropertiesEXT( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceSubgroupSizeControlPropertiesEXT& operator=( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<PhysicalDeviceSubgroupSizeControlPropertiesEXT const*>(&rhs); + return *this; + } + + operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT*>( this ); + } + + operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlPropertiesEXT*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& ) const = default; +#else + bool operator==( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( minSubgroupSize == rhs.minSubgroupSize ) + && ( maxSubgroupSize == rhs.maxSubgroupSize ) + && ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups ) + && ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages ); + } + + bool operator!=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT; + void* pNext = {}; + uint32_t minSubgroupSize = {}; + uint32_t maxSubgroupSize = {}; + uint32_t maxComputeWorkgroupSubgroups = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {}; + }; + static_assert( sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PhysicalDeviceSubgroupSizeControlPropertiesEXT>::value, "struct wrapper is not a standard layout!"
); + + struct PhysicalDeviceSurfaceInfo2KHR + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {} ) VULKAN_HPP_NOEXCEPT + : surface( surface_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( PhysicalDeviceSurfaceInfo2KHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , surface( rhs.surface ) + {} + + PhysicalDeviceSurfaceInfo2KHR & operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceSurfaceInfo2KHR ) - offsetof( PhysicalDeviceSurfaceInfo2KHR, pNext ) ); + return *this; + } + + PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceSurfaceInfo2KHR& operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<PhysicalDeviceSurfaceInfo2KHR const*>(&rhs); + return *this; + } + + PhysicalDeviceSurfaceInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT + { + surface = surface_; + return *this; + } + + operator VkPhysicalDeviceSurfaceInfo2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( this ); + } + + operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const& ) const = default; +#else + bool operator==( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( surface == rhs.surface ); + } + + bool operator!=( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {}; + }; + static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PhysicalDeviceSurfaceInfo2KHR>::value, "struct wrapper is not a standard layout!"
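A sketch of the intended call path (illustrative; assumes VK_KHR_get_surface_capabilities2 is enabled and surface is a valid vk::SurfaceKHR): PhysicalDeviceSurfaceInfo2KHR wraps the surface so extension query structs can be chained onto its pNext:

    auto surfaceInfo = vk::PhysicalDeviceSurfaceInfo2KHR().setSurface( surface );
    vk::SurfaceCapabilities2KHR capabilities = physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo );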
); + + struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + : texelBufferAlignment( texelBufferAlignment_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , texelBufferAlignment( rhs.texelBufferAlignment ) + {} + + PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) - offsetof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT, pNext ) ); + return *this; + } + + PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceTexelBufferAlignmentFeaturesEXT& operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT + { + texelBufferAlignment = texelBufferAlignment_; + return *this; + } + + operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& ) const = default; +#else + bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( texelBufferAlignment == rhs.texelBufferAlignment ); + } + + bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {}; + }; + static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceTexelBufferAlignmentPropertiesEXT + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {} ) VULKAN_HPP_NOEXCEPT + : storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ ) + , storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ ) + , uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ ) + , uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , storageTexelBufferOffsetAlignmentBytes( rhs.storageTexelBufferOffsetAlignmentBytes ) + , storageTexelBufferOffsetSingleTexelAlignment( rhs.storageTexelBufferOffsetSingleTexelAlignment ) + , uniformTexelBufferOffsetAlignmentBytes( rhs.uniformTexelBufferOffsetAlignmentBytes ) + , uniformTexelBufferOffsetSingleTexelAlignment( rhs.uniformTexelBufferOffsetSingleTexelAlignment ) + {} + + PhysicalDeviceTexelBufferAlignmentPropertiesEXT & operator=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) - offsetof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT, pNext ) ); + return *this; + } + + PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceTexelBufferAlignmentPropertiesEXT& operator=( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& ) const = default; +#else + bool operator==( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes ) + && ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment ) + && ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes ) + && ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment ); + } + + bool operator!=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {}; + VULKAN_HPP_NAMESPACE::Bool32 
storageTexelBufferOffsetSingleTexelAlignment = {}; + VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {}; + }; + static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {} ) VULKAN_HPP_NOEXCEPT + : textureCompressionASTC_HDR( textureCompressionASTC_HDR_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , textureCompressionASTC_HDR( rhs.textureCompressionASTC_HDR ) + {} + + PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & operator=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) - offsetof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, pNext ) ); + return *this; + } + + PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT& operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT + { + textureCompressionASTC_HDR = textureCompressionASTC_HDR_; + return *this; + } + + operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& ) const = default; +#else + bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR ); + } + + bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {}; + }; + static_assert( sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) == sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceTransformFeedbackFeaturesEXT + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {} ) VULKAN_HPP_NOEXCEPT + : transformFeedback( transformFeedback_ ) + , geometryStreams( geometryStreams_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , transformFeedback( rhs.transformFeedback ) + , geometryStreams( rhs.geometryStreams ) + {} + + PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) - offsetof( PhysicalDeviceTransformFeedbackFeaturesEXT, pNext ) ); + return *this; + } + + PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceTransformFeedbackFeaturesEXT& operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<PhysicalDeviceTransformFeedbackFeaturesEXT const*>(&rhs); + return *this; + } + + PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceTransformFeedbackFeaturesEXT & setTransformFeedback( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT + { + transformFeedback = transformFeedback_; + return *this; + } + + PhysicalDeviceTransformFeedbackFeaturesEXT & setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT + { + geometryStreams = geometryStreams_; + return *this; + } + + operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackFeaturesEXT*>( this ); + } + + operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const& ) const = default; +#else + bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( transformFeedback == rhs.transformFeedback ) + && ( geometryStreams == rhs.geometryStreams ); + } + + bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {}; + VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {}; + }; + static_assert( sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PhysicalDeviceTransformFeedbackFeaturesEXT>::value, "struct wrapper is not a standard layout!"
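Feature structs such as the one above are also how features get enabled, not just queried; a sketch, assuming VK_EXT_transform_feedback is supported and deviceCreateInfo is the vk::DeviceCreateInfo being assembled elsewhere:

    vk::PhysicalDeviceTransformFeedbackFeaturesEXT transformFeedbackFeatures;
    transformFeedbackFeatures.setTransformFeedback( VK_TRUE )
                             .setGeometryStreams( VK_TRUE );
    deviceCreateInfo.pNext = &transformFeedbackFeatures; // honored by vkCreateDevice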
); + + struct PhysicalDeviceTransformFeedbackPropertiesEXT + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( uint32_t maxTransformFeedbackStreams_ = {}, + uint32_t maxTransformFeedbackBuffers_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {}, + uint32_t maxTransformFeedbackStreamDataSize_ = {}, + uint32_t maxTransformFeedbackBufferDataSize_ = {}, + uint32_t maxTransformFeedbackBufferDataStride_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {} ) VULKAN_HPP_NOEXCEPT + : maxTransformFeedbackStreams( maxTransformFeedbackStreams_ ) + , maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ ) + , maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ ) + , maxTransformFeedbackStreamDataSize( maxTransformFeedbackStreamDataSize_ ) + , maxTransformFeedbackBufferDataSize( maxTransformFeedbackBufferDataSize_ ) + , maxTransformFeedbackBufferDataStride( maxTransformFeedbackBufferDataStride_ ) + , transformFeedbackQueries( transformFeedbackQueries_ ) + , transformFeedbackStreamsLinesTriangles( transformFeedbackStreamsLinesTriangles_ ) + , transformFeedbackRasterizationStreamSelect( transformFeedbackRasterizationStreamSelect_ ) + , transformFeedbackDraw( transformFeedbackDraw_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , maxTransformFeedbackStreams( rhs.maxTransformFeedbackStreams ) + , maxTransformFeedbackBuffers( rhs.maxTransformFeedbackBuffers ) + , maxTransformFeedbackBufferSize( rhs.maxTransformFeedbackBufferSize ) + , maxTransformFeedbackStreamDataSize( rhs.maxTransformFeedbackStreamDataSize ) + , maxTransformFeedbackBufferDataSize( rhs.maxTransformFeedbackBufferDataSize ) + , maxTransformFeedbackBufferDataStride( rhs.maxTransformFeedbackBufferDataStride ) + , transformFeedbackQueries( rhs.transformFeedbackQueries ) + , transformFeedbackStreamsLinesTriangles( rhs.transformFeedbackStreamsLinesTriangles ) + , transformFeedbackRasterizationStreamSelect( rhs.transformFeedbackRasterizationStreamSelect ) + , transformFeedbackDraw( rhs.transformFeedbackDraw ) + {} + + PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) - offsetof( PhysicalDeviceTransformFeedbackPropertiesEXT, pNext ) ); + return *this; + } + + PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceTransformFeedbackPropertiesEXT& operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const& ) const = default; +#else + bool operator==( 
PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams ) + && ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers ) + && ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize ) + && ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize ) + && ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize ) + && ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride ) + && ( transformFeedbackQueries == rhs.transformFeedbackQueries ) + && ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles ) + && ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect ) + && ( transformFeedbackDraw == rhs.transformFeedbackDraw ); + } + + bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT; + void* pNext = {}; + uint32_t maxTransformFeedbackStreams = {}; + uint32_t maxTransformFeedbackBuffers = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize = {}; + uint32_t maxTransformFeedbackStreamDataSize = {}; + uint32_t maxTransformFeedbackBufferDataSize = {}; + uint32_t maxTransformFeedbackBufferDataStride = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {}; + VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {}; + }; + static_assert( sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {} ) VULKAN_HPP_NOEXCEPT + : uniformBufferStandardLayout( uniformBufferStandardLayout_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , uniformBufferStandardLayout( rhs.uniformBufferStandardLayout ) + {} + + PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR & operator=( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR ) - offsetof( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR, pNext ) ); + return *this; + } + + PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR& operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT + { + uniformBufferStandardLayout = uniformBufferStandardLayout_; + return *this; + } + + operator VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const& ) const = default; +#else + bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout ); + } + + bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {}; + }; + static_assert( sizeof( PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR ) == sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceVariablePointersFeatures + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {} ) VULKAN_HPP_NOEXCEPT + : variablePointersStorageBuffer( variablePointersStorageBuffer_ ) + , variablePointers( variablePointers_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , variablePointersStorageBuffer( rhs.variablePointersStorageBuffer ) + , variablePointers( rhs.variablePointers ) + {} + + PhysicalDeviceVariablePointersFeatures & operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVariablePointersFeatures ) - offsetof( PhysicalDeviceVariablePointersFeatures, pNext ) ); + return *this; + } + + PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceVariablePointersFeatures& operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<PhysicalDeviceVariablePointersFeatures const*>(&rhs); + return *this; + } + + PhysicalDeviceVariablePointersFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceVariablePointersFeatures & setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT + { + variablePointersStorageBuffer = variablePointersStorageBuffer_; + return *this; + } + + PhysicalDeviceVariablePointersFeatures & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT + { + variablePointers = variablePointers_; + return *this; + } + + operator VkPhysicalDeviceVariablePointersFeatures const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkPhysicalDeviceVariablePointersFeatures*>( this ); + } + + operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceVariablePointersFeatures const& ) const = default; +#else + bool operator==( PhysicalDeviceVariablePointersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer ) + && ( variablePointers == rhs.variablePointers ); + } + + bool operator!=( PhysicalDeviceVariablePointersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {}; + VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {}; + }; + static_assert( sizeof( PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<PhysicalDeviceVariablePointersFeatures>::value, "struct wrapper is not a standard layout!"
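Several such feature structs can be queried in one call through the StructureChain helper declared elsewhere in these headers; a sketch, assuming a Vulkan 1.1 physicalDevice:

    auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVariablePointersFeatures>();
    vk::Bool32 hasVariablePointers = chain.get<vk::PhysicalDeviceVariablePointersFeatures>().variablePointers;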
); + + struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {} ) VULKAN_HPP_NOEXCEPT + : vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ ) + , vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , vertexAttributeInstanceRateDivisor( rhs.vertexAttributeInstanceRateDivisor ) + , vertexAttributeInstanceRateZeroDivisor( rhs.vertexAttributeInstanceRateZeroDivisor ) + {} + + PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) - offsetof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT, pNext ) ); + return *this; + } + + PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceVertexAttributeDivisorFeaturesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_; + return *this; + } + + PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_; + return *this; + } + + operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& ) const = default; +#else + bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) + && ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor ); + } + + bool operator!=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {}; + }; + static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) 
== sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {} ) VULKAN_HPP_NOEXCEPT + : maxVertexAttribDivisor( maxVertexAttribDivisor_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , maxVertexAttribDivisor( rhs.maxVertexAttribDivisor ) + {} + + PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) - offsetof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT, pNext ) ); + return *this; + } + + PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceVertexAttributeDivisorPropertiesEXT& operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& ) const = default; +#else + bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ); + } + + bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT; + void* pNext = {}; + uint32_t maxVertexAttribDivisor = {}; + }; + static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct PhysicalDeviceVulkanMemoryModelFeaturesKHR + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {} ) VULKAN_HPP_NOEXCEPT + : vulkanMemoryModel( vulkanMemoryModel_ ) + , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ) + , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeaturesKHR( PhysicalDeviceVulkanMemoryModelFeaturesKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , vulkanMemoryModel( rhs.vulkanMemoryModel ) + , vulkanMemoryModelDeviceScope( rhs.vulkanMemoryModelDeviceScope ) + , vulkanMemoryModelAvailabilityVisibilityChains( rhs.vulkanMemoryModelAvailabilityVisibilityChains ) + {} + + PhysicalDeviceVulkanMemoryModelFeaturesKHR & operator=( PhysicalDeviceVulkanMemoryModelFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceVulkanMemoryModelFeaturesKHR ) - offsetof( PhysicalDeviceVulkanMemoryModelFeaturesKHR, pNext ) ); + return *this; + } + + PhysicalDeviceVulkanMemoryModelFeaturesKHR( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceVulkanMemoryModelFeaturesKHR& operator=( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceVulkanMemoryModelFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceVulkanMemoryModelFeaturesKHR & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT + { + vulkanMemoryModel = vulkanMemoryModel_; + return *this; + } + + PhysicalDeviceVulkanMemoryModelFeaturesKHR & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT + { + vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_; + return *this; + } + + PhysicalDeviceVulkanMemoryModelFeaturesKHR & setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT + { + vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_; + return *this; + } + + operator VkPhysicalDeviceVulkanMemoryModelFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVulkanMemoryModelFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceVulkanMemoryModelFeaturesKHR const& ) const = default; +#else + bool operator==( PhysicalDeviceVulkanMemoryModelFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( vulkanMemoryModel == rhs.vulkanMemoryModel ) + && ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope ) + && ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains ); + } + + bool operator!=( PhysicalDeviceVulkanMemoryModelFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType 
= StructureType::ePhysicalDeviceVulkanMemoryModelFeaturesKHR; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {}; + VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {}; + }; + static_assert( sizeof( PhysicalDeviceVulkanMemoryModelFeaturesKHR ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeaturesKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PhysicalDeviceYcbcrImageArraysFeaturesEXT + { + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {} ) VULKAN_HPP_NOEXCEPT + : ycbcrImageArrays( ycbcrImageArrays_ ) + {} + + VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , ycbcrImageArrays( rhs.ycbcrImageArrays ) + {} + + PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) - offsetof( PhysicalDeviceYcbcrImageArraysFeaturesEXT, pNext ) ); + return *this; + } + + PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PhysicalDeviceYcbcrImageArraysFeaturesEXT& operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PhysicalDeviceYcbcrImageArraysFeaturesEXT & setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT + { + ycbcrImageArrays = ycbcrImageArrays_; + return *this; + } + + operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& ) const = default; +#else + bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( ycbcrImageArrays == rhs.ycbcrImageArrays ); + } + + bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {}; + }; + static_assert( sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkPipeline.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkPipeline.hpp new file mode 100644 index 00000000..267e7bed --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkPipeline.hpp @@ -0,0 +1,3258 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
+ +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkObject.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkExtension.hpp" +#include "VkCoarse.hpp" +#include "VkCommand.hpp" +#include "VkMetal.hpp" +#include "VkFormat.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkPast.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkFence.hpp" +#include "VkDedicated.hpp" +#include "VkDraw.hpp" +#include "VkDispatch.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkExtent.hpp" +#include "VkPerformance.hpp" +#include "VkFilter.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkMultisample.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkOffset.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" +#include "VkPhysical.hpp" +#include "VkPipeline.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct PipelineCacheCreateInfo + { + VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {}, + size_t initialDataSize_ = {}, + const void* pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , initialDataSize( initialDataSize_ ) + , pInitialData( pInitialData_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( PipelineCacheCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , flags( rhs.flags ) + , initialDataSize( rhs.initialDataSize ) + , pInitialData( rhs.pInitialData ) + {} + + PipelineCacheCreateInfo & operator=( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PipelineCacheCreateInfo ) - offsetof( PipelineCacheCreateInfo, pNext ) ); + return *this; + } + + PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PipelineCacheCreateInfo& operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PipelineCacheCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT + { + initialDataSize = initialDataSize_; + return *this; + } + + PipelineCacheCreateInfo & setPInitialData( const void* pInitialData_ ) VULKAN_HPP_NOEXCEPT + { + pInitialData = pInitialData_; + return *this; + } + + operator VkPipelineCacheCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineCacheCreateInfo const& ) const = default; +#else + bool operator==( PipelineCacheCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( initialDataSize == rhs.initialDataSize ) + && ( pInitialData == rhs.pInitialData ); + } + + bool operator!=( PipelineCacheCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {}; + size_t initialDataSize = {}; + const void* pInitialData = {}; + }; + static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PipelineColorBlendAdvancedStateCreateInfoEXT + { + VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated ) VULKAN_HPP_NOEXCEPT + : srcPremultiplied( srcPremultiplied_ ) + , dstPremultiplied( dstPremultiplied_ ) + , blendOverlap( blendOverlap_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , srcPremultiplied( rhs.srcPremultiplied ) + , dstPremultiplied( rhs.dstPremultiplied ) + , blendOverlap( rhs.blendOverlap ) + {} + + PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) - offsetof( PipelineColorBlendAdvancedStateCreateInfoEXT, pNext ) ); + return *this; + } + + PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PipelineColorBlendAdvancedStateCreateInfoEXT& operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineColorBlendAdvancedStateCreateInfoEXT & setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT + { + srcPremultiplied = srcPremultiplied_; + return *this; + } + + PipelineColorBlendAdvancedStateCreateInfoEXT & setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT + { + dstPremultiplied = dstPremultiplied_; + return *this; + } + + PipelineColorBlendAdvancedStateCreateInfoEXT & setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT + { + blendOverlap = blendOverlap_; + return *this; + } + + operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if 
defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const& ) const = default; +#else + bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcPremultiplied == rhs.srcPremultiplied ) + && ( dstPremultiplied == rhs.dstPremultiplied ) + && ( blendOverlap == rhs.blendOverlap ); + } + + bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {}; + VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {}; + VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated; + }; + static_assert( sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) == sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PipelineCompilerControlCreateInfoAMD + { + VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : compilerControlFlags( compilerControlFlags_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( PipelineCompilerControlCreateInfoAMD const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , compilerControlFlags( rhs.compilerControlFlags ) + {} + + PipelineCompilerControlCreateInfoAMD & operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PipelineCompilerControlCreateInfoAMD ) - offsetof( PipelineCompilerControlCreateInfoAMD, pNext ) ); + return *this; + } + + PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PipelineCompilerControlCreateInfoAMD& operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PipelineCompilerControlCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineCompilerControlCreateInfoAMD & setCompilerControlFlags( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT + { + compilerControlFlags = compilerControlFlags_; + return *this; + } + + operator VkPipelineCompilerControlCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCompilerControlCreateInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineCompilerControlCreateInfoAMD const& ) const = default; +#else + bool operator==( PipelineCompilerControlCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( compilerControlFlags == rhs.compilerControlFlags ); + } + + bool operator!=( PipelineCompilerControlCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePipelineCompilerControlCreateInfoAMD; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags = {}; + }; + static_assert( sizeof( PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PipelineCoverageModulationStateCreateInfoNV + { + VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {}, + VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone, + VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {}, + uint32_t coverageModulationTableCount_ = {}, + const float* pCoverageModulationTable_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , coverageModulationMode( coverageModulationMode_ ) + , coverageModulationTableEnable( coverageModulationTableEnable_ ) + , coverageModulationTableCount( coverageModulationTableCount_ ) + , pCoverageModulationTable( pCoverageModulationTable_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , flags( rhs.flags ) + , coverageModulationMode( rhs.coverageModulationMode ) + , coverageModulationTableEnable( rhs.coverageModulationTableEnable ) + , coverageModulationTableCount( rhs.coverageModulationTableCount ) + , pCoverageModulationTable( rhs.pCoverageModulationTable ) + {} + + PipelineCoverageModulationStateCreateInfoNV & operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PipelineCoverageModulationStateCreateInfoNV ) - offsetof( PipelineCoverageModulationStateCreateInfoNV, pNext ) ); + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PipelineCoverageModulationStateCreateInfoNV& operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationMode( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT + { + coverageModulationMode = coverageModulationMode_; + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT + { + coverageModulationTableEnable = coverageModulationTableEnable_; + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT + { + coverageModulationTableCount = coverageModulationTableCount_; + return *this; + } + + PipelineCoverageModulationStateCreateInfoNV & setPCoverageModulationTable( const float* pCoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT + 
+
+  struct PipelineCoverageReductionStateCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , coverageReductionMode( coverageReductionMode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( PipelineCoverageReductionStateCreateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , coverageReductionMode( rhs.coverageReductionMode )
+    {}
+
+    PipelineCoverageReductionStateCreateInfoNV & operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineCoverageReductionStateCreateInfoNV ) - offsetof( PipelineCoverageReductionStateCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineCoverageReductionStateCreateInfoNV& operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    PipelineCoverageReductionStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineCoverageReductionStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineCoverageReductionStateCreateInfoNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageReductionMode = coverageReductionMode_;
+      return *this;
+    }
+
+    operator VkPipelineCoverageReductionStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCoverageReductionStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineCoverageReductionStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineCoverageReductionStateCreateInfoNV const& ) const = default;
+#else
+    bool operator==( PipelineCoverageReductionStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( coverageReductionMode == rhs.coverageReductionMode );
+    }
+
+    bool operator!=( PipelineCoverageReductionStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {};
+    VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
+  };
+  static_assert( sizeof( PipelineCoverageReductionStateCreateInfoNV ) == sizeof( VkPipelineCoverageReductionStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCoverageReductionStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineCoverageToColorStateCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {},
+                                                                   VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {},
+                                                                   uint32_t coverageToColorLocation_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , coverageToColorEnable( coverageToColorEnable_ )
+      , coverageToColorLocation( coverageToColorLocation_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( PipelineCoverageToColorStateCreateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , coverageToColorEnable( rhs.coverageToColorEnable )
+      , coverageToColorLocation( rhs.coverageToColorLocation )
+    {}
+
+    PipelineCoverageToColorStateCreateInfoNV & operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineCoverageToColorStateCreateInfoNV ) - offsetof( PipelineCoverageToColorStateCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineCoverageToColorStateCreateInfoNV& operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    PipelineCoverageToColorStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineCoverageToColorStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageToColorEnable = coverageToColorEnable_;
+      return *this;
+    }
+
+    PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT
+    {
+      coverageToColorLocation = coverageToColorLocation_;
+      return *this;
+    }
+
+    operator VkPipelineCoverageToColorStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCoverageToColorStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineCoverageToColorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineCoverageToColorStateCreateInfoNV const& ) const = default;
+#else
+    bool operator==( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( coverageToColorEnable == rhs.coverageToColorEnable )
+          && ( coverageToColorLocation == rhs.coverageToColorLocation );
+    }
+
+    bool operator!=( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable = {};
+    uint32_t coverageToColorLocation = {};
+  };
+  static_assert( sizeof( PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCoverageToColorStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
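+
+  // Sketch (comment only): the three NV coverage structs above extend
+  // VkPipelineMultisampleStateCreateInfo, so they are attached through its pNext
+  // chain rather than passed directly. Assuming the default `vk` namespace:
+  //
+  //   vk::PipelineCoverageToColorStateCreateInfoNV covToColor( {}, VK_TRUE, 0 );
+  //   vk::PipelineMultisampleStateCreateInfo msState = vk::PipelineMultisampleStateCreateInfo()
+  //     .setRasterizationSamples( vk::SampleCountFlagBits::e4 )
+  //     .setPNext( &covToColor );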
+
+  struct PipelineCreationFeedbackEXT
+  {
+    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags_ = {},
+                                                      uint64_t duration_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , duration( duration_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackEXT( PipelineCreationFeedbackEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : flags( rhs.flags )
+      , duration( rhs.duration )
+    {}
+
+    PipelineCreationFeedbackEXT & operator=( PipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( PipelineCreationFeedbackEXT ) );
+      return *this;
+    }
+
+    PipelineCreationFeedbackEXT( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineCreationFeedbackEXT& operator=( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPipelineCreationFeedbackEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCreationFeedbackEXT*>( this );
+    }
+
+    operator VkPipelineCreationFeedbackEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCreationFeedbackEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineCreationFeedbackEXT const& ) const = default;
+#else
+    bool operator==( PipelineCreationFeedbackEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( flags == rhs.flags )
+          && ( duration == rhs.duration );
+    }
+
+    bool operator!=( PipelineCreationFeedbackEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags = {};
+    uint64_t duration = {};
+  };
+  static_assert( sizeof( PipelineCreationFeedbackEXT ) == sizeof( VkPipelineCreationFeedbackEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCreationFeedbackEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineCreationFeedbackCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ = {},
+                                                                uint32_t pipelineStageCreationFeedbackCount_ = {},
+                                                                VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pPipelineCreationFeedback( pPipelineCreationFeedback_ )
+      , pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ )
+      , pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT( PipelineCreationFeedbackCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , pPipelineCreationFeedback( rhs.pPipelineCreationFeedback )
+      , pipelineStageCreationFeedbackCount( rhs.pipelineStageCreationFeedbackCount )
+      , pPipelineStageCreationFeedbacks( rhs.pPipelineStageCreationFeedbacks )
+    {}
+
+    PipelineCreationFeedbackCreateInfoEXT & operator=( PipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineCreationFeedbackCreateInfoEXT ) - offsetof( PipelineCreationFeedbackCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PipelineCreationFeedbackCreateInfoEXT( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineCreationFeedbackCreateInfoEXT& operator=( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    PipelineCreationFeedbackCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineCreationFeedbackCreateInfoEXT & setPPipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPipelineCreationFeedback = pPipelineCreationFeedback_;
+      return *this;
+    }
+
+    PipelineCreationFeedbackCreateInfoEXT & setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineStageCreationFeedbackCount = pipelineStageCreationFeedbackCount_;
+      return *this;
+    }
+
+    PipelineCreationFeedbackCreateInfoEXT & setPPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_;
+      return *this;
+    }
+
+    operator VkPipelineCreationFeedbackCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineCreationFeedbackCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineCreationFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineCreationFeedbackCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineCreationFeedbackCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( PipelineCreationFeedbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback )
+          && ( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount )
+          && ( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks );
+    }
+
+    bool operator!=( PipelineCreationFeedbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback = {};
+    uint32_t pipelineStageCreationFeedbackCount = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks = {};
+  };
+  static_assert( sizeof( PipelineCreationFeedbackCreateInfoEXT ) == sizeof( VkPipelineCreationFeedbackCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineCreationFeedbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
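+
+  // Sketch (comment only): VK_EXT_pipeline_creation_feedback is requested by
+  // chaining this struct into a pipeline create info; the driver then fills the
+  // caller-owned feedback structs. `graphicsPipelineCreateInfo` is a hypothetical
+  // vk::GraphicsPipelineCreateInfo for a two-stage pipeline:
+  //
+  //   vk::PipelineCreationFeedbackEXT whole{};
+  //   vk::PipelineCreationFeedbackEXT stages[2] = {};
+  //   vk::PipelineCreationFeedbackCreateInfoEXT feedbackInfo( &whole, 2, stages );
+  //   graphicsPipelineCreateInfo.setPNext( &feedbackInfo );
+  //   // after pipeline creation: if whole.flags contains
+  //   // vk::PipelineCreationFeedbackFlagBitsEXT::eValid, whole.duration holds the
+  //   // creation time in nanoseconds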
+
+  struct PipelineDiscardRectangleStateCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {},
+                                                                     VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive,
+                                                                     uint32_t discardRectangleCount_ = {},
+                                                                     const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , discardRectangleMode( discardRectangleMode_ )
+      , discardRectangleCount( discardRectangleCount_ )
+      , pDiscardRectangles( pDiscardRectangles_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , discardRectangleMode( rhs.discardRectangleMode )
+      , discardRectangleCount( rhs.discardRectangleCount )
+      , pDiscardRectangles( rhs.pDiscardRectangles )
+    {}
+
+    PipelineDiscardRectangleStateCreateInfoEXT & operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) - offsetof( PipelineDiscardRectangleStateCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT& operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      discardRectangleMode = discardRectangleMode_;
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      discardRectangleCount = discardRectangleCount_;
+      return *this;
+    }
+
+    PipelineDiscardRectangleStateCreateInfoEXT & setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDiscardRectangles = pDiscardRectangles_;
+      return *this;
+    }
+
+    operator VkPipelineDiscardRectangleStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( discardRectangleMode == rhs.discardRectangleMode )
+          && ( discardRectangleCount == rhs.discardRectangleCount )
+          && ( pDiscardRectangles == rhs.pDiscardRectangles );
+    }
+
+    bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive;
+    uint32_t discardRectangleCount = {};
+    const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles = {};
+  };
+  static_assert( sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineDiscardRectangleStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineExecutableInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {},
+                                                    uint32_t executableIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pipeline( pipeline_ )
+      , executableIndex( executableIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( PipelineExecutableInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , pipeline( rhs.pipeline )
+      , executableIndex( rhs.executableIndex )
+    {}
+
+    PipelineExecutableInfoKHR & operator=( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineExecutableInfoKHR ) - offsetof( PipelineExecutableInfoKHR, pNext ) );
+      return *this;
+    }
+
+    PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineExecutableInfoKHR& operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    PipelineExecutableInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipeline = pipeline_;
+      return *this;
+    }
+
+    PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      executableIndex = executableIndex_;
+      return *this;
+    }
+
+    operator VkPipelineExecutableInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutableInfoKHR*>( this );
+    }
+
+    operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutableInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineExecutableInfoKHR const& ) const = default;
+#else
+    bool operator==( PipelineExecutableInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipeline == rhs.pipeline )
+          && ( executableIndex == rhs.executableIndex );
+    }
+
+    bool operator!=( PipelineExecutableInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+    uint32_t executableIndex = {};
+  };
+  static_assert( sizeof( PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineExecutableInfoKHR>::value, "struct wrapper is not a standard layout!" );
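+
+  // Sketch (comment only): this struct selects one executable of a pipeline for the
+  // VK_KHR_pipeline_executable_properties queries. Assuming `device` and `pipeline`
+  // are valid handles and the extension's function pointers have been loaded into
+  // the dispatcher in use:
+  //
+  //   vk::PipelineExecutableInfoKHR execInfo( pipeline, 0 /* executableIndex */ );
+  //   auto statistics = device.getPipelineExecutableStatisticsKHR( execInfo );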
+
+  struct PipelineExecutableInternalRepresentationKHR
+  {
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {},
+                                                                         std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {},
+                                                                         VULKAN_HPP_NAMESPACE::Bool32 isText_ = {},
+                                                                         size_t dataSize_ = {},
+                                                                         void* pData_ = {} ) VULKAN_HPP_NOEXCEPT
+      : name{}
+      , description{}
+      , isText( isText_ )
+      , dataSize( dataSize_ )
+      , pData( pData_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE>::copy( name, name_ );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( PipelineExecutableInternalRepresentationKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , name{}
+      , description{}
+      , isText( rhs.isText )
+      , dataSize( rhs.dataSize )
+      , pData( rhs.pData )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE>::copy( name, rhs.name );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE>::copy( description, rhs.description );
+    }
+
+    PipelineExecutableInternalRepresentationKHR & operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineExecutableInternalRepresentationKHR ) - offsetof( PipelineExecutableInternalRepresentationKHR, pNext ) );
+      return *this;
+    }
+
+    PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineExecutableInternalRepresentationKHR& operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const *>(&rhs);
+      return *this;
+    }
+
+    PipelineExecutableInternalRepresentationKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineExecutableInternalRepresentationKHR & setName( std::array<char,VK_MAX_DESCRIPTION_SIZE> name_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( name, name_.data(), VK_MAX_DESCRIPTION_SIZE * sizeof( char ) );
+      return *this;
+    }
+
+    PipelineExecutableInternalRepresentationKHR & setDescription( std::array<char,VK_MAX_DESCRIPTION_SIZE> description_ ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( description, description_.data(), VK_MAX_DESCRIPTION_SIZE * sizeof( char ) );
+      return *this;
+    }
+
+    PipelineExecutableInternalRepresentationKHR & setIsText( VULKAN_HPP_NAMESPACE::Bool32 isText_ ) VULKAN_HPP_NOEXCEPT
+    {
+      isText = isText_;
+      return *this;
+    }
+
+    PipelineExecutableInternalRepresentationKHR & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataSize = dataSize_;
+      return *this;
+    }
+
+    PipelineExecutableInternalRepresentationKHR & setPData( void* pData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pData = pData_;
+      return *this;
+    }
+
+    operator VkPipelineExecutableInternalRepresentationKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutableInternalRepresentationKHR*>( this );
+    }
+
+    operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineExecutableInternalRepresentationKHR const& ) const = default;
+#else
+    bool operator==( PipelineExecutableInternalRepresentationKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( memcmp( name, rhs.name, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
+          && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
+          && ( isText == rhs.isText )
+          && ( dataSize == rhs.dataSize )
+          && ( pData == rhs.pData );
+    }
+
+    bool operator!=( PipelineExecutableInternalRepresentationKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR;
+    void* pNext = {};
+    char name[VK_MAX_DESCRIPTION_SIZE] = {};
+    char description[VK_MAX_DESCRIPTION_SIZE] = {};
+    VULKAN_HPP_NAMESPACE::Bool32 isText = {};
+    size_t dataSize = {};
+    void* pData = {};
+  };
+  static_assert( sizeof( PipelineExecutableInternalRepresentationKHR ) == sizeof( VkPipelineExecutableInternalRepresentationKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineExecutableInternalRepresentationKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineExecutablePropertiesKHR
+  {
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {},
+                                                             std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {},
+                                                             std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {},
+                                                             uint32_t subgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT
+      : stages( stages_ )
+      , name{}
+      , description{}
+      , subgroupSize( subgroupSize_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE>::copy( name, name_ );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( PipelineExecutablePropertiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , stages( rhs.stages )
+      , name{}
+      , description{}
+      , subgroupSize( rhs.subgroupSize )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE>::copy( name, rhs.name );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE>::copy( description, rhs.description );
+    }
+
+    PipelineExecutablePropertiesKHR & operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineExecutablePropertiesKHR ) - offsetof( PipelineExecutablePropertiesKHR, pNext ) );
+      return *this;
+    }
+
+    PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineExecutablePropertiesKHR& operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPipelineExecutablePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutablePropertiesKHR*>( this );
+    }
+
+    operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineExecutablePropertiesKHR const& ) const = default;
+#else
+    bool operator==( PipelineExecutablePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( stages == rhs.stages )
+          && ( memcmp( name, rhs.name, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
+          && ( memcmp( description, rhs.description, VK_MAX_DESCRIPTION_SIZE * sizeof( char ) ) == 0 )
+          && ( subgroupSize == rhs.subgroupSize );
+    }
+
+    bool operator!=( PipelineExecutablePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {};
+    char name[VK_MAX_DESCRIPTION_SIZE] = {};
+    char description[VK_MAX_DESCRIPTION_SIZE] = {};
+    uint32_t subgroupSize = {};
+  };
+  static_assert( sizeof( PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineExecutablePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  union PipelineExecutableStatisticValueKHR
+  {
+    PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) );
+    }
+
+    PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} )
+    {
+      b32 = b32_;
+    }
+
+    PipelineExecutableStatisticValueKHR( int64_t i64_ )
+    {
+      i64 = i64_;
+    }
+
+    PipelineExecutableStatisticValueKHR( uint64_t u64_ )
+    {
+      u64 = u64_;
+    }
+
+    PipelineExecutableStatisticValueKHR( double f64_ )
+    {
+      f64 = f64_;
+    }
+
+    PipelineExecutableStatisticValueKHR & setB32( VULKAN_HPP_NAMESPACE::Bool32 b32_ ) VULKAN_HPP_NOEXCEPT
+    {
+      b32 = b32_;
+      return *this;
+    }
+
+    PipelineExecutableStatisticValueKHR & setI64( int64_t i64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      i64 = i64_;
+      return *this;
+    }
+
+    PipelineExecutableStatisticValueKHR & setU64( uint64_t u64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      u64 = u64_;
+      return *this;
+    }
+
+    PipelineExecutableStatisticValueKHR & setF64( double f64_ ) VULKAN_HPP_NOEXCEPT
+    {
+      f64 = f64_;
+      return *this;
+    }
+
+    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) );
+      return *this;
+    }
+
+    operator VkPipelineExecutableStatisticValueKHR const&() const
+    {
+      return *reinterpret_cast<const VkPipelineExecutableStatisticValueKHR*>(this);
+    }
+
+    operator VkPipelineExecutableStatisticValueKHR &()
+    {
+      return *reinterpret_cast<VkPipelineExecutableStatisticValueKHR*>(this);
+    }
+
+#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+    VULKAN_HPP_NAMESPACE::Bool32 b32;
+    int64_t i64;
+    uint64_t u64;
+    double f64;
+#else
+    VkBool32 b32;
+    int64_t i64;
+    uint64_t u64;
+    double f64;
+#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
+  };
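+
+  // Sketch (comment only): the union is discriminated by the `format` member of
+  // PipelineExecutableStatisticKHR (defined below), so only the matching member is
+  // meaningful. `stat` and `use` are hypothetical:
+  //
+  //   switch ( stat.format )
+  //   {
+  //     case vk::PipelineExecutableStatisticFormatKHR::eBool32:  use( stat.value.b32 ); break;
+  //     case vk::PipelineExecutableStatisticFormatKHR::eInt64:   use( stat.value.i64 ); break;
+  //     case vk::PipelineExecutableStatisticFormatKHR::eUint64:  use( stat.value.u64 ); break;
+  //     case vk::PipelineExecutableStatisticFormatKHR::eFloat64: use( stat.value.f64 ); break;
+  //   }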
+
+  struct PipelineExecutableStatisticKHR
+  {
+    PipelineExecutableStatisticKHR( std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {},
+                                    std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {},
+                                    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32,
+                                    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {} ) VULKAN_HPP_NOEXCEPT
+      : name{}
+      , description{}
+      , format( format_ )
+      , value( value_ )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE>::copy( name, name_ );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE>::copy( description, description_ );
+    }
+
+    PipelineExecutableStatisticKHR( PipelineExecutableStatisticKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , name{}
+      , description{}
+      , format( rhs.format )
+      , value( rhs.value )
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE>::copy( name, rhs.name );
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<char,VK_MAX_DESCRIPTION_SIZE>::copy( description, rhs.description );
+    }
+
+    PipelineExecutableStatisticKHR & operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineExecutableStatisticKHR ) - offsetof( PipelineExecutableStatisticKHR, pNext ) );
+      return *this;
+    }
+
+    PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineExecutableStatisticKHR& operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkPipelineExecutableStatisticKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineExecutableStatisticKHR*>( this );
+    }
+
+    operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineExecutableStatisticKHR*>( this );
+    }
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR;
+    void* pNext = {};
+    char name[VK_MAX_DESCRIPTION_SIZE] = {};
+    char description[VK_MAX_DESCRIPTION_SIZE] = {};
+    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32;
+    VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {};
+  };
+  static_assert( sizeof( PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineExecutableStatisticKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR PipelineInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {} ) VULKAN_HPP_NOEXCEPT
+      : pipeline( pipeline_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineInfoKHR( PipelineInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , pipeline( rhs.pipeline )
+    {}
+
+    PipelineInfoKHR & operator=( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineInfoKHR ) - offsetof( PipelineInfoKHR, pNext ) );
+      return *this;
+    }
+
+    PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineInfoKHR& operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    PipelineInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipeline = pipeline_;
+      return *this;
+    }
+
+    operator VkPipelineInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineInfoKHR*>( this );
+    }
+
+    operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineInfoKHR const& ) const = default;
+#else
+    bool operator==( PipelineInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( pipeline == rhs.pipeline );
+    }
+
+    bool operator!=( PipelineInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
+  };
+  static_assert( sizeof( PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct PushConstantRange
+  {
+    VULKAN_HPP_CONSTEXPR PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {},
+                                            uint32_t offset_ = {},
+                                            uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT
+      : stageFlags( stageFlags_ )
+      , offset( offset_ )
+      , size( size_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const& rhs ) VULKAN_HPP_NOEXCEPT
+      : stageFlags( rhs.stageFlags )
+      , offset( rhs.offset )
+      , size( rhs.size )
+    {}
+
+    PushConstantRange & operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( PushConstantRange ) );
+      return *this;
+    }
+
+    PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PushConstantRange& operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PushConstantRange const *>(&rhs);
+      return *this;
+    }
+
+    PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageFlags = stageFlags_;
+      return *this;
+    }
+
+    PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
+    {
+      size = size_;
+      return *this;
+    }
+
+    operator VkPushConstantRange const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPushConstantRange*>( this );
+    }
+
+    operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPushConstantRange*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PushConstantRange const& ) const = default;
+#else
+    bool operator==( PushConstantRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( stageFlags == rhs.stageFlags )
+          && ( offset == rhs.offset )
+          && ( size == rhs.size );
+    }
+
+    bool operator!=( PushConstantRange const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
+    uint32_t offset = {};
+    uint32_t size = {};
+  };
+  static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PushConstantRange>::value, "struct wrapper is not a standard layout!" );
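+
+  // Sketch (comment only): a push constant range declares, per stage, which bytes
+  // of push constant storage a pipeline layout may use; the same range is then
+  // referenced when recording. `layout`, `commandBuffer` and `mvp` (a 4x4 float
+  // matrix) are hypothetical:
+  //
+  //   vk::PushConstantRange range( vk::ShaderStageFlagBits::eVertex, 0, sizeof( float ) * 16 );
+  //   // at record time:
+  //   commandBuffer.pushConstants( layout, vk::ShaderStageFlagBits::eVertex, 0, sizeof( mvp ), &mvp );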
+
+  struct PipelineLayoutCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {},
+                                                   uint32_t setLayoutCount_ = {},
+                                                   const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {},
+                                                   uint32_t pushConstantRangeCount_ = {},
+                                                   const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , setLayoutCount( setLayoutCount_ )
+      , pSetLayouts( pSetLayouts_ )
+      , pushConstantRangeCount( pushConstantRangeCount_ )
+      , pPushConstantRanges( pPushConstantRanges_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( PipelineLayoutCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , setLayoutCount( rhs.setLayoutCount )
+      , pSetLayouts( rhs.pSetLayouts )
+      , pushConstantRangeCount( rhs.pushConstantRangeCount )
+      , pPushConstantRanges( rhs.pPushConstantRanges )
+    {}
+
+    PipelineLayoutCreateInfo & operator=( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineLayoutCreateInfo ) - offsetof( PipelineLayoutCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineLayoutCreateInfo& operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      setLayoutCount = setLayoutCount_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSetLayouts = pSetLayouts_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pushConstantRangeCount = pushConstantRangeCount_;
+      return *this;
+    }
+
+    PipelineLayoutCreateInfo & setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPushConstantRanges = pPushConstantRanges_;
+      return *this;
+    }
+
+    operator VkPipelineLayoutCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineLayoutCreateInfo*>( this );
+    }
+
+    operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineLayoutCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineLayoutCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( setLayoutCount == rhs.setLayoutCount )
+          && ( pSetLayouts == rhs.pSetLayouts )
+          && ( pushConstantRangeCount == rhs.pushConstantRangeCount )
+          && ( pPushConstantRanges == rhs.pPushConstantRanges );
+    }
+
+    bool operator!=( PipelineLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {};
+    uint32_t setLayoutCount = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts = {};
+    uint32_t pushConstantRangeCount = {};
+    const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges = {};
+  };
+  static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
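+
+  // Sketch (comment only): creating a layout from one descriptor set layout and the
+  // push constant range from the previous sketch. `device`, `descriptorSetLayout`
+  // and `range` are hypothetical, already-created objects:
+  //
+  //   vk::PipelineLayoutCreateInfo layoutInfo( {}, 1, &descriptorSetLayout, 1, &range );
+  //   vk::PipelineLayout pipelineLayout = device.createPipelineLayout( layoutInfo );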
+
+  struct PipelineRasterizationConservativeStateCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {},
+                                                                              VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled,
+                                                                              float extraPrimitiveOverestimationSize_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , conservativeRasterizationMode( conservativeRasterizationMode_ )
+      , extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , conservativeRasterizationMode( rhs.conservativeRasterizationMode )
+      , extraPrimitiveOverestimationSize( rhs.extraPrimitiveOverestimationSize )
+    {}
+
+    PipelineRasterizationConservativeStateCreateInfoEXT & operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) - offsetof( PipelineRasterizationConservativeStateCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineRasterizationConservativeStateCreateInfoEXT& operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    PipelineRasterizationConservativeStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationConservativeStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineRasterizationConservativeStateCreateInfoEXT & setConservativeRasterizationMode( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      conservativeRasterizationMode = conservativeRasterizationMode_;
+      return *this;
+    }
+
+    PipelineRasterizationConservativeStateCreateInfoEXT & setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_;
+      return *this;
+    }
+
+    operator VkPipelineRasterizationConservativeStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationConservativeStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( conservativeRasterizationMode == rhs.conservativeRasterizationMode )
+          && ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize );
+    }
+
+    bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled;
+    float extraPrimitiveOverestimationSize = {};
+  };
+  static_assert( sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineRasterizationConservativeStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineRasterizationDepthClipStateCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {},
+                                                                           VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , depthClipEnable( depthClipEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( PipelineRasterizationDepthClipStateCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , depthClipEnable( rhs.depthClipEnable )
+    {}
+
+    PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) - offsetof( PipelineRasterizationDepthClipStateCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineRasterizationDepthClipStateCreateInfoEXT& operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    PipelineRasterizationDepthClipStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationDepthClipStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineRasterizationDepthClipStateCreateInfoEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      depthClipEnable = depthClipEnable_;
+      return *this;
+    }
+
+    operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationDepthClipStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationDepthClipStateCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( depthClipEnable == rhs.depthClipEnable );
+    }
+
+    bool operator!=( PipelineRasterizationDepthClipStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {};
+    VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};
+  };
+  static_assert( sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineRasterizationDepthClipStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
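+
+  // Sketch (comment only): the EXT rasterization structs in this file extend
+  // VkPipelineRasterizationStateCreateInfo through its pNext chain.
+  // `rasterizationState` is a hypothetical vk::PipelineRasterizationStateCreateInfo:
+  //
+  //   vk::PipelineRasterizationDepthClipStateCreateInfoEXT depthClip( {}, VK_TRUE );
+  //   rasterizationState.setPNext( &depthClip );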
+
+  struct PipelineRasterizationLineStateCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault,
+                                                                      VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {},
+                                                                      uint32_t lineStippleFactor_ = {},
+                                                                      uint16_t lineStipplePattern_ = {} ) VULKAN_HPP_NOEXCEPT
+      : lineRasterizationMode( lineRasterizationMode_ )
+      , stippledLineEnable( stippledLineEnable_ )
+      , lineStippleFactor( lineStippleFactor_ )
+      , lineStipplePattern( lineStipplePattern_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( PipelineRasterizationLineStateCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , lineRasterizationMode( rhs.lineRasterizationMode )
+      , stippledLineEnable( rhs.stippledLineEnable )
+      , lineStippleFactor( rhs.lineStippleFactor )
+      , lineStipplePattern( rhs.lineStipplePattern )
+    {}
+
+    PipelineRasterizationLineStateCreateInfoEXT & operator=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineRasterizationLineStateCreateInfoEXT ) - offsetof( PipelineRasterizationLineStateCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineRasterizationLineStateCreateInfoEXT& operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    PipelineRasterizationLineStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationLineStateCreateInfoEXT & setLineRasterizationMode( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      lineRasterizationMode = lineRasterizationMode_;
+      return *this;
+    }
+
+    PipelineRasterizationLineStateCreateInfoEXT & setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stippledLineEnable = stippledLineEnable_;
+      return *this;
+    }
+
+    PipelineRasterizationLineStateCreateInfoEXT & setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      lineStippleFactor = lineStippleFactor_;
+      return *this;
+    }
+
+    PipelineRasterizationLineStateCreateInfoEXT & setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT
+    {
+      lineStipplePattern = lineStipplePattern_;
+      return *this;
+    }
+
+    operator VkPipelineRasterizationLineStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationLineStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineRasterizationLineStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationLineStateCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineRasterizationLineStateCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( PipelineRasterizationLineStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( lineRasterizationMode == rhs.lineRasterizationMode )
+          && ( stippledLineEnable == rhs.stippledLineEnable )
+          && ( lineStippleFactor == rhs.lineStippleFactor )
+          && ( lineStipplePattern == rhs.lineStipplePattern );
+    }
+
+    bool operator!=( PipelineRasterizationLineStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault;
+    VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {};
+    uint32_t lineStippleFactor = {};
+    uint16_t lineStipplePattern = {};
+  };
+  static_assert( sizeof( PipelineRasterizationLineStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineRasterizationLineStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineRasterizationStateRasterizationOrderAMD
+  {
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict ) VULKAN_HPP_NOEXCEPT
+      : rasterizationOrder( rasterizationOrder_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , rasterizationOrder( rhs.rasterizationOrder )
+    {}
+
+    PipelineRasterizationStateRasterizationOrderAMD & operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineRasterizationStateRasterizationOrderAMD ) - offsetof( PipelineRasterizationStateRasterizationOrderAMD, pNext ) );
+      return *this;
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD& operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const *>(&rhs);
+      return *this;
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationStateRasterizationOrderAMD & setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rasterizationOrder = rasterizationOrder_;
+      return *this;
+    }
+
+    operator VkPipelineRasterizationStateRasterizationOrderAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>( this );
+    }
+
+    operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineRasterizationStateRasterizationOrderAMD const& ) const = default;
+#else
+    bool operator==( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( rasterizationOrder == rhs.rasterizationOrder );
+    }
+
+    bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict;
+  };
+  static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineRasterizationStateRasterizationOrderAMD>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineRasterizationStateStreamCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {},
+                                                                        uint32_t rasterizationStream_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , rasterizationStream( rasterizationStream_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( PipelineRasterizationStateStreamCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , rasterizationStream( rhs.rasterizationStream )
+    {}
+
+    PipelineRasterizationStateStreamCreateInfoEXT & operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) - offsetof( PipelineRasterizationStateStreamCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineRasterizationStateStreamCreateInfoEXT& operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    PipelineRasterizationStateStreamCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineRasterizationStateStreamCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineRasterizationStateStreamCreateInfoEXT & setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT
+    {
+      rasterizationStream = rasterizationStream_;
+      return *this;
+    }
+
+    operator VkPipelineRasterizationStateStreamCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineRasterizationStateStreamCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineRasterizationStateStreamCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineRasterizationStateStreamCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineRasterizationStateStreamCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( rasterizationStream == rhs.rasterizationStream );
+    }
== rhs.pNext ) + && ( flags == rhs.flags ) + && ( rasterizationStream == rhs.rasterizationStream ); + } + + bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {}; + uint32_t rasterizationStream = {}; + }; + static_assert( sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) == sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PipelineRepresentativeFragmentTestStateCreateInfoNV + { + VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {} ) VULKAN_HPP_NOEXCEPT + : representativeFragmentTestEnable( representativeFragmentTestEnable_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( PipelineRepresentativeFragmentTestStateCreateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , representativeFragmentTestEnable( rhs.representativeFragmentTestEnable ) + {} + + PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) - offsetof( PipelineRepresentativeFragmentTestStateCreateInfoNV, pNext ) ); + return *this; + } + + PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PipelineRepresentativeFragmentTestStateCreateInfoNV& operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PipelineRepresentativeFragmentTestStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineRepresentativeFragmentTestStateCreateInfoNV & setRepresentativeFragmentTestEnable( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT + { + representativeFragmentTestEnable = representativeFragmentTestEnable_; + return *this; + } + + operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineRepresentativeFragmentTestStateCreateInfoNV const& ) const = default; +#else + bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable ); + } + + bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV; + const void* pNext = 
{}; + VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable = {}; + }; + static_assert( sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) == sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PipelineSampleLocationsStateCreateInfoEXT + { + VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT + : sampleLocationsEnable( sampleLocationsEnable_ ) + , sampleLocationsInfo( sampleLocationsInfo_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , sampleLocationsEnable( rhs.sampleLocationsEnable ) + , sampleLocationsInfo( rhs.sampleLocationsInfo ) + {} + + PipelineSampleLocationsStateCreateInfoEXT & operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PipelineSampleLocationsStateCreateInfoEXT ) - offsetof( PipelineSampleLocationsStateCreateInfoEXT, pNext ) ); + return *this; + } + + PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PipelineSampleLocationsStateCreateInfoEXT& operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PipelineSampleLocationsStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsEnable = sampleLocationsEnable_; + return *this; + } + + PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT + { + sampleLocationsInfo = sampleLocationsInfo_; + return *this; + } + + operator VkPipelineSampleLocationsStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const& ) const = default; +#else + bool operator==( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( sampleLocationsEnable == rhs.sampleLocationsEnable ) + && ( sampleLocationsInfo == rhs.sampleLocationsInfo ); + } + + bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {}; + VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {}; + }; + static_assert( sizeof( PipelineSampleLocationsStateCreateInfoEXT ) == sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), "struct and wrapper have different 
size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT + { + VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( uint32_t requiredSubgroupSize_ = {} ) VULKAN_HPP_NOEXCEPT + : requiredSubgroupSize( requiredSubgroupSize_ ) + {} + + VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , requiredSubgroupSize( rhs.requiredSubgroupSize ) + {} + + PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) - offsetof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT, pNext ) ); + return *this; + } + + PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT& operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& ) const = default; +#else + bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( requiredSubgroupSize == rhs.requiredSubgroupSize ); + } + + bool operator!=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; + void* pNext = {}; + uint32_t requiredSubgroupSize = {}; + }; + static_assert( sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) == sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
+
+  struct PipelineTessellationDomainOriginStateCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft ) VULKAN_HPP_NOEXCEPT
+      : domainOrigin( domainOrigin_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , domainOrigin( rhs.domainOrigin )
+    {}
+
+    PipelineTessellationDomainOriginStateCreateInfo & operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineTessellationDomainOriginStateCreateInfo ) - offsetof( PipelineTessellationDomainOriginStateCreateInfo, pNext ) );
+      return *this;
+    }
+
+    PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineTessellationDomainOriginStateCreateInfo& operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    PipelineTessellationDomainOriginStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineTessellationDomainOriginStateCreateInfo & setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT
+    {
+      domainOrigin = domainOrigin_;
+      return *this;
+    }
+
+    operator VkPipelineTessellationDomainOriginStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo*>( this );
+    }
+
+    operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineTessellationDomainOriginStateCreateInfo const& ) const = default;
+#else
+    bool operator==( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( domainOrigin == rhs.domainOrigin );
+    }
+
+    bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft;
+  };
+  static_assert( sizeof( PipelineTessellationDomainOriginStateCreateInfo ) == sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineTessellationDomainOriginStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
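[Editorial note, not part of the patch: all of these wrapper structs follow the same pNext-chaining convention as the C API; an extension struct is linked into the pNext chain of the create-info it extends, and the pre-initialized sType lets the implementation identify it. A minimal illustrative sketch, assuming the default `vk` namespace alias and the PipelineTessellationStateCreateInfo wrapper generated elsewhere in this patch:

    vk::PipelineTessellationDomainOriginStateCreateInfo domainOriginInfo;
    domainOriginInfo.setDomainOrigin( vk::TessellationDomainOrigin::eLowerLeft );

    vk::PipelineTessellationStateCreateInfo tessellationState;
    tessellationState.setPatchControlPoints( 3 )
                     .setPNext( &domainOriginInfo ); // sType is already filled in by the wrapper
]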
+
+  struct VertexInputBindingDivisorDescriptionEXT
+  {
+    VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( uint32_t binding_ = {},
+                                                                  uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT
+      : binding( binding_ )
+      , divisor( divisor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( VertexInputBindingDivisorDescriptionEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : binding( rhs.binding )
+      , divisor( rhs.divisor )
+    {}
+
+    VertexInputBindingDivisorDescriptionEXT & operator=( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( VertexInputBindingDivisorDescriptionEXT ) );
+      return *this;
+    }
+
+    VertexInputBindingDivisorDescriptionEXT( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    VertexInputBindingDivisorDescriptionEXT& operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const *>(&rhs);
+      return *this;
+    }
+
+    VertexInputBindingDivisorDescriptionEXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      binding = binding_;
+      return *this;
+    }
+
+    VertexInputBindingDivisorDescriptionEXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      divisor = divisor_;
+      return *this;
+    }
+
+    operator VkVertexInputBindingDivisorDescriptionEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkVertexInputBindingDivisorDescriptionEXT*>( this );
+    }
+
+    operator VkVertexInputBindingDivisorDescriptionEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkVertexInputBindingDivisorDescriptionEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( VertexInputBindingDivisorDescriptionEXT const& ) const = default;
+#else
+    bool operator==( VertexInputBindingDivisorDescriptionEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( binding == rhs.binding )
+          && ( divisor == rhs.divisor );
+    }
+
+    bool operator!=( VertexInputBindingDivisorDescriptionEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t binding = {};
+    uint32_t divisor = {};
+  };
+  static_assert( sizeof( VertexInputBindingDivisorDescriptionEXT ) == sizeof( VkVertexInputBindingDivisorDescriptionEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<VertexInputBindingDivisorDescriptionEXT>::value, "struct wrapper is not a standard layout!" );
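[Editorial note, not part of the patch: the reinterpret_cast conversion operators above make each wrapper implicitly convertible to its C counterpart, so a wrapped struct can be passed wherever the raw Vulkan type is expected, without copying; the sizeof/standard-layout static_asserts are what make this aliasing safe. An illustrative sketch:

    vk::VertexInputBindingDivisorDescriptionEXT divisor{ /* binding */ 1, /* divisor */ 4 };

    // Implicit conversion to the C type; both views alias the same bytes.
    VkVertexInputBindingDivisorDescriptionEXT const & cDivisor = divisor;
    uint32_t d = cDivisor.divisor; // 4
]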
+
+  struct PipelineVertexInputDivisorStateCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( uint32_t vertexBindingDivisorCount_ = {},
+                                                                       const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ = {} ) VULKAN_HPP_NOEXCEPT
+      : vertexBindingDivisorCount( vertexBindingDivisorCount_ )
+      , pVertexBindingDivisors( pVertexBindingDivisors_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , vertexBindingDivisorCount( rhs.vertexBindingDivisorCount )
+      , pVertexBindingDivisors( rhs.pVertexBindingDivisors )
+    {}
+
+    PipelineVertexInputDivisorStateCreateInfoEXT & operator=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) - offsetof( PipelineVertexInputDivisorStateCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineVertexInputDivisorStateCreateInfoEXT& operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    PipelineVertexInputDivisorStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      vertexBindingDivisorCount = vertexBindingDivisorCount_;
+      return *this;
+    }
+
+    PipelineVertexInputDivisorStateCreateInfoEXT & setPVertexBindingDivisors( const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pVertexBindingDivisors = pVertexBindingDivisors_;
+      return *this;
+    }
+
+    operator VkPipelineVertexInputDivisorStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineVertexInputDivisorStateCreateInfoEXT*>( this );
+    }
+
+    operator VkPipelineVertexInputDivisorStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineVertexInputDivisorStateCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineVertexInputDivisorStateCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount )
+          && ( pVertexBindingDivisors == rhs.pVertexBindingDivisors );
+    }
+
+    bool operator!=( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
+    const void* pNext = {};
+    uint32_t vertexBindingDivisorCount = {};
+    const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors = {};
+  };
+  static_assert( sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) == sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineVertexInputDivisorStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineViewportCoarseSampleOrderStateCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault,
+                                                                             uint32_t customSampleOrderCount_ = {},
+                                                                             const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders_ = {} ) VULKAN_HPP_NOEXCEPT
+      : sampleOrderType( sampleOrderType_ )
+      , customSampleOrderCount( customSampleOrderCount_ )
+      , pCustomSampleOrders( pCustomSampleOrders_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , sampleOrderType( rhs.sampleOrderType )
+      , customSampleOrderCount( rhs.customSampleOrderCount )
+      , pCustomSampleOrders( rhs.pCustomSampleOrders )
+    {}
+
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) - offsetof( PipelineViewportCoarseSampleOrderStateCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV& operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV & setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleOrderType = sampleOrderType_;
+      return *this;
+    }
+
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrderCount( uint32_t customSampleOrderCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      customSampleOrderCount = customSampleOrderCount_;
+      return *this;
+    }
+
+    PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPCustomSampleOrders( const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCustomSampleOrders = pCustomSampleOrders_;
+      return *this;
+    }
+
+    operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& ) const = default;
+#else
+    bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( sampleOrderType == rhs.sampleOrderType )
+          && ( customSampleOrderCount == rhs.customSampleOrderCount )
+          && ( pCustomSampleOrders == rhs.pCustomSampleOrders );
+    }
+
+    bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault;
+    uint32_t customSampleOrderCount = {};
+    const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders = {};
+  };
+  static_assert( sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) == sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportCoarseSampleOrderStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineViewportExclusiveScissorStateCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( uint32_t exclusiveScissorCount_ = {},
+                                                                            const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors_ = {} ) VULKAN_HPP_NOEXCEPT
+      : exclusiveScissorCount( exclusiveScissorCount_ )
+      , pExclusiveScissors( pExclusiveScissors_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( PipelineViewportExclusiveScissorStateCreateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , exclusiveScissorCount( rhs.exclusiveScissorCount )
+      , pExclusiveScissors( rhs.pExclusiveScissors )
+    {}
+
+    PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) - offsetof( PipelineViewportExclusiveScissorStateCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineViewportExclusiveScissorStateCreateInfoNV& operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    PipelineViewportExclusiveScissorStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      exclusiveScissorCount = exclusiveScissorCount_;
+      return *this;
+    }
+
+    PipelineViewportExclusiveScissorStateCreateInfoNV & setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pExclusiveScissors = pExclusiveScissors_;
+      return *this;
+    }
+
+    operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportExclusiveScissorStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportExclusiveScissorStateCreateInfoNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineViewportExclusiveScissorStateCreateInfoNV const& ) const = default;
+#else
+    bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( exclusiveScissorCount == rhs.exclusiveScissorCount )
+          && ( pExclusiveScissors == rhs.pExclusiveScissors );
+    }
+
+    bool operator!=( PipelineViewportExclusiveScissorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
+    const void* pNext = {};
+    uint32_t exclusiveScissorCount = {};
+    const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors = {};
+  };
+  static_assert( sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) == sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportExclusiveScissorStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct ShadingRatePaletteNV
+  {
+    VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( uint32_t shadingRatePaletteEntryCount_ = {},
+                                               const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ = {} ) VULKAN_HPP_NOEXCEPT
+      : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ )
+      , pShadingRatePaletteEntries( pShadingRatePaletteEntries_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : shadingRatePaletteEntryCount( rhs.shadingRatePaletteEntryCount )
+      , pShadingRatePaletteEntries( rhs.pShadingRatePaletteEntries )
+    {}
+
+    ShadingRatePaletteNV & operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( ShadingRatePaletteNV ) );
+      return *this;
+    }
+
+    ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ShadingRatePaletteNV& operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const *>(&rhs);
+      return *this;
+    }
+
+    ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_;
+      return *this;
+    }
+
+    ShadingRatePaletteNV & setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pShadingRatePaletteEntries = pShadingRatePaletteEntries_;
+      return *this;
+    }
+
+    operator VkShadingRatePaletteNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShadingRatePaletteNV*>( this );
+    }
+
+    operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShadingRatePaletteNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ShadingRatePaletteNV const& ) const = default;
+#else
+    bool operator==( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount )
+          && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries );
+    }
+
+    bool operator!=( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t shadingRatePaletteEntryCount = {};
+    const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries = {};
+  };
+  static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ShadingRatePaletteNV>::value, "struct wrapper is not a standard layout!" );
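[Editorial note, not part of the patch: because the constructors are VULKAN_HPP_CONSTEXPR, plain payload structs like ShadingRatePaletteNV can even be built at compile time; only structs carrying an sType/pNext header need the memcpy-based assignment seen above. An illustrative sketch, assuming C++17 and the `vk` namespace:

    static constexpr std::array<vk::ShadingRatePaletteEntryNV, 2> entries{
        vk::ShadingRatePaletteEntryNV::e1InvocationPerPixel,
        vk::ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels
    };
    static constexpr vk::ShadingRatePaletteNV palette{ static_cast<uint32_t>( entries.size() ), entries.data() };
]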
+
+  struct PipelineViewportShadingRateImageStateCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {},
+                                                                            uint32_t viewportCount_ = {},
+                                                                            const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : shadingRateImageEnable( shadingRateImageEnable_ )
+      , viewportCount( viewportCount_ )
+      , pShadingRatePalettes( pShadingRatePalettes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , shadingRateImageEnable( rhs.shadingRateImageEnable )
+      , viewportCount( rhs.viewportCount )
+      , pShadingRatePalettes( rhs.pShadingRatePalettes )
+    {}
+
+    PipelineViewportShadingRateImageStateCreateInfoNV & operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) - offsetof( PipelineViewportShadingRateImageStateCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineViewportShadingRateImageStateCreateInfoNV& operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    PipelineViewportShadingRateImageStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shadingRateImageEnable = shadingRateImageEnable_;
+      return *this;
+    }
+
+    PipelineViewportShadingRateImageStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = viewportCount_;
+      return *this;
+    }
+
+    PipelineViewportShadingRateImageStateCreateInfoNV & setPShadingRatePalettes( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pShadingRatePalettes = pShadingRatePalettes_;
+      return *this;
+    }
+
+    operator VkPipelineViewportShadingRateImageStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportShadingRateImageStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineViewportShadingRateImageStateCreateInfoNV const& ) const = default;
+#else
+    bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( shadingRateImageEnable == rhs.shadingRateImageEnable )
+          && ( viewportCount == rhs.viewportCount )
+          && ( pShadingRatePalettes == rhs.pShadingRatePalettes );
+    }
+
+    bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {};
+    uint32_t viewportCount = {};
+    const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes = {};
+  };
+  static_assert( sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) == sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportShadingRateImageStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct ViewportSwizzleNV
+  {
+    VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
+                                            VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
+                                            VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX,
+                                            VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT
+      : x( x_ )
+      , y( y_ )
+      , z( z_ )
+      , w( w_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( ViewportSwizzleNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : x( rhs.x )
+      , y( rhs.y )
+      , z( rhs.z )
+      , w( rhs.w )
+    {}
+
+    ViewportSwizzleNV & operator=( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( ViewportSwizzleNV ) );
+      return *this;
+    }
+
+    ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ViewportSwizzleNV& operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV const *>(&rhs);
+      return *this;
+    }
+
+    ViewportSwizzleNV & setX( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ ) VULKAN_HPP_NOEXCEPT
+    {
+      x = x_;
+      return *this;
+    }
+
+    ViewportSwizzleNV & setY( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ ) VULKAN_HPP_NOEXCEPT
+    {
+      y = y_;
+      return *this;
+    }
+
+    ViewportSwizzleNV & setZ( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ ) VULKAN_HPP_NOEXCEPT
+    {
+      z = z_;
+      return *this;
+    }
+
+    ViewportSwizzleNV & setW( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ ) VULKAN_HPP_NOEXCEPT
+    {
+      w = w_;
+      return *this;
+    }
+
+    operator VkViewportSwizzleNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkViewportSwizzleNV*>( this );
+    }
+
+    operator VkViewportSwizzleNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkViewportSwizzleNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ViewportSwizzleNV const& ) const = default;
+#else
+    bool operator==( ViewportSwizzleNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( x == rhs.x )
+          && ( y == rhs.y )
+          && ( z == rhs.z )
+          && ( w == rhs.w );
+    }
+
+    bool operator!=( ViewportSwizzleNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
+    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
+    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
+    VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
+  };
+  static_assert( sizeof( ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ViewportSwizzleNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineViewportSwizzleStateCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {},
+                                                                   uint32_t viewportCount_ = {},
+                                                                   const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , viewportCount( viewportCount_ )
+      , pViewportSwizzles( pViewportSwizzles_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , viewportCount( rhs.viewportCount )
+      , pViewportSwizzles( rhs.pViewportSwizzles )
+    {}
+
+    PipelineViewportSwizzleStateCreateInfoNV & operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineViewportSwizzleStateCreateInfoNV ) - offsetof( PipelineViewportSwizzleStateCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineViewportSwizzleStateCreateInfoNV& operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    PipelineViewportSwizzleStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineViewportSwizzleStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    PipelineViewportSwizzleStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = viewportCount_;
+      return *this;
+    }
+
+    PipelineViewportSwizzleStateCreateInfoNV & setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewportSwizzles = pViewportSwizzles_;
+      return *this;
+    }
+
+    operator VkPipelineViewportSwizzleStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportSwizzleStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineViewportSwizzleStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineViewportSwizzleStateCreateInfoNV const& ) const = default;
+#else
+    bool operator==( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( viewportCount == rhs.viewportCount )
+          && ( pViewportSwizzles == rhs.pViewportSwizzles );
+    }
+
+    bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {};
+    uint32_t viewportCount = {};
+    const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles = {};
+  };
+  static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportSwizzleStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct ViewportWScalingNV
+  {
+    VULKAN_HPP_CONSTEXPR ViewportWScalingNV( float xcoeff_ = {},
+                                             float ycoeff_ = {} ) VULKAN_HPP_NOEXCEPT
+      : xcoeff( xcoeff_ )
+      , ycoeff( ycoeff_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : xcoeff( rhs.xcoeff )
+      , ycoeff( rhs.ycoeff )
+    {}
+
+    ViewportWScalingNV & operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( ViewportWScalingNV ) );
+      return *this;
+    }
+
+    ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ViewportWScalingNV& operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportWScalingNV const *>(&rhs);
+      return *this;
+    }
+
+    ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT
+    {
+      xcoeff = xcoeff_;
+      return *this;
+    }
+
+    ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycoeff = ycoeff_;
+      return *this;
+    }
+
+    operator VkViewportWScalingNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkViewportWScalingNV*>( this );
+    }
+
+    operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkViewportWScalingNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ViewportWScalingNV const& ) const = default;
+#else
+    bool operator==( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( xcoeff == rhs.xcoeff )
+          && ( ycoeff == rhs.ycoeff );
+    }
+
+    bool operator!=( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    float xcoeff = {};
+    float ycoeff = {};
+  };
+  static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ViewportWScalingNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct PipelineViewportWScalingStateCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {},
+                                                                    uint32_t viewportCount_ = {},
+                                                                    const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings_ = {} ) VULKAN_HPP_NOEXCEPT
+      : viewportWScalingEnable( viewportWScalingEnable_ )
+      , viewportCount( viewportCount_ )
+      , pViewportWScalings( pViewportWScalings_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( PipelineViewportWScalingStateCreateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , viewportWScalingEnable( rhs.viewportWScalingEnable )
+      , viewportCount( rhs.viewportCount )
+      , pViewportWScalings( rhs.pViewportWScalings )
+    {}
+
+    PipelineViewportWScalingStateCreateInfoNV & operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PipelineViewportWScalingStateCreateInfoNV ) - offsetof( PipelineViewportWScalingStateCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PipelineViewportWScalingStateCreateInfoNV& operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    PipelineViewportWScalingStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PipelineViewportWScalingStateCreateInfoNV & setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportWScalingEnable = viewportWScalingEnable_;
+      return *this;
+    }
+
+    PipelineViewportWScalingStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      viewportCount = viewportCount_;
+      return *this;
+    }
+
+    PipelineViewportWScalingStateCreateInfoNV & setPViewportWScalings( const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewportWScalings = pViewportWScalings_;
+      return *this;
+    }
+
+    operator VkPipelineViewportWScalingStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV*>( this );
+    }
+
+    operator VkPipelineViewportWScalingStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PipelineViewportWScalingStateCreateInfoNV const& ) const = default;
+#else
+    bool operator==( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( viewportWScalingEnable == rhs.viewportWScalingEnable )
+          && ( viewportCount == rhs.viewportCount )
+          && ( pViewportWScalings == rhs.pViewportWScalings );
+    }
+
+    bool operator!=( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {};
+    uint32_t viewportCount = {};
+    const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings = {};
+  };
+  static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PipelineViewportWScalingStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkPresent.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkPresent.hpp
new file mode 100644
index 00000000..af6963cc
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkPresent.hpp
@@ -0,0 +1,717 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("Combined Software") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkExtension.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkMetal.hpp"
+#include "VkFormat.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkFence.hpp"
+#include "VkDedicated.hpp"
+#include "VkPipeline.hpp"
+#include "VkDraw.hpp"
+#include "VkDispatch.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+#include "VkPerformance.hpp"
+#include "VkFilter.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPhysical.hpp"
+#include "VkPresent.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#ifdef VK_USE_PLATFORM_GGP
+  struct PresentFrameTokenGGP
+  {
+    VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( GgpFrameToken frameToken_ = {} ) VULKAN_HPP_NOEXCEPT
+      : frameToken( frameToken_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( PresentFrameTokenGGP const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , frameToken( rhs.frameToken )
+    {}
+
+    PresentFrameTokenGGP & operator=( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PresentFrameTokenGGP ) - offsetof( PresentFrameTokenGGP, pNext ) );
+      return *this;
+    }
+
+    PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PresentFrameTokenGGP& operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const *>(&rhs);
+      return *this;
+    }
+
+    PresentFrameTokenGGP & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PresentFrameTokenGGP & setFrameToken( GgpFrameToken frameToken_ ) VULKAN_HPP_NOEXCEPT
+    {
+      frameToken = frameToken_;
+      return *this;
+    }
+
+    operator VkPresentFrameTokenGGP const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentFrameTokenGGP*>( this );
+    }
+
+    operator VkPresentFrameTokenGGP &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentFrameTokenGGP*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PresentFrameTokenGGP const& ) const = default;
+#else
+    bool operator==( PresentFrameTokenGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( frameToken == rhs.frameToken );
+    }
+
+    bool operator!=( PresentFrameTokenGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentFrameTokenGGP;
+    const void* pNext = {};
+    GgpFrameToken frameToken = {};
+  };
+  static_assert( sizeof( PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PresentFrameTokenGGP>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  struct PresentInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR PresentInfoKHR( uint32_t waitSemaphoreCount_ = {},
+                                         const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {},
+                                         uint32_t swapchainCount_ = {},
+                                         const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ = {},
+                                         const uint32_t* pImageIndices_ = {},
+                                         VULKAN_HPP_NAMESPACE::Result* pResults_ = {} ) VULKAN_HPP_NOEXCEPT
+      : waitSemaphoreCount( waitSemaphoreCount_ )
+      , pWaitSemaphores( pWaitSemaphores_ )
+      , swapchainCount( swapchainCount_ )
+      , pSwapchains( pSwapchains_ )
+      , pImageIndices( pImageIndices_ )
+      , pResults( pResults_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PresentInfoKHR( PresentInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , waitSemaphoreCount( rhs.waitSemaphoreCount )
+      , pWaitSemaphores( rhs.pWaitSemaphores )
+      , swapchainCount( rhs.swapchainCount )
+      , pSwapchains( rhs.pSwapchains )
+      , pImageIndices( rhs.pImageIndices )
+      , pResults( rhs.pResults )
+    {}
+
+    PresentInfoKHR & operator=( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PresentInfoKHR ) - offsetof( PresentInfoKHR, pNext ) );
+      return *this;
+    }
+
+    PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PresentInfoKHR& operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    PresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      waitSemaphoreCount = waitSemaphoreCount_;
+      return *this;
+    }
+
+    PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pWaitSemaphores = pWaitSemaphores_;
+      return *this;
+    }
+
+    PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSwapchains = pSwapchains_;
+      return *this;
+    }
+
+    PresentInfoKHR & setPImageIndices( const uint32_t* pImageIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImageIndices = pImageIndices_;
+      return *this;
+    }
+
+    PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result* pResults_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pResults = pResults_;
+      return *this;
+    }
+
+    operator VkPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentInfoKHR*>( this );
+    }
+
+    operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PresentInfoKHR const& ) const = default;
+#else
+    bool operator==( PresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
+          && ( pWaitSemaphores == rhs.pWaitSemaphores )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pSwapchains == rhs.pSwapchains )
+          && ( pImageIndices == rhs.pImageIndices )
+          && ( pResults == rhs.pResults );
+    }
+
+    bool operator!=( PresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR;
+    const void* pNext = {};
+    uint32_t waitSemaphoreCount = {};
+    const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {};
+    uint32_t swapchainCount = {};
+    const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains = {};
+    const uint32_t* pImageIndices = {};
+    VULKAN_HPP_NAMESPACE::Result* pResults = {};
+  };
+  static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct RectLayerKHR
+  {
+    VULKAN_HPP_CONSTEXPR RectLayerKHR( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {},
+                                       VULKAN_HPP_NAMESPACE::Extent2D extent_ = {},
+                                       uint32_t layer_ = {} ) VULKAN_HPP_NOEXCEPT
+      : offset( offset_ )
+      , extent( extent_ )
+      , layer( layer_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RectLayerKHR( RectLayerKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : offset( rhs.offset )
+      , extent( rhs.extent )
+      , layer( rhs.layer )
+    {}
+
+    explicit RectLayerKHR( Rect2D const& rect2D,
+                           uint32_t layer_ = {} )
+      : offset( rect2D.offset )
+      , extent( rect2D.extent )
+      , layer( layer_ )
+    {}
+
+    RectLayerKHR & operator=( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( RectLayerKHR ) );
+      return *this;
+    }
+
+    RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RectLayerKHR& operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RectLayerKHR const *>(&rhs);
+      return *this;
+    }
+
+    RectLayerKHR & setOffset( VULKAN_HPP_NAMESPACE::Offset2D offset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      offset = offset_;
+      return *this;
+    }
+
+    RectLayerKHR & setExtent( VULKAN_HPP_NAMESPACE::Extent2D extent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      extent = extent_;
+      return *this;
+    }
+
+    RectLayerKHR & setLayer( uint32_t layer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layer = layer_;
+      return *this;
+    }
+
+    operator VkRectLayerKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRectLayerKHR*>( this );
+    }
+
+    operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRectLayerKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RectLayerKHR const& ) const = default;
+#else
+    bool operator==( RectLayerKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( offset == rhs.offset )
+          && ( extent == rhs.extent )
+          && ( layer == rhs.layer );
+    }
+
+    bool operator!=( RectLayerKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Offset2D offset = {};
+    VULKAN_HPP_NAMESPACE::Extent2D extent = {};
+    uint32_t layer = {};
+  };
+  static_assert( sizeof( RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RectLayerKHR>::value, "struct wrapper is not a standard layout!" );
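[Editorial note, not part of the patch: the extra explicit RectLayerKHR( Rect2D const&, uint32_t ) constructor above is a Vulkan-Hpp convenience with no C-API equivalent; it splits an existing Rect2D into the offset/extent pair. An illustrative sketch:

    vk::Rect2D damaged{ vk::Offset2D{ 16, 16 }, vk::Extent2D{ 256, 256 } };
    vk::RectLayerKHR rectLayer{ damaged, 0 }; // same rectangle, layer 0
]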
); + + struct PresentRegionKHR + { + VULKAN_HPP_CONSTEXPR PresentRegionKHR( uint32_t rectangleCount_ = {}, + const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles_ = {} ) VULKAN_HPP_NOEXCEPT + : rectangleCount( rectangleCount_ ) + , pRectangles( pRectangles_ ) + {} + + VULKAN_HPP_CONSTEXPR PresentRegionKHR( PresentRegionKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : rectangleCount( rhs.rectangleCount ) + , pRectangles( rhs.pRectangles ) + {} + + PresentRegionKHR & operator=( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast(this), &rhs, sizeof( PresentRegionKHR ) ); + return *this; + } + + PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + PresentRegionKHR& operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + PresentRegionKHR & setRectangleCount( uint32_t rectangleCount_ ) VULKAN_HPP_NOEXCEPT + { + rectangleCount = rectangleCount_; + return *this; + } + + PresentRegionKHR & setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles_ ) VULKAN_HPP_NOEXCEPT + { + pRectangles = pRectangles_; + return *this; + } + + operator VkPresentRegionKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( PresentRegionKHR const& ) const = default; +#else + bool operator==( PresentRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( rectangleCount == rhs.rectangleCount ) + && ( pRectangles == rhs.pRectangles ); + } + + bool operator!=( PresentRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t rectangleCount = {}; + const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles = {}; + }; + static_assert( sizeof( PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
+
+  struct PresentRegionsKHR
+  {
+    VULKAN_HPP_CONSTEXPR PresentRegionsKHR( uint32_t swapchainCount_ = {},
+                                            const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions_ = {} ) VULKAN_HPP_NOEXCEPT
+      : swapchainCount( swapchainCount_ )
+      , pRegions( pRegions_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PresentRegionsKHR( PresentRegionsKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , swapchainCount( rhs.swapchainCount )
+      , pRegions( rhs.pRegions )
+    {}
+
+    PresentRegionsKHR & operator=( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PresentRegionsKHR ) - offsetof( PresentRegionsKHR, pNext ) );
+      return *this;
+    }
+
+    PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PresentRegionsKHR& operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionsKHR const*>(&rhs);
+      return *this;
+    }
+
+    PresentRegionsKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PresentRegionsKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    PresentRegionsKHR & setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pRegions = pRegions_;
+      return *this;
+    }
+
+    operator VkPresentRegionsKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentRegionsKHR*>( this );
+    }
+
+    operator VkPresentRegionsKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentRegionsKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PresentRegionsKHR const& ) const = default;
+#else
+    bool operator==( PresentRegionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pRegions == rhs.pRegions );
+    }
+
+    bool operator!=( PresentRegionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR;
+    const void* pNext = {};
+    uint32_t swapchainCount = {};
+    const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions = {};
+  };
+  static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PresentRegionsKHR>::value, "struct wrapper is not a standard layout!" );
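+
+  // Editorial usage sketch, not generated code (assumes the `vk` alias and an
+  // existing vk::PresentInfoKHR named presentInfo): with
+  // VK_KHR_incremental_present, PresentRegionsKHR is chained into
+  // PresentInfoKHR::pNext, and its swapchainCount must match that of the
+  // PresentInfoKHR it extends:
+  //
+  //   vk::RectLayerKHR rect{ vk::Rect2D{ { 0, 0 }, { 256, 256 } }, 0 };
+  //   vk::PresentRegionKHR region = vk::PresentRegionKHR()
+  //                                     .setRectangleCount( 1 )
+  //                                     .setPRectangles( &rect );
+  //   vk::PresentRegionsKHR regions = vk::PresentRegionsKHR()
+  //                                       .setSwapchainCount( 1 )
+  //                                       .setPRegions( &region );
+  //   presentInfo.setPNext( &regions );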
+
+  struct PresentTimeGOOGLE
+  {
+    VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_ = {},
+                                            uint64_t desiredPresentTime_ = {} ) VULKAN_HPP_NOEXCEPT
+      : presentID( presentID_ )
+      , desiredPresentTime( desiredPresentTime_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( PresentTimeGOOGLE const& rhs ) VULKAN_HPP_NOEXCEPT
+      : presentID( rhs.presentID )
+      , desiredPresentTime( rhs.desiredPresentTime )
+    {}
+
+    PresentTimeGOOGLE & operator=( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( PresentTimeGOOGLE ) );
+      return *this;
+    }
+
+    PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PresentTimeGOOGLE& operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const*>(&rhs);
+      return *this;
+    }
+
+    PresentTimeGOOGLE & setPresentID( uint32_t presentID_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentID = presentID_;
+      return *this;
+    }
+
+    PresentTimeGOOGLE & setDesiredPresentTime( uint64_t desiredPresentTime_ ) VULKAN_HPP_NOEXCEPT
+    {
+      desiredPresentTime = desiredPresentTime_;
+      return *this;
+    }
+
+    operator VkPresentTimeGOOGLE const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentTimeGOOGLE*>( this );
+    }
+
+    operator VkPresentTimeGOOGLE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentTimeGOOGLE*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PresentTimeGOOGLE const& ) const = default;
+#else
+    bool operator==( PresentTimeGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( presentID == rhs.presentID )
+          && ( desiredPresentTime == rhs.desiredPresentTime );
+    }
+
+    bool operator!=( PresentTimeGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t presentID = {};
+    uint64_t desiredPresentTime = {};
+  };
+  static_assert( sizeof( PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PresentTimeGOOGLE>::value, "struct wrapper is not a standard layout!" );
+
+  struct PresentTimesInfoGOOGLE
+  {
+    VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = {},
+                                                 const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : swapchainCount( swapchainCount_ )
+      , pTimes( pTimes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( PresentTimesInfoGOOGLE const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , swapchainCount( rhs.swapchainCount )
+      , pTimes( rhs.pTimes )
+    {}
+
+    PresentTimesInfoGOOGLE & operator=( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( PresentTimesInfoGOOGLE ) - offsetof( PresentTimesInfoGOOGLE, pNext ) );
+      return *this;
+    }
+
+    PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    PresentTimesInfoGOOGLE& operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const*>(&rhs);
+      return *this;
+    }
+
+    PresentTimesInfoGOOGLE & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    PresentTimesInfoGOOGLE & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      swapchainCount = swapchainCount_;
+      return *this;
+    }
+
+    PresentTimesInfoGOOGLE & setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTimes = pTimes_;
+      return *this;
+    }
+
+    operator VkPresentTimesInfoGOOGLE const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkPresentTimesInfoGOOGLE*>( this );
+    }
+
+    operator VkPresentTimesInfoGOOGLE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkPresentTimesInfoGOOGLE*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( PresentTimesInfoGOOGLE const& ) const = default;
+#else
+    bool operator==( PresentTimesInfoGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( swapchainCount == rhs.swapchainCount )
+          && ( pTimes == rhs.pTimes );
+    }
+
+    bool operator!=( PresentTimesInfoGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE;
+    const void* pNext = {};
+    uint32_t swapchainCount = {};
+    const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes = {};
+  };
+  static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<PresentTimesInfoGOOGLE>::value, "struct wrapper is not a standard layout!" );
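+
+  // Editorial usage sketch, not generated code (assumes the `vk` alias and
+  // hypothetical presentId / desiredPresentTimeNs values): with
+  // VK_GOOGLE_display_timing, per-swapchain target times are chained into
+  // PresentInfoKHR::pNext; desiredPresentTime is expressed in nanoseconds:
+  //
+  //   vk::PresentTimeGOOGLE time{ presentId, desiredPresentTimeNs };
+  //   vk::PresentTimesInfoGOOGLE timesInfo = vk::PresentTimesInfoGOOGLE()
+  //                                              .setSwapchainCount( 1 )
+  //                                              .setPTimes( &time );
+  //   presentInfo.setPNext( &timesInfo );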
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkProtected.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkProtected.hpp
new file mode 100644
index 00000000..ff70c8f2
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkProtected.hpp
@@ -0,0 +1,176 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkExtension.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkMetal.hpp"
+#include "VkFormat.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkFence.hpp"
+#include "VkDedicated.hpp"
+#include "VkPipeline.hpp"
+#include "VkDraw.hpp"
+#include "VkDispatch.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+#include "VkPerformance.hpp"
+#include "VkFilter.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPhysical.hpp"
+#include "VkPresent.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct ProtectedSubmitInfo
+  {
+    VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {} ) VULKAN_HPP_NOEXCEPT
+      : protectedSubmit( protectedSubmit_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( ProtectedSubmitInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , protectedSubmit( rhs.protectedSubmit )
+    {}
+
+    ProtectedSubmitInfo & operator=( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ProtectedSubmitInfo ) - offsetof( ProtectedSubmitInfo, pNext ) );
+      return *this;
+    }
+
+    ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ProtectedSubmitInfo& operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const*>(&rhs);
+      return *this;
+    }
+
+    ProtectedSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ProtectedSubmitInfo & setProtectedSubmit( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ ) VULKAN_HPP_NOEXCEPT
+    {
+      protectedSubmit = protectedSubmit_;
+      return *this;
+    }
+
+    operator VkProtectedSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkProtectedSubmitInfo*>( this );
+    }
+
+    operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkProtectedSubmitInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ProtectedSubmitInfo const& ) const = default;
+#else
+    bool operator==( ProtectedSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( protectedSubmit == rhs.protectedSubmit );
+    }
+
+    bool operator!=( ProtectedSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {};
+  };
+  static_assert( sizeof( ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ProtectedSubmitInfo>::value, "struct wrapper is not a standard layout!" );
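+
+  // Editorial usage sketch, not generated code (assumes the `vk` alias and an
+  // existing vk::SubmitInfo named submitInfo): chaining ProtectedSubmitInfo
+  // into SubmitInfo::pNext marks the batch as a protected submission:
+  //
+  //   vk::ProtectedSubmitInfo protectedInfo{ VK_TRUE };
+  //   submitInfo.setPNext( &protectedInfo );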
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkPush.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkPush.hpp
new file mode 100644
index 00000000..8a29facc
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkPush.hpp
@@ -0,0 +1,103 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkExtension.hpp"
+#include "VkCoarse.hpp"
+#include "VkCommand.hpp"
+#include "VkMetal.hpp"
+#include "VkFormat.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkFence.hpp"
+#include "VkDedicated.hpp"
+#include "VkPipeline.hpp"
+#include "VkDraw.hpp"
+#include "VkDispatch.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+#include "VkPerformance.hpp"
+#include "VkFilter.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPhysical.hpp"
+#include "VkPresent.hpp"
+#include "VkProtected.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkQuery.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkQuery.hpp
new file mode 100644
index 00000000..4a90cdf9
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkQuery.hpp
@@ -0,0 +1,286 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkExtension.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkMetal.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkFence.hpp"
+#include "VkDispatch.hpp"
+#include "VkPipeline.hpp"
+#include "VkDraw.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+#include "VkPerformance.hpp"
+#include "VkFilter.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPhysical.hpp"
+#include "VkPresent.hpp"
+#include "VkProtected.hpp"
+#include "VkPush.hpp"
+#include "VkQuery.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct QueryPoolCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {},
+                                              VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion,
+                                              uint32_t queryCount_ = {},
+                                              VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , queryType( queryType_ )
+      , queryCount( queryCount_ )
+      , pipelineStatistics( pipelineStatistics_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( QueryPoolCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , queryType( rhs.queryType )
+      , queryCount( rhs.queryCount )
+      , pipelineStatistics( rhs.pipelineStatistics )
+    {}
+
+    QueryPoolCreateInfo & operator=( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( QueryPoolCreateInfo ) - offsetof( QueryPoolCreateInfo, pNext ) );
+      return *this;
+    }
+
+    QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    QueryPoolCreateInfo& operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const*>(&rhs);
+      return *this;
+    }
+
+    QueryPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queryType = queryType_;
+      return *this;
+    }
+
+    QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queryCount = queryCount_;
+      return *this;
+    }
+
+    QueryPoolCreateInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineStatistics = pipelineStatistics_;
+      return *this;
+    }
+
+    operator VkQueryPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueryPoolCreateInfo*>( this );
+    }
+
+    operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueryPoolCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( QueryPoolCreateInfo const& ) const = default;
+#else
+    bool operator==( QueryPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( queryType == rhs.queryType )
+          && ( queryCount == rhs.queryCount )
+          && ( pipelineStatistics == rhs.pipelineStatistics );
+    }
+
+    bool operator!=( QueryPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion;
+    uint32_t queryCount = {};
+    VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
+  };
+  static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueryPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
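+
+  // Editorial usage sketch, not generated code (assumes the `vk` alias, a valid
+  // vk::Device named device, and the default exception-based result handling):
+  //
+  //   vk::QueryPoolCreateInfo info{ {}, vk::QueryType::eOcclusion, 16, {} };
+  //   vk::QueryPool pool = device.createQueryPool( info );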
+
+  struct QueryPoolCreateInfoINTEL
+  {
+    VULKAN_HPP_CONSTEXPR QueryPoolCreateInfoINTEL( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual ) VULKAN_HPP_NOEXCEPT
+      : performanceCountersSampling( performanceCountersSampling_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueryPoolCreateInfoINTEL( QueryPoolCreateInfoINTEL const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , performanceCountersSampling( rhs.performanceCountersSampling )
+    {}
+
+    QueryPoolCreateInfoINTEL & operator=( QueryPoolCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( QueryPoolCreateInfoINTEL ) - offsetof( QueryPoolCreateInfoINTEL, pNext ) );
+      return *this;
+    }
+
+    QueryPoolCreateInfoINTEL( VkQueryPoolCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    QueryPoolCreateInfoINTEL& operator=( VkQueryPoolCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL const*>(&rhs);
+      return *this;
+    }
+
+    QueryPoolCreateInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    QueryPoolCreateInfoINTEL & setPerformanceCountersSampling( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT
+    {
+      performanceCountersSampling = performanceCountersSampling_;
+      return *this;
+    }
+
+    operator VkQueryPoolCreateInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueryPoolCreateInfoINTEL*>( this );
+    }
+
+    operator VkQueryPoolCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueryPoolCreateInfoINTEL*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( QueryPoolCreateInfoINTEL const& ) const = default;
+#else
+    bool operator==( QueryPoolCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( performanceCountersSampling == rhs.performanceCountersSampling );
+    }
+
+    bool operator!=( QueryPoolCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfoINTEL;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual;
+  };
+  static_assert( sizeof( QueryPoolCreateInfoINTEL ) == sizeof( VkQueryPoolCreateInfoINTEL ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueryPoolCreateInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkQueue.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkQueue.hpp
new file mode 100644
index 00000000..40bf003f
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkQueue.hpp
@@ -0,0 +1,302 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkExtension.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkMetal.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkFence.hpp"
+#include "VkDispatch.hpp"
+#include "VkPipeline.hpp"
+#include "VkDraw.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+#include "VkPerformance.hpp"
+#include "VkFilter.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPhysical.hpp"
+#include "VkPresent.hpp"
+#include "VkProtected.hpp"
+#include "VkPush.hpp"
+#include "VkQuery.hpp"
+#include "VkQueue.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct QueueFamilyCheckpointPropertiesNV
+  {
+    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {} ) VULKAN_HPP_NOEXCEPT
+      : checkpointExecutionStageMask( checkpointExecutionStageMask_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( QueueFamilyCheckpointPropertiesNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , checkpointExecutionStageMask( rhs.checkpointExecutionStageMask )
+    {}
+
+    QueueFamilyCheckpointPropertiesNV & operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( QueueFamilyCheckpointPropertiesNV ) - offsetof( QueueFamilyCheckpointPropertiesNV, pNext ) );
+      return *this;
+    }
+
+    QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    QueueFamilyCheckpointPropertiesNV& operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const*>(&rhs);
+      return *this;
+    }
+
+    operator VkQueueFamilyCheckpointPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyCheckpointPropertiesNV*>( this );
+    }
+
+    operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( QueueFamilyCheckpointPropertiesNV const& ) const = default;
+#else
+    bool operator==( QueueFamilyCheckpointPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
+    }
+
+    bool operator!=( QueueFamilyCheckpointPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {};
+  };
+  static_assert( sizeof( QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueueFamilyCheckpointPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct QueueFamilyProperties
+  {
+    VULKAN_HPP_CONSTEXPR QueueFamilyProperties( VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {},
+                                                uint32_t queueCount_ = {},
+                                                uint32_t timestampValidBits_ = {},
+                                                VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {} ) VULKAN_HPP_NOEXCEPT
+      : queueFlags( queueFlags_ )
+      , queueCount( queueCount_ )
+      , timestampValidBits( timestampValidBits_ )
+      , minImageTransferGranularity( minImageTransferGranularity_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueueFamilyProperties( QueueFamilyProperties const& rhs ) VULKAN_HPP_NOEXCEPT
+      : queueFlags( rhs.queueFlags )
+      , queueCount( rhs.queueCount )
+      , timestampValidBits( rhs.timestampValidBits )
+      , minImageTransferGranularity( rhs.minImageTransferGranularity )
+    {}
+
+    QueueFamilyProperties & operator=( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( QueueFamilyProperties ) );
+      return *this;
+    }
+
+    QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    QueueFamilyProperties& operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties const*>(&rhs);
+      return *this;
+    }
+
+    operator VkQueueFamilyProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyProperties*>( this );
+    }
+
+    operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyProperties*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( QueueFamilyProperties const& ) const = default;
+#else
+    bool operator==( QueueFamilyProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( queueFlags == rhs.queueFlags )
+          && ( queueCount == rhs.queueCount )
+          && ( timestampValidBits == rhs.timestampValidBits )
+          && ( minImageTransferGranularity == rhs.minImageTransferGranularity );
+    }
+
+    bool operator!=( QueueFamilyProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {};
+    uint32_t queueCount = {};
+    uint32_t timestampValidBits = {};
+    VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {};
+  };
+  static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueueFamilyProperties>::value, "struct wrapper is not a standard layout!" );
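+
+  // Editorial usage sketch, not generated code (assumes the `vk` alias and a
+  // valid vk::PhysicalDevice named physicalDevice): QueueFamilyProperties is
+  // typically inspected to pick a queue family, e.g. one with graphics support:
+  //
+  //   std::vector<vk::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();
+  //   for ( uint32_t i = 0; i < families.size(); ++i )
+  //     if ( families[i].queueFlags & vk::QueueFlagBits::eGraphics )
+  //       { /* queue family i supports graphics */ }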
+
+  struct QueueFamilyProperties2
+  {
+    VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {} ) VULKAN_HPP_NOEXCEPT
+      : queueFamilyProperties( queueFamilyProperties_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( QueueFamilyProperties2 const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , queueFamilyProperties( rhs.queueFamilyProperties )
+    {}
+
+    QueueFamilyProperties2 & operator=( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( QueueFamilyProperties2 ) - offsetof( QueueFamilyProperties2, pNext ) );
+      return *this;
+    }
+
+    QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    QueueFamilyProperties2& operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const*>(&rhs);
+      return *this;
+    }
+
+    operator VkQueueFamilyProperties2 const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkQueueFamilyProperties2*>( this );
+    }
+
+    operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkQueueFamilyProperties2*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( QueueFamilyProperties2 const& ) const = default;
+#else
+    bool operator==( QueueFamilyProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( queueFamilyProperties == rhs.queueFamilyProperties );
+    }
+
+    bool operator!=( QueueFamilyProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {};
+  };
+  static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<QueueFamilyProperties2>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkRay.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkRay.hpp
new file mode 100644
index 00000000..177cd8ed
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkRay.hpp
@@ -0,0 +1,387 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkExtension.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkMetal.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkFence.hpp"
+#include "VkDispatch.hpp"
+#include "VkPipeline.hpp"
+#include "VkDraw.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkExtent.hpp"
+#include "VkPerformance.hpp"
+#include "VkFilter.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPhysical.hpp"
+#include "VkPresent.hpp"
+#include "VkProtected.hpp"
+#include "VkPush.hpp"
+#include "VkQuery.hpp"
+#include "VkQueue.hpp"
+#include "VkRay.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct RayTracingShaderGroupCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV::eGeneral,
+                                                            uint32_t generalShader_ = {},
+                                                            uint32_t closestHitShader_ = {},
+                                                            uint32_t anyHitShader_ = {},
+                                                            uint32_t intersectionShader_ = {} ) VULKAN_HPP_NOEXCEPT
+      : type( type_ )
+      , generalShader( generalShader_ )
+      , closestHitShader( closestHitShader_ )
+      , anyHitShader( anyHitShader_ )
+      , intersectionShader( intersectionShader_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupCreateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , type( rhs.type )
+      , generalShader( rhs.generalShader )
+      , closestHitShader( rhs.closestHitShader )
+      , anyHitShader( rhs.anyHitShader )
+      , intersectionShader( rhs.intersectionShader )
+    {}
+
+    RayTracingShaderGroupCreateInfoNV & operator=( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( RayTracingShaderGroupCreateInfoNV ) - offsetof( RayTracingShaderGroupCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RayTracingShaderGroupCreateInfoNV& operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const*>(&rhs);
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      generalShader = generalShader_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      closestHitShader = closestHitShader_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      anyHitShader = anyHitShader_;
+      return *this;
+    }
+
+    RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
+    {
+      intersectionShader = intersectionShader_;
+      return *this;
+    }
+
+    operator VkRayTracingShaderGroupCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoNV*>( this );
+    }
+
+    operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RayTracingShaderGroupCreateInfoNV const& ) const = default;
+#else
+    bool operator==( RayTracingShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( type == rhs.type )
+          && ( generalShader == rhs.generalShader )
+          && ( closestHitShader == rhs.closestHitShader )
+          && ( anyHitShader == rhs.anyHitShader )
+          && ( intersectionShader == rhs.intersectionShader );
+    }
+
+    bool operator!=( RayTracingShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV::eGeneral;
+    uint32_t generalShader = {};
+    uint32_t closestHitShader = {};
+    uint32_t anyHitShader = {};
+    uint32_t intersectionShader = {};
+  };
+  static_assert( sizeof( RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RayTracingShaderGroupCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
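+
+  // Editorial usage sketch, not generated code (assumes the `vk` alias): a
+  // "general" group references only a raygen/miss/callable stage index, and the
+  // hit- and intersection-shader slots are marked unused with VK_SHADER_UNUSED_NV:
+  //
+  //   vk::RayTracingShaderGroupCreateInfoNV group{
+  //       vk::RayTracingShaderGroupTypeNV::eGeneral,
+  //       0,                    // index of the raygen stage in pStages
+  //       VK_SHADER_UNUSED_NV, VK_SHADER_UNUSED_NV, VK_SHADER_UNUSED_NV };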
+
+  struct RayTracingPipelineCreateInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {},
+                                                         uint32_t stageCount_ = {},
+                                                         const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {},
+                                                         uint32_t groupCount_ = {},
+                                                         const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ = {},
+                                                         uint32_t maxRecursionDepth_ = {},
+                                                         VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {},
+                                                         VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {},
+                                                         int32_t basePipelineIndex_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , stageCount( stageCount_ )
+      , pStages( pStages_ )
+      , groupCount( groupCount_ )
+      , pGroups( pGroups_ )
+      , maxRecursionDepth( maxRecursionDepth_ )
+      , layout( layout_ )
+      , basePipelineHandle( basePipelineHandle_ )
+      , basePipelineIndex( basePipelineIndex_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( RayTracingPipelineCreateInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , stageCount( rhs.stageCount )
+      , pStages( rhs.pStages )
+      , groupCount( rhs.groupCount )
+      , pGroups( rhs.pGroups )
+      , maxRecursionDepth( rhs.maxRecursionDepth )
+      , layout( rhs.layout )
+      , basePipelineHandle( rhs.basePipelineHandle )
+      , basePipelineIndex( rhs.basePipelineIndex )
+    {}
+
+    RayTracingPipelineCreateInfoNV & operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( RayTracingPipelineCreateInfoNV ) - offsetof( RayTracingPipelineCreateInfoNV, pNext ) );
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RayTracingPipelineCreateInfoNV& operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const*>(&rhs);
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      stageCount = stageCount_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pStages = pStages_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      groupCount = groupCount_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pGroups = pGroups_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxRecursionDepth = maxRecursionDepth_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
+    {
+      layout = layout_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineHandle = basePipelineHandle_;
+      return *this;
+    }
+
+    RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      basePipelineIndex = basePipelineIndex_;
+      return *this;
+    }
+
+    operator VkRayTracingPipelineCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( this );
+    }
+
+    operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRayTracingPipelineCreateInfoNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RayTracingPipelineCreateInfoNV const& ) const = default;
+#else
+    bool operator==( RayTracingPipelineCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( stageCount == rhs.stageCount )
+          && ( pStages == rhs.pStages )
+          && ( groupCount == rhs.groupCount )
+          && ( pGroups == rhs.pGroups )
+          && ( maxRecursionDepth == rhs.maxRecursionDepth )
+          && ( layout == rhs.layout )
+          && ( basePipelineHandle == rhs.basePipelineHandle )
+          && ( basePipelineIndex == rhs.basePipelineIndex );
+    }
+
+    bool operator!=( RayTracingPipelineCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
+    uint32_t stageCount = {};
+    const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {};
+    uint32_t groupCount = {};
+    const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups = {};
+    uint32_t maxRecursionDepth = {};
+    VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
+    VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
+    int32_t basePipelineIndex = {};
+  };
+  static_assert( sizeof( RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RayTracingPipelineCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkRect.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkRect.hpp
new file mode 100644
index 00000000..4242c07a
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkRect.hpp
@@ -0,0 +1,108 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkExtension.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkMetal.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkFence.hpp"
+#include "VkDispatch.hpp"
+#include "VkPipeline.hpp"
+#include "VkDraw.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkRay.hpp"
+#include "VkExtent.hpp"
+#include "VkPerformance.hpp"
+#include "VkFilter.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPhysical.hpp"
+#include "VkPresent.hpp"
+#include "VkProtected.hpp"
+#include "VkPush.hpp"
+#include "VkQuery.hpp"
+#include "VkQueue.hpp"
+#include "VkRect.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkRefresh.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkRefresh.hpp
new file mode 100644
index 00000000..33f73fe1
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkRefresh.hpp
@@ -0,0 +1,165 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkExtension.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkMetal.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkFence.hpp"
+#include "VkDispatch.hpp"
+#include "VkPipeline.hpp"
+#include "VkDraw.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkRay.hpp"
+#include "VkExtent.hpp"
+#include "VkPerformance.hpp"
+#include "VkFilter.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPhysical.hpp"
+#include "VkPresent.hpp"
+#include "VkProtected.hpp"
+#include "VkPush.hpp"
+#include "VkQuery.hpp"
+#include "VkQueue.hpp"
+#include "VkRect.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct RefreshCycleDurationGOOGLE
+  {
+    VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = {} ) VULKAN_HPP_NOEXCEPT
+      : refreshDuration( refreshDuration_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( RefreshCycleDurationGOOGLE const& rhs ) VULKAN_HPP_NOEXCEPT
+      : refreshDuration( rhs.refreshDuration )
+    {}
+
+    RefreshCycleDurationGOOGLE & operator=( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( RefreshCycleDurationGOOGLE ) );
+      return *this;
+    }
+
+    RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RefreshCycleDurationGOOGLE& operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const*>(&rhs);
+      return *this;
+    }
+
+    operator VkRefreshCycleDurationGOOGLE const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRefreshCycleDurationGOOGLE*>( this );
+    }
+
+    operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RefreshCycleDurationGOOGLE const& ) const = default;
+#else
+    bool operator==( RefreshCycleDurationGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( refreshDuration == rhs.refreshDuration );
+    }
+
+    bool operator!=( RefreshCycleDurationGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint64_t refreshDuration = {};
+  };
+  static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RefreshCycleDurationGOOGLE>::value, "struct wrapper is not a standard layout!" );
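+
+  // Editorial usage sketch, not generated code (assumes the `vk` alias, a valid
+  // vk::Device named device and a vk::SwapchainKHR named swapchain, with the
+  // default exception-based result handling):
+  //
+  //   vk::RefreshCycleDurationGOOGLE rc = device.getRefreshCycleDurationGOOGLE( swapchain );
+  //   uint64_t refreshPeriodNs = rc.refreshDuration;  // display refresh period in nanoseconds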
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkRender.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkRender.hpp
new file mode 100644
index 00000000..3bef6257
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkRender.hpp
@@ -0,0 +1,1710 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkExtension.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkMetal.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkFence.hpp"
+#include "VkDispatch.hpp"
+#include "VkPipeline.hpp"
+#include "VkDraw.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkRay.hpp"
+#include "VkExtent.hpp"
+#include "VkPerformance.hpp"
+#include "VkFilter.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPhysical.hpp"
+#include "VkPresent.hpp"
+#include "VkProtected.hpp"
+#include "VkPush.hpp"
+#include "VkQuery.hpp"
+#include "VkQueue.hpp"
+#include "VkRect.hpp"
+#include "VkRefresh.hpp"
+#include "VkRender.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct RenderPassAttachmentBeginInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfoKHR( uint32_t attachmentCount_ = {},
+                                                           const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
+      : attachmentCount( attachmentCount_ )
+      , pAttachments( pAttachments_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfoKHR( RenderPassAttachmentBeginInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , attachmentCount( rhs.attachmentCount )
+      , pAttachments( rhs.pAttachments )
+    {}
+
+    RenderPassAttachmentBeginInfoKHR & operator=( RenderPassAttachmentBeginInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( RenderPassAttachmentBeginInfoKHR ) - offsetof( RenderPassAttachmentBeginInfoKHR, pNext ) );
+      return *this;
+    }
+
+    RenderPassAttachmentBeginInfoKHR( VkRenderPassAttachmentBeginInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RenderPassAttachmentBeginInfoKHR& operator=( VkRenderPassAttachmentBeginInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    RenderPassAttachmentBeginInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassAttachmentBeginInfoKHR & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    RenderPassAttachmentBeginInfoKHR & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+    operator VkRenderPassAttachmentBeginInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassAttachmentBeginInfoKHR*>( this );
+    }
+
+    operator VkRenderPassAttachmentBeginInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassAttachmentBeginInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RenderPassAttachmentBeginInfoKHR const& ) const = default;
+#else
+    bool operator==( RenderPassAttachmentBeginInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments );
+    }
+
+    bool operator!=( RenderPassAttachmentBeginInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfoKHR;
+    const void* pNext = {};
+    uint32_t attachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::ImageView* pAttachments = {};
+  };
+  static_assert( sizeof( RenderPassAttachmentBeginInfoKHR ) == sizeof( VkRenderPassAttachmentBeginInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassAttachmentBeginInfoKHR>::value, "struct wrapper is not a standard layout!" );
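+
+  // Usage sketch (illustrative comment only): with VK_KHR_imageless_framebuffer,
+  // the concrete image views are supplied at begin time by chaining this struct
+  // into RenderPassBeginInfo::pNext. `imageView` is an assumed vk::ImageView
+  // handle matching the framebuffer's attachment description.
+  //
+  //   vk::ImageView attachments[1] = { imageView };
+  //   vk::RenderPassAttachmentBeginInfoKHR attachmentBegin{ 1, attachments };
+  //   // later: renderPassBeginInfo.setPNext( &attachmentBegin );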
+
+  struct RenderPassBeginInfo
+  {
+    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {},
+                                                 uint32_t clearValueCount_ = {},
+                                                 const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ = {} ) VULKAN_HPP_NOEXCEPT
+      : renderPass( renderPass_ )
+      , framebuffer( framebuffer_ )
+      , renderArea( renderArea_ )
+      , clearValueCount( clearValueCount_ )
+      , pClearValues( pClearValues_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , renderPass( rhs.renderPass )
+      , framebuffer( rhs.framebuffer )
+      , renderArea( rhs.renderArea )
+      , clearValueCount( rhs.clearValueCount )
+      , pClearValues( rhs.pClearValues )
+    {}
+
+    RenderPassBeginInfo & operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( RenderPassBeginInfo ) - offsetof( RenderPassBeginInfo, pNext ) );
+      return *this;
+    }
+
+    RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RenderPassBeginInfo& operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const *>(&rhs);
+      return *this;
+    }
+
+    RenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderPass = renderPass_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
+    {
+      framebuffer = framebuffer_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D renderArea_ ) VULKAN_HPP_NOEXCEPT
+    {
+      renderArea = renderArea_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clearValueCount = clearValueCount_;
+      return *this;
+    }
+
+    RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pClearValues = pClearValues_;
+      return *this;
+    }
+
+    operator VkRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassBeginInfo*>( this );
+    }
+
+    operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassBeginInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RenderPassBeginInfo const& ) const = default;
+#else
+    bool operator==( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( renderPass == rhs.renderPass )
+          && ( framebuffer == rhs.framebuffer )
+          && ( renderArea == rhs.renderArea )
+          && ( clearValueCount == rhs.clearValueCount )
+          && ( pClearValues == rhs.pClearValues );
+    }
+
+    bool operator!=( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
+    VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
+    uint32_t clearValueCount = {};
+    const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues = {};
+  };
+  static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
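+
+  // Usage sketch (illustrative comment only; `cmd`, `renderPass` and
+  // `framebuffer` are assumed valid handles, `extent` the framebuffer size):
+  //
+  //   vk::ClearValue clear{ vk::ClearColorValue( std::array<float, 4>{ 0.f, 0.f, 0.f, 1.f } ) };
+  //   auto beginInfo = vk::RenderPassBeginInfo()
+  //                        .setRenderPass( renderPass )
+  //                        .setFramebuffer( framebuffer )
+  //                        .setRenderArea( vk::Rect2D( {}, extent ) )
+  //                        .setClearValueCount( 1 )
+  //                        .setPClearValues( &clear );
+  //   cmd.beginRenderPass( beginInfo, vk::SubpassContents::eInline );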
+
+  struct SubpassDescription
+  {
+    VULKAN_HPP_CONSTEXPR SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {},
+                                             VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics,
+                                             uint32_t inputAttachmentCount_ = {},
+                                             const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ = {},
+                                             uint32_t colorAttachmentCount_ = {},
+                                             const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ = {},
+                                             const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ = {},
+                                             const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ = {},
+                                             uint32_t preserveAttachmentCount_ = {},
+                                             const uint32_t* pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , pipelineBindPoint( pipelineBindPoint_ )
+      , inputAttachmentCount( inputAttachmentCount_ )
+      , pInputAttachments( pInputAttachments_ )
+      , colorAttachmentCount( colorAttachmentCount_ )
+      , pColorAttachments( pColorAttachments_ )
+      , pResolveAttachments( pResolveAttachments_ )
+      , pDepthStencilAttachment( pDepthStencilAttachment_ )
+      , preserveAttachmentCount( preserveAttachmentCount_ )
+      , pPreserveAttachments( pPreserveAttachments_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassDescription( SubpassDescription const& rhs ) VULKAN_HPP_NOEXCEPT
+      : flags( rhs.flags )
+      , pipelineBindPoint( rhs.pipelineBindPoint )
+      , inputAttachmentCount( rhs.inputAttachmentCount )
+      , pInputAttachments( rhs.pInputAttachments )
+      , colorAttachmentCount( rhs.colorAttachmentCount )
+      , pColorAttachments( rhs.pColorAttachments )
+      , pResolveAttachments( rhs.pResolveAttachments )
+      , pDepthStencilAttachment( rhs.pDepthStencilAttachment )
+      , preserveAttachmentCount( rhs.preserveAttachmentCount )
+      , pPreserveAttachments( rhs.pPreserveAttachments )
+    {}
+
+    SubpassDescription & operator=( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( SubpassDescription ) );
+      return *this;
+    }
+
+    SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SubpassDescription& operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription const *>(&rhs);
+      return *this;
+    }
+
+    SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SubpassDescription & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pipelineBindPoint = pipelineBindPoint_;
+      return *this;
+    }
+
+    SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      inputAttachmentCount = inputAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInputAttachments = pInputAttachments_;
+      return *this;
+    }
+
+    SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      colorAttachmentCount = colorAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pColorAttachments = pColorAttachments_;
+      return *this;
+    }
+
+    SubpassDescription & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pResolveAttachments = pResolveAttachments_;
+      return *this;
+    }
+
+    SubpassDescription & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDepthStencilAttachment = pDepthStencilAttachment_;
+      return *this;
+    }
+
+    SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preserveAttachmentCount = preserveAttachmentCount_;
+      return *this;
+    }
+
+    SubpassDescription & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPreserveAttachments = pPreserveAttachments_;
+      return *this;
+    }
+
+    operator VkSubpassDescription const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassDescription*>( this );
+    }
+
+    operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassDescription*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SubpassDescription const& ) const = default;
+#else
+    bool operator==( SubpassDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( flags == rhs.flags )
+          && ( pipelineBindPoint == rhs.pipelineBindPoint )
+          && ( inputAttachmentCount == rhs.inputAttachmentCount )
+          && ( pInputAttachments == rhs.pInputAttachments )
+          && ( colorAttachmentCount == rhs.colorAttachmentCount )
+          && ( pColorAttachments == rhs.pColorAttachments )
+          && ( pResolveAttachments == rhs.pResolveAttachments )
+          && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
+          && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
+          && ( pPreserveAttachments == rhs.pPreserveAttachments );
+    }
+
+    bool operator!=( SubpassDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
+    VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
+    uint32_t inputAttachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments = {};
+    uint32_t colorAttachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment = {};
+    uint32_t preserveAttachmentCount = {};
+    const uint32_t* pPreserveAttachments = {};
+  };
+  static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassDescription>::value, "struct wrapper is not a standard layout!" );
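+
+  // Usage sketch (illustrative comment only): a single graphics subpass that
+  // writes one colour attachment; `colorRef` references attachment 0 of the
+  // enclosing render pass.
+  //
+  //   vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );
+  //   auto subpass = vk::SubpassDescription()
+  //                      .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
+  //                      .setColorAttachmentCount( 1 )
+  //                      .setPColorAttachments( &colorRef );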
+
+  struct SubpassDependency
+  {
+    VULKAN_HPP_CONSTEXPR SubpassDependency( uint32_t srcSubpass_ = {},
+                                            uint32_t dstSubpass_ = {},
+                                            VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {},
+                                            VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {},
+                                            VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {},
+                                            VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {},
+                                            VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {} ) VULKAN_HPP_NOEXCEPT
+      : srcSubpass( srcSubpass_ )
+      , dstSubpass( dstSubpass_ )
+      , srcStageMask( srcStageMask_ )
+      , dstStageMask( dstStageMask_ )
+      , srcAccessMask( srcAccessMask_ )
+      , dstAccessMask( dstAccessMask_ )
+      , dependencyFlags( dependencyFlags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassDependency( SubpassDependency const& rhs ) VULKAN_HPP_NOEXCEPT
+      : srcSubpass( rhs.srcSubpass )
+      , dstSubpass( rhs.dstSubpass )
+      , srcStageMask( rhs.srcStageMask )
+      , dstStageMask( rhs.dstStageMask )
+      , srcAccessMask( rhs.srcAccessMask )
+      , dstAccessMask( rhs.dstAccessMask )
+      , dependencyFlags( rhs.dependencyFlags )
+    {}
+
+    SubpassDependency & operator=( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( SubpassDependency ) );
+      return *this;
+    }
+
+    SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SubpassDependency& operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency const *>(&rhs);
+      return *this;
+    }
+
+    SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcSubpass = srcSubpass_;
+      return *this;
+    }
+
+    SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSubpass = dstSubpass_;
+      return *this;
+    }
+
+    SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcStageMask = srcStageMask_;
+      return *this;
+    }
+
+    SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstStageMask = dstStageMask_;
+      return *this;
+    }
+
+    SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      srcAccessMask = srcAccessMask_;
+      return *this;
+    }
+
+    SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstAccessMask = dstAccessMask_;
+      return *this;
+    }
+
+    SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyFlags = dependencyFlags_;
+      return *this;
+    }
+
+    operator VkSubpassDependency const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassDependency*>( this );
+    }
+
+    operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassDependency*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SubpassDependency const& ) const = default;
+#else
+    bool operator==( SubpassDependency const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( srcSubpass == rhs.srcSubpass )
+          && ( dstSubpass == rhs.dstSubpass )
+          && ( srcStageMask == rhs.srcStageMask )
+          && ( dstStageMask == rhs.dstStageMask )
+          && ( srcAccessMask == rhs.srcAccessMask )
+          && ( dstAccessMask == rhs.dstAccessMask )
+          && ( dependencyFlags == rhs.dependencyFlags );
+    }
+
+    bool operator!=( SubpassDependency const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t srcSubpass = {};
+    uint32_t dstSubpass = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
+    VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
+    VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
+    VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
+  };
+  static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassDependency>::value, "struct wrapper is not a standard layout!" );
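+
+  // Usage sketch (illustrative comment only): a typical external dependency
+  // ordering subpass 0's colour writes after whatever precedes the render
+  // pass, then fed into the RenderPassCreateInfo defined just below
+  // (`device`, `attachment` and `subpass` are assumed to be set up as in the
+  // sketches above).
+  //
+  //   auto dependency = vk::SubpassDependency()
+  //                         .setSrcSubpass( VK_SUBPASS_EXTERNAL )
+  //                         .setDstSubpass( 0 )
+  //                         .setSrcStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
+  //                         .setDstStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
+  //                         .setDstAccessMask( vk::AccessFlagBits::eColorAttachmentWrite );
+  //   vk::RenderPassCreateInfo createInfo( {}, 1, &attachment, 1, &subpass, 1, &dependency );
+  //   vk::RenderPass renderPass = device.createRenderPass( createInfo );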
+
+  struct RenderPassCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {},
+                                               uint32_t attachmentCount_ = {},
+                                               const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ = {},
+                                               uint32_t subpassCount_ = {},
+                                               const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ = {},
+                                               uint32_t dependencyCount_ = {},
+                                               const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , attachmentCount( attachmentCount_ )
+      , pAttachments( pAttachments_ )
+      , subpassCount( subpassCount_ )
+      , pSubpasses( pSubpasses_ )
+      , dependencyCount( dependencyCount_ )
+      , pDependencies( pDependencies_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , attachmentCount( rhs.attachmentCount )
+      , pAttachments( rhs.pAttachments )
+      , subpassCount( rhs.subpassCount )
+      , pSubpasses( rhs.pSubpasses )
+      , dependencyCount( rhs.dependencyCount )
+      , pDependencies( rhs.pDependencies )
+    {}
+
+    RenderPassCreateInfo & operator=( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( RenderPassCreateInfo ) - offsetof( RenderPassCreateInfo, pNext ) );
+      return *this;
+    }
+
+    RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RenderPassCreateInfo& operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    RenderPassCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentCount = attachmentCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachments = pAttachments_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = subpassCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSubpasses = pSubpasses_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = dependencyCount_;
+      return *this;
+    }
+
+    RenderPassCreateInfo & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDependencies = pDependencies_;
+      return *this;
+    }
+
+    operator VkRenderPassCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassCreateInfo*>( this );
+    }
+
+    operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RenderPassCreateInfo const& ) const = default;
+#else
+    bool operator==( RenderPassCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( attachmentCount == rhs.attachmentCount )
+          && ( pAttachments == rhs.pAttachments )
+          && ( subpassCount == rhs.subpassCount )
+          && ( pSubpasses == rhs.pSubpasses )
+          && ( dependencyCount == rhs.dependencyCount )
+          && ( pDependencies == rhs.pDependencies );
+    }
+
+    bool operator!=( RenderPassCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
+    uint32_t attachmentCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments = {};
+    uint32_t subpassCount = {};
+    const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses = {};
+    uint32_t dependencyCount = {};
+    const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies = {};
+  };
+  static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassCreateInfo>::value, "struct wrapper is not a standard layout!"
); + + struct SubpassDescription2KHR + { + VULKAN_HPP_CONSTEXPR SubpassDescription2KHR( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, + uint32_t viewMask_ = {}, + uint32_t inputAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pInputAttachments_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pColorAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pResolveAttachments_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pDepthStencilAttachment_ = {}, + uint32_t preserveAttachmentCount_ = {}, + const uint32_t* pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , pipelineBindPoint( pipelineBindPoint_ ) + , viewMask( viewMask_ ) + , inputAttachmentCount( inputAttachmentCount_ ) + , pInputAttachments( pInputAttachments_ ) + , colorAttachmentCount( colorAttachmentCount_ ) + , pColorAttachments( pColorAttachments_ ) + , pResolveAttachments( pResolveAttachments_ ) + , pDepthStencilAttachment( pDepthStencilAttachment_ ) + , preserveAttachmentCount( preserveAttachmentCount_ ) + , pPreserveAttachments( pPreserveAttachments_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassDescription2KHR( SubpassDescription2KHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , flags( rhs.flags ) + , pipelineBindPoint( rhs.pipelineBindPoint ) + , viewMask( rhs.viewMask ) + , inputAttachmentCount( rhs.inputAttachmentCount ) + , pInputAttachments( rhs.pInputAttachments ) + , colorAttachmentCount( rhs.colorAttachmentCount ) + , pColorAttachments( rhs.pColorAttachments ) + , pResolveAttachments( rhs.pResolveAttachments ) + , pDepthStencilAttachment( rhs.pDepthStencilAttachment ) + , preserveAttachmentCount( rhs.preserveAttachmentCount ) + , pPreserveAttachments( rhs.pPreserveAttachments ) + {} + + SubpassDescription2KHR & operator=( SubpassDescription2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( SubpassDescription2KHR ) - offsetof( SubpassDescription2KHR, pNext ) ); + return *this; + } + + SubpassDescription2KHR( VkSubpassDescription2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SubpassDescription2KHR& operator=( VkSubpassDescription2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + SubpassDescription2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SubpassDescription2KHR & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + SubpassDescription2KHR & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBindPoint = pipelineBindPoint_; + return *this; + } + + SubpassDescription2KHR & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + { + viewMask = viewMask_; + return *this; + } + + SubpassDescription2KHR & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + inputAttachmentCount = inputAttachmentCount_; + return *this; + } + + SubpassDescription2KHR & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pInputAttachments = pInputAttachments_; + return *this; + } + + SubpassDescription2KHR & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + 
{ + colorAttachmentCount = colorAttachmentCount_; + return *this; + } + + SubpassDescription2KHR & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pColorAttachments = pColorAttachments_; + return *this; + } + + SubpassDescription2KHR & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pResolveAttachments = pResolveAttachments_; + return *this; + } + + SubpassDescription2KHR & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilAttachment = pDepthStencilAttachment_; + return *this; + } + + SubpassDescription2KHR & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + { + preserveAttachmentCount = preserveAttachmentCount_; + return *this; + } + + SubpassDescription2KHR & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pPreserveAttachments = pPreserveAttachments_; + return *this; + } + + operator VkSubpassDescription2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDescription2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SubpassDescription2KHR const& ) const = default; +#else + bool operator==( SubpassDescription2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( pipelineBindPoint == rhs.pipelineBindPoint ) + && ( viewMask == rhs.viewMask ) + && ( inputAttachmentCount == rhs.inputAttachmentCount ) + && ( pInputAttachments == rhs.pInputAttachments ) + && ( colorAttachmentCount == rhs.colorAttachmentCount ) + && ( pColorAttachments == rhs.pColorAttachments ) + && ( pResolveAttachments == rhs.pResolveAttachments ) + && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment ) + && ( preserveAttachmentCount == rhs.preserveAttachmentCount ) + && ( pPreserveAttachments == rhs.pPreserveAttachments ); + } + + bool operator!=( SubpassDescription2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2KHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {}; + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics; + uint32_t viewMask = {}; + uint32_t inputAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pInputAttachments = {}; + uint32_t colorAttachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pColorAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pResolveAttachments = {}; + const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pDepthStencilAttachment = {}; + uint32_t preserveAttachmentCount = {}; + const uint32_t* pPreserveAttachments = {}; + }; + static_assert( sizeof( SubpassDescription2KHR ) == sizeof( VkSubpassDescription2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); + + struct SubpassDependency2KHR + { + VULKAN_HPP_CONSTEXPR SubpassDependency2KHR( uint32_t srcSubpass_ = {}, + uint32_t dstSubpass_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, + int32_t viewOffset_ = {} ) VULKAN_HPP_NOEXCEPT + : srcSubpass( srcSubpass_ ) + , dstSubpass( dstSubpass_ ) + , srcStageMask( srcStageMask_ ) + , dstStageMask( dstStageMask_ ) + , srcAccessMask( srcAccessMask_ ) + , dstAccessMask( dstAccessMask_ ) + , dependencyFlags( dependencyFlags_ ) + , viewOffset( viewOffset_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassDependency2KHR( SubpassDependency2KHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , srcSubpass( rhs.srcSubpass ) + , dstSubpass( rhs.dstSubpass ) + , srcStageMask( rhs.srcStageMask ) + , dstStageMask( rhs.dstStageMask ) + , srcAccessMask( rhs.srcAccessMask ) + , dstAccessMask( rhs.dstAccessMask ) + , dependencyFlags( rhs.dependencyFlags ) + , viewOffset( rhs.viewOffset ) + {} + + SubpassDependency2KHR & operator=( SubpassDependency2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( SubpassDependency2KHR ) - offsetof( SubpassDependency2KHR, pNext ) ); + return *this; + } + + SubpassDependency2KHR( VkSubpassDependency2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SubpassDependency2KHR& operator=( VkSubpassDependency2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + SubpassDependency2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SubpassDependency2KHR & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT + { + srcSubpass = srcSubpass_; + return *this; + } + + SubpassDependency2KHR & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT + { + dstSubpass = dstSubpass_; + return *this; + } + + SubpassDependency2KHR & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT + { + srcStageMask = srcStageMask_; + return *this; + } + + SubpassDependency2KHR & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + dstStageMask = dstStageMask_; + return *this; + } + + SubpassDependency2KHR & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + srcAccessMask = srcAccessMask_; + return *this; + } + + SubpassDependency2KHR & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT + { + dstAccessMask = dstAccessMask_; + return *this; + } + + SubpassDependency2KHR & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT + { + dependencyFlags = dependencyFlags_; + return *this; + } + + SubpassDependency2KHR & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT + { + viewOffset = viewOffset_; + return *this; + } + + operator VkSubpassDependency2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassDependency2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SubpassDependency2KHR const& ) const = default; +#else + bool operator==( SubpassDependency2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( 
sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( srcSubpass == rhs.srcSubpass ) + && ( dstSubpass == rhs.dstSubpass ) + && ( srcStageMask == rhs.srcStageMask ) + && ( dstStageMask == rhs.dstStageMask ) + && ( srcAccessMask == rhs.srcAccessMask ) + && ( dstAccessMask == rhs.dstAccessMask ) + && ( dependencyFlags == rhs.dependencyFlags ) + && ( viewOffset == rhs.viewOffset ); + } + + bool operator!=( SubpassDependency2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2KHR; + const void* pNext = {}; + uint32_t srcSubpass = {}; + uint32_t dstSubpass = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {}; + VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {}; + VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {}; + VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {}; + int32_t viewOffset = {}; + }; + static_assert( sizeof( SubpassDependency2KHR ) == sizeof( VkSubpassDependency2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct RenderPassCreateInfo2KHR + { + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, + uint32_t attachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::AttachmentDescription2KHR* pAttachments_ = {}, + uint32_t subpassCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDescription2KHR* pSubpasses_ = {}, + uint32_t dependencyCount_ = {}, + const VULKAN_HPP_NAMESPACE::SubpassDependency2KHR* pDependencies_ = {}, + uint32_t correlatedViewMaskCount_ = {}, + const uint32_t* pCorrelatedViewMasks_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , attachmentCount( attachmentCount_ ) + , pAttachments( pAttachments_ ) + , subpassCount( subpassCount_ ) + , pSubpasses( pSubpasses_ ) + , dependencyCount( dependencyCount_ ) + , pDependencies( pDependencies_ ) + , correlatedViewMaskCount( correlatedViewMaskCount_ ) + , pCorrelatedViewMasks( pCorrelatedViewMasks_ ) + {} + + VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2KHR( RenderPassCreateInfo2KHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , flags( rhs.flags ) + , attachmentCount( rhs.attachmentCount ) + , pAttachments( rhs.pAttachments ) + , subpassCount( rhs.subpassCount ) + , pSubpasses( rhs.pSubpasses ) + , dependencyCount( rhs.dependencyCount ) + , pDependencies( rhs.pDependencies ) + , correlatedViewMaskCount( rhs.correlatedViewMaskCount ) + , pCorrelatedViewMasks( rhs.pCorrelatedViewMasks ) + {} + + RenderPassCreateInfo2KHR & operator=( RenderPassCreateInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( RenderPassCreateInfo2KHR ) - offsetof( RenderPassCreateInfo2KHR, pNext ) ); + return *this; + } + + RenderPassCreateInfo2KHR( VkRenderPassCreateInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + RenderPassCreateInfo2KHR& operator=( VkRenderPassCreateInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + RenderPassCreateInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + RenderPassCreateInfo2KHR & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + RenderPassCreateInfo2KHR & setAttachmentCount( uint32_t attachmentCount_ 
) VULKAN_HPP_NOEXCEPT + { + attachmentCount = attachmentCount_; + return *this; + } + + RenderPassCreateInfo2KHR & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2KHR* pAttachments_ ) VULKAN_HPP_NOEXCEPT + { + pAttachments = pAttachments_; + return *this; + } + + RenderPassCreateInfo2KHR & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT + { + subpassCount = subpassCount_; + return *this; + } + + RenderPassCreateInfo2KHR & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2KHR* pSubpasses_ ) VULKAN_HPP_NOEXCEPT + { + pSubpasses = pSubpasses_; + return *this; + } + + RenderPassCreateInfo2KHR & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT + { + dependencyCount = dependencyCount_; + return *this; + } + + RenderPassCreateInfo2KHR & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2KHR* pDependencies_ ) VULKAN_HPP_NOEXCEPT + { + pDependencies = pDependencies_; + return *this; + } + + RenderPassCreateInfo2KHR & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT + { + correlatedViewMaskCount = correlatedViewMaskCount_; + return *this; + } + + RenderPassCreateInfo2KHR & setPCorrelatedViewMasks( const uint32_t* pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT + { + pCorrelatedViewMasks = pCorrelatedViewMasks_; + return *this; + } + + operator VkRenderPassCreateInfo2KHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkRenderPassCreateInfo2KHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( RenderPassCreateInfo2KHR const& ) const = default; +#else + bool operator==( RenderPassCreateInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( attachmentCount == rhs.attachmentCount ) + && ( pAttachments == rhs.pAttachments ) + && ( subpassCount == rhs.subpassCount ) + && ( pSubpasses == rhs.pSubpasses ) + && ( dependencyCount == rhs.dependencyCount ) + && ( pDependencies == rhs.pDependencies ) + && ( correlatedViewMaskCount == rhs.correlatedViewMaskCount ) + && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks ); + } + + bool operator!=( RenderPassCreateInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2KHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {}; + uint32_t attachmentCount = {}; + const VULKAN_HPP_NAMESPACE::AttachmentDescription2KHR* pAttachments = {}; + uint32_t subpassCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDescription2KHR* pSubpasses = {}; + uint32_t dependencyCount = {}; + const VULKAN_HPP_NAMESPACE::SubpassDependency2KHR* pDependencies = {}; + uint32_t correlatedViewMaskCount = {}; + const uint32_t* pCorrelatedViewMasks = {}; + }; + static_assert( sizeof( RenderPassCreateInfo2KHR ) == sizeof( VkRenderPassCreateInfo2KHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
);
+
+  struct RenderPassFragmentDensityMapCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {} ) VULKAN_HPP_NOEXCEPT
+      : fragmentDensityMapAttachment( fragmentDensityMapAttachment_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( RenderPassFragmentDensityMapCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , fragmentDensityMapAttachment( rhs.fragmentDensityMapAttachment )
+    {}
+
+    RenderPassFragmentDensityMapCreateInfoEXT & operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) - offsetof( RenderPassFragmentDensityMapCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RenderPassFragmentDensityMapCreateInfoEXT& operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    RenderPassFragmentDensityMapCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment( VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fragmentDensityMapAttachment = fragmentDensityMapAttachment_;
+      return *this;
+    }
+
+    operator VkRenderPassFragmentDensityMapCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
+    }
+
+    operator VkRenderPassFragmentDensityMapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RenderPassFragmentDensityMapCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment );
+    }
+
+    bool operator!=( RenderPassFragmentDensityMapCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment = {};
+  };
+  static_assert( sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) == sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassFragmentDensityMapCreateInfoEXT>::value, "struct wrapper is not a standard layout!"
);
+
+  struct RenderPassInputAttachmentAspectCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = {},
+                                                                    const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences_ = {} ) VULKAN_HPP_NOEXCEPT
+      : aspectReferenceCount( aspectReferenceCount_ )
+      , pAspectReferences( pAspectReferences_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( RenderPassInputAttachmentAspectCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , aspectReferenceCount( rhs.aspectReferenceCount )
+      , pAspectReferences( rhs.pAspectReferences )
+    {}
+
+    RenderPassInputAttachmentAspectCreateInfo & operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( RenderPassInputAttachmentAspectCreateInfo ) - offsetof( RenderPassInputAttachmentAspectCreateInfo, pNext ) );
+      return *this;
+    }
+
+    RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RenderPassInputAttachmentAspectCreateInfo& operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    RenderPassInputAttachmentAspectCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassInputAttachmentAspectCreateInfo & setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      aspectReferenceCount = aspectReferenceCount_;
+      return *this;
+    }
+
+    RenderPassInputAttachmentAspectCreateInfo & setPAspectReferences( const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAspectReferences = pAspectReferences_;
+      return *this;
+    }
+
+    operator VkRenderPassInputAttachmentAspectCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo*>( this );
+    }
+
+    operator VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const& ) const = default;
+#else
+    bool operator==( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( aspectReferenceCount == rhs.aspectReferenceCount )
+          && ( pAspectReferences == rhs.pAspectReferences );
+    }
+
+    bool operator!=( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
+    const void* pNext = {};
+    uint32_t aspectReferenceCount = {};
+    const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences = {};
+  };
+  static_assert( sizeof( RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassInputAttachmentAspectCreateInfo>::value, "struct wrapper is not a standard layout!"
);
+
+  struct RenderPassMultiviewCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( uint32_t subpassCount_ = {},
+                                                        const uint32_t* pViewMasks_ = {},
+                                                        uint32_t dependencyCount_ = {},
+                                                        const int32_t* pViewOffsets_ = {},
+                                                        uint32_t correlationMaskCount_ = {},
+                                                        const uint32_t* pCorrelationMasks_ = {} ) VULKAN_HPP_NOEXCEPT
+      : subpassCount( subpassCount_ )
+      , pViewMasks( pViewMasks_ )
+      , dependencyCount( dependencyCount_ )
+      , pViewOffsets( pViewOffsets_ )
+      , correlationMaskCount( correlationMaskCount_ )
+      , pCorrelationMasks( pCorrelationMasks_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( RenderPassMultiviewCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , subpassCount( rhs.subpassCount )
+      , pViewMasks( rhs.pViewMasks )
+      , dependencyCount( rhs.dependencyCount )
+      , pViewOffsets( rhs.pViewOffsets )
+      , correlationMaskCount( rhs.correlationMaskCount )
+      , pCorrelationMasks( rhs.pCorrelationMasks )
+    {}
+
+    RenderPassMultiviewCreateInfo & operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( RenderPassMultiviewCreateInfo ) - offsetof( RenderPassMultiviewCreateInfo, pNext ) );
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RenderPassMultiviewCreateInfo& operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassCount = subpassCount_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t* pViewMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewMasks = pViewMasks_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dependencyCount = dependencyCount_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t* pViewOffsets_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pViewOffsets = pViewOffsets_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo & setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      correlationMaskCount = correlationMaskCount_;
+      return *this;
+    }
+
+    RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t* pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pCorrelationMasks = pCorrelationMasks_;
+      return *this;
+    }
+
+    operator VkRenderPassMultiviewCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassMultiviewCreateInfo*>( this );
+    }
+
+    operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassMultiviewCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RenderPassMultiviewCreateInfo const& ) const = default;
+#else
+    bool operator==( RenderPassMultiviewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( subpassCount == rhs.subpassCount )
+          && ( pViewMasks == rhs.pViewMasks )
+          && ( dependencyCount == rhs.dependencyCount )
+          && ( pViewOffsets == rhs.pViewOffsets )
+          && ( correlationMaskCount == rhs.correlationMaskCount )
+          && ( pCorrelationMasks == rhs.pCorrelationMasks );
+    }
+
+    bool operator!=( RenderPassMultiviewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo;
+    const void* pNext = {};
+    uint32_t subpassCount = {};
+    const uint32_t* pViewMasks = {};
+    uint32_t dependencyCount = {};
+    const int32_t* pViewOffsets = {};
+    uint32_t correlationMaskCount = {};
+    const uint32_t* pCorrelationMasks = {};
+  };
+  static_assert( sizeof( RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassMultiviewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct SubpassSampleLocationsEXT
+  {
+    VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( uint32_t subpassIndex_ = {},
+                                                    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT
+      : subpassIndex( subpassIndex_ )
+      , sampleLocationsInfo( sampleLocationsInfo_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( SubpassSampleLocationsEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : subpassIndex( rhs.subpassIndex )
+      , sampleLocationsInfo( rhs.sampleLocationsInfo )
+    {}
+
+    SubpassSampleLocationsEXT & operator=( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( SubpassSampleLocationsEXT ) );
+      return *this;
+    }
+
+    SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SubpassSampleLocationsEXT& operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const *>(&rhs);
+      return *this;
+    }
+
+    SubpassSampleLocationsEXT & setSubpassIndex( uint32_t subpassIndex_ ) VULKAN_HPP_NOEXCEPT
+    {
+      subpassIndex = subpassIndex_;
+      return *this;
+    }
+
+    SubpassSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      sampleLocationsInfo = sampleLocationsInfo_;
+      return *this;
+    }
+
+    operator VkSubpassSampleLocationsEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSubpassSampleLocationsEXT*>( this );
+    }
+
+    operator VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSubpassSampleLocationsEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SubpassSampleLocationsEXT const& ) const = default;
+#else
+    bool operator==( SubpassSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( subpassIndex == rhs.subpassIndex )
+          && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
+    }
+
+    bool operator!=( SubpassSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t subpassIndex = {};
+    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
+  };
+  static_assert( sizeof( SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SubpassSampleLocationsEXT>::value, "struct wrapper is not a standard layout!"
);
+
+  struct RenderPassSampleLocationsBeginInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( uint32_t attachmentInitialSampleLocationsCount_ = {},
+                                                                const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ = {},
+                                                                uint32_t postSubpassSampleLocationsCount_ = {},
+                                                                const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT
+      : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ )
+      , pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ )
+      , postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ )
+      , pPostSubpassSampleLocations( pPostSubpassSampleLocations_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( RenderPassSampleLocationsBeginInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , attachmentInitialSampleLocationsCount( rhs.attachmentInitialSampleLocationsCount )
+      , pAttachmentInitialSampleLocations( rhs.pAttachmentInitialSampleLocations )
+      , postSubpassSampleLocationsCount( rhs.postSubpassSampleLocationsCount )
+      , pPostSubpassSampleLocations( rhs.pPostSubpassSampleLocations )
+    {}
+
+    RenderPassSampleLocationsBeginInfoEXT & operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( RenderPassSampleLocationsBeginInfoEXT ) - offsetof( RenderPassSampleLocationsBeginInfoEXT, pNext ) );
+      return *this;
+    }
+
+    RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    RenderPassSampleLocationsBeginInfoEXT& operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    RenderPassSampleLocationsBeginInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_;
+      return *this;
+    }
+
+    RenderPassSampleLocationsBeginInfoEXT & setPAttachmentInitialSampleLocations( const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_;
+      return *this;
+    }
+
+    RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_;
+      return *this;
+    }
+
+    RenderPassSampleLocationsBeginInfoEXT & setPPostSubpassSampleLocations( const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pPostSubpassSampleLocations = pPostSubpassSampleLocations_;
+      return *this;
+    }
+
+    operator VkRenderPassSampleLocationsBeginInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkRenderPassSampleLocationsBeginInfoEXT*>( this );
+    }
+
+    operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const& ) const = default;
+#else
+    bool operator==( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount )
+          && ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations )
+          && ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount )
+          && ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations );
+    }
+
+    bool operator!=( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
+    const void* pNext = {};
+    uint32_t attachmentInitialSampleLocationsCount = {};
+    const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations = {};
+    uint32_t postSubpassSampleLocationsCount = {};
+    const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations = {};
+  };
+  static_assert( sizeof( RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<RenderPassSampleLocationsBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkSample.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkSample.hpp
new file mode 100644
index 00000000..6317148c
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkSample.hpp
@@ -0,0 +1,111 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+ +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkObject.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkExtension.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkMetal.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkPast.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkFence.hpp" +#include "VkDispatch.hpp" +#include "VkPipeline.hpp" +#include "VkDraw.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkRay.hpp" +#include "VkExtent.hpp" +#include "VkPerformance.hpp" +#include "VkFilter.hpp" +#include "VkRender.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkMultisample.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkOffset.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" +#include "VkPhysical.hpp" +#include "VkPresent.hpp" +#include "VkProtected.hpp" +#include "VkPush.hpp" +#include "VkQuery.hpp" +#include "VkQueue.hpp" +#include "VkRect.hpp" +#include "VkRefresh.hpp" +#include "VkSample.hpp" + +namespace VULKAN_HPP_NAMESPACE +{} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkSampler.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkSampler.hpp new file mode 100644 index 00000000..ee23378f --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkSampler.hpp @@ -0,0 +1,712 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. 
+// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkObject.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkExtension.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkMetal.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkPast.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkFence.hpp" +#include "VkDispatch.hpp" +#include "VkPipeline.hpp" +#include "VkDraw.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkRay.hpp" +#include "VkExtent.hpp" +#include "VkPerformance.hpp" +#include "VkFilter.hpp" +#include "VkRender.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkMultisample.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkOffset.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" +#include "VkPhysical.hpp" +#include "VkPresent.hpp" +#include "VkProtected.hpp" +#include "VkPush.hpp" +#include "VkQuery.hpp" +#include "VkQueue.hpp" +#include "VkRect.hpp" +#include "VkRefresh.hpp" +#include "VkSample.hpp" +#include "VkSampler.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct SamplerCreateInfo + { + VULKAN_HPP_CONSTEXPR SamplerCreateInfo( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, + VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, + VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest, + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, + VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, + float mipLodBias_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {}, + float maxAnisotropy_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {}, + 
+                                         VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever,
+                                         float minLod_ = {},
+                                         float maxLod_ = {},
+                                         VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack,
+                                         VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , magFilter( magFilter_ )
+      , minFilter( minFilter_ )
+      , mipmapMode( mipmapMode_ )
+      , addressModeU( addressModeU_ )
+      , addressModeV( addressModeV_ )
+      , addressModeW( addressModeW_ )
+      , mipLodBias( mipLodBias_ )
+      , anisotropyEnable( anisotropyEnable_ )
+      , maxAnisotropy( maxAnisotropy_ )
+      , compareEnable( compareEnable_ )
+      , compareOp( compareOp_ )
+      , minLod( minLod_ )
+      , maxLod( maxLod_ )
+      , borderColor( borderColor_ )
+      , unnormalizedCoordinates( unnormalizedCoordinates_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , magFilter( rhs.magFilter )
+      , minFilter( rhs.minFilter )
+      , mipmapMode( rhs.mipmapMode )
+      , addressModeU( rhs.addressModeU )
+      , addressModeV( rhs.addressModeV )
+      , addressModeW( rhs.addressModeW )
+      , mipLodBias( rhs.mipLodBias )
+      , anisotropyEnable( rhs.anisotropyEnable )
+      , maxAnisotropy( rhs.maxAnisotropy )
+      , compareEnable( rhs.compareEnable )
+      , compareOp( rhs.compareOp )
+      , minLod( rhs.minLod )
+      , maxLod( rhs.maxLod )
+      , borderColor( rhs.borderColor )
+      , unnormalizedCoordinates( rhs.unnormalizedCoordinates )
+    {}
+
+    SamplerCreateInfo & operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( SamplerCreateInfo ) - offsetof( SamplerCreateInfo, pNext ) );
+      return *this;
+    }
+
+    SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SamplerCreateInfo& operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    SamplerCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      magFilter = magFilter_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minFilter = minFilter_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipmapMode = mipmapMode_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressModeU = addressModeU_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressModeV = addressModeV_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT
+    {
+      addressModeW = addressModeW_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT
+    {
+      mipLodBias = mipLodBias_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      anisotropyEnable = anisotropyEnable_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxAnisotropy = maxAnisotropy_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compareEnable = compareEnable_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compareOp = compareOp_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minLod = minLod_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maxLod = maxLod_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      borderColor = borderColor_;
+      return *this;
+    }
+
+    SamplerCreateInfo & setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT
+    {
+      unnormalizedCoordinates = unnormalizedCoordinates_;
+      return *this;
+    }
+
+    operator VkSamplerCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerCreateInfo*>( this );
+    }
+
+    operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SamplerCreateInfo const& ) const = default;
+#else
+    bool operator==( SamplerCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( magFilter == rhs.magFilter )
+          && ( minFilter == rhs.minFilter )
+          && ( mipmapMode == rhs.mipmapMode )
+          && ( addressModeU == rhs.addressModeU )
+          && ( addressModeV == rhs.addressModeV )
+          && ( addressModeW == rhs.addressModeW )
+          && ( mipLodBias == rhs.mipLodBias )
+          && ( anisotropyEnable == rhs.anisotropyEnable )
+          && ( maxAnisotropy == rhs.maxAnisotropy )
+          && ( compareEnable == rhs.compareEnable )
+          && ( compareOp == rhs.compareOp )
+          && ( minLod == rhs.minLod )
+          && ( maxLod == rhs.maxLod )
+          && ( borderColor == rhs.borderColor )
+          && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
+    }
+
+    bool operator!=( SamplerCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {};
+    VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+    VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+    VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest;
+    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+    VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
+    float mipLodBias = {};
+    VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {};
+    float maxAnisotropy = {};
+    VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {};
+    VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
+    float minLod = {};
+    float maxLod = {};
+    VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack;
+    VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {};
+  };
+  static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct SamplerReductionModeCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfoEXT( VULKAN_HPP_NAMESPACE::SamplerReductionModeEXT reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionModeEXT::eWeightedAverage ) VULKAN_HPP_NOEXCEPT
+      : reductionMode( reductionMode_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfoEXT( SamplerReductionModeCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , reductionMode( rhs.reductionMode )
+    {}
+
+    SamplerReductionModeCreateInfoEXT & operator=( SamplerReductionModeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( SamplerReductionModeCreateInfoEXT ) - offsetof( SamplerReductionModeCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    SamplerReductionModeCreateInfoEXT( VkSamplerReductionModeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SamplerReductionModeCreateInfoEXT& operator=( VkSamplerReductionModeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    SamplerReductionModeCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SamplerReductionModeCreateInfoEXT & setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionModeEXT reductionMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      reductionMode = reductionMode_;
+      return *this;
+    }
+
+    operator VkSamplerReductionModeCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerReductionModeCreateInfoEXT*>( this );
+    }
+
+    operator VkSamplerReductionModeCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerReductionModeCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SamplerReductionModeCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( SamplerReductionModeCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( reductionMode == rhs.reductionMode );
+    }
+
+    bool operator!=( SamplerReductionModeCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SamplerReductionModeEXT reductionMode = VULKAN_HPP_NAMESPACE::SamplerReductionModeEXT::eWeightedAverage;
+  };
+  static_assert( sizeof( SamplerReductionModeCreateInfoEXT ) == sizeof( VkSamplerReductionModeCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerReductionModeCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
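For orientation, a minimal usage sketch of the two sampler structures above. It is not part of the patch: it assumes this library's vulkan.hpp header is included, that `device` is a valid vk::Device, and that VK_EXT_sampler_filter_minmax was enabled at device creation.

    // Hypothetical example: chain a reduction mode into a sampler create info.
    vk::SamplerReductionModeCreateInfoEXT reduction( vk::SamplerReductionModeEXT::eMax );
    vk::SamplerCreateInfo createInfo;
    createInfo.setMagFilter( vk::Filter::eLinear )             // setters return *this, so they chain
              .setMinFilter( vk::Filter::eLinear )
              .setAddressModeU( vk::SamplerAddressMode::eClampToEdge )
              .setPNext( &reduction );                         // pNext is a raw pointer; `reduction` must outlive the call
    vk::Sampler sampler = device.createSampler( createInfo );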
+
+  struct SamplerYcbcrConversionCreateInfo
+  {
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                                           VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity,
+                                                           VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull,
+                                                           VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {},
+                                                           VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+                                                           VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven,
+                                                           VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest,
+                                                           VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {} ) VULKAN_HPP_NOEXCEPT
+      : format( format_ )
+      , ycbcrModel( ycbcrModel_ )
+      , ycbcrRange( ycbcrRange_ )
+      , components( components_ )
+      , xChromaOffset( xChromaOffset_ )
+      , yChromaOffset( yChromaOffset_ )
+      , chromaFilter( chromaFilter_ )
+      , forceExplicitReconstruction( forceExplicitReconstruction_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( SamplerYcbcrConversionCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , format( rhs.format )
+      , ycbcrModel( rhs.ycbcrModel )
+      , ycbcrRange( rhs.ycbcrRange )
+      , components( rhs.components )
+      , xChromaOffset( rhs.xChromaOffset )
+      , yChromaOffset( rhs.yChromaOffset )
+      , chromaFilter( rhs.chromaFilter )
+      , forceExplicitReconstruction( rhs.forceExplicitReconstruction )
+    {}
+
+    SamplerYcbcrConversionCreateInfo & operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( SamplerYcbcrConversionCreateInfo ) - offsetof( SamplerYcbcrConversionCreateInfo, pNext ) );
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SamplerYcbcrConversionCreateInfo& operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const *>(&rhs);
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
+    {
+      format = format_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycbcrModel = ycbcrModel_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT
+    {
+      ycbcrRange = ycbcrRange_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping components_ ) VULKAN_HPP_NOEXCEPT
+    {
+      components = components_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      xChromaOffset = xChromaOffset_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT
+    {
+      yChromaOffset = yChromaOffset_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT
+    {
+      chromaFilter = chromaFilter_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionCreateInfo & setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT
+    {
+      forceExplicitReconstruction = forceExplicitReconstruction_;
+      return *this;
+    }
+
+    operator VkSamplerYcbcrConversionCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( this );
+    }
+
+    operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerYcbcrConversionCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SamplerYcbcrConversionCreateInfo const& ) const = default;
+#else
+    bool operator==( SamplerYcbcrConversionCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( format == rhs.format )
+          && ( ycbcrModel == rhs.ycbcrModel )
+          && ( ycbcrRange == rhs.ycbcrRange )
+          && ( components == rhs.components )
+          && ( xChromaOffset == rhs.xChromaOffset )
+          && ( yChromaOffset == rhs.yChromaOffset )
+          && ( chromaFilter == rhs.chromaFilter )
+          && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction );
+    }
+
+    bool operator!=( SamplerYcbcrConversionCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
+    VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
+    VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+    VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
+    VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
+    VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {};
+  };
+  static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerYcbcrConversionCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct SamplerYcbcrConversionImageFormatProperties
+  {
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( uint32_t combinedImageSamplerDescriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT
+      : combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( SamplerYcbcrConversionImageFormatProperties const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , combinedImageSamplerDescriptorCount( rhs.combinedImageSamplerDescriptorCount )
+    {}
+
+    SamplerYcbcrConversionImageFormatProperties & operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( SamplerYcbcrConversionImageFormatProperties ) - offsetof( SamplerYcbcrConversionImageFormatProperties, pNext ) );
+      return *this;
+    }
+
+    SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SamplerYcbcrConversionImageFormatProperties& operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const *>(&rhs);
+      return *this;
+    }
+
+    operator VkSamplerYcbcrConversionImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties*>( this );
+    }
+
+    operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SamplerYcbcrConversionImageFormatProperties const& ) const = default;
+#else
+    bool operator==( SamplerYcbcrConversionImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount );
+    }
+
+    bool operator!=( SamplerYcbcrConversionImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
+    void* pNext = {};
+    uint32_t combinedImageSamplerDescriptorCount = {};
+  };
+  static_assert( sizeof( SamplerYcbcrConversionImageFormatProperties ) == sizeof( VkSamplerYcbcrConversionImageFormatProperties ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerYcbcrConversionImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
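A similar hedged sketch for the Y'CbCr structures above and just below; not part of the patch. It assumes a vk::Device `device` with the Vulkan 1.1 samplerYcbcrConversion feature enabled and a format the implementation actually supports for conversion.

    // Hypothetical example: create a conversion for a two-plane 4:2:0 format,
    // then attach it to a sampler through SamplerYcbcrConversionInfo.
    vk::SamplerYcbcrConversionCreateInfo conversionInfo;
    conversionInfo.setFormat( vk::Format::eG8B8R82Plane420Unorm )
                  .setYcbcrModel( vk::SamplerYcbcrModelConversion::eYcbcr709 )
                  .setYcbcrRange( vk::SamplerYcbcrRange::eItuNarrow )
                  .setChromaFilter( vk::Filter::eLinear );
    vk::SamplerYcbcrConversion conversion = device.createSamplerYcbcrConversion( conversionInfo );

    vk::SamplerYcbcrConversionInfo chainInfo( conversion );
    vk::SamplerCreateInfo samplerInfo;
    samplerInfo.setPNext( &chainInfo );  // the conversion object now governs this sampler
    vk::Sampler sampler = device.createSampler( samplerInfo );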
+
+  struct SamplerYcbcrConversionInfo
+  {
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {} ) VULKAN_HPP_NOEXCEPT
+      : conversion( conversion_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , conversion( rhs.conversion )
+    {}
+
+    SamplerYcbcrConversionInfo & operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( SamplerYcbcrConversionInfo ) - offsetof( SamplerYcbcrConversionInfo, pNext ) );
+      return *this;
+    }
+
+    SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SamplerYcbcrConversionInfo& operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const *>(&rhs);
+      return *this;
+    }
+
+    SamplerYcbcrConversionInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SamplerYcbcrConversionInfo & setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT
+    {
+      conversion = conversion_;
+      return *this;
+    }
+
+    operator VkSamplerYcbcrConversionInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSamplerYcbcrConversionInfo*>( this );
+    }
+
+    operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSamplerYcbcrConversionInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SamplerYcbcrConversionInfo const& ) const = default;
+#else
+    bool operator==( SamplerYcbcrConversionInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( conversion == rhs.conversion );
+    }
+
+    bool operator!=( SamplerYcbcrConversionInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {};
+  };
+  static_assert( sizeof( SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SamplerYcbcrConversionInfo>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkSemaphore.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkSemaphore.hpp
new file mode 100644
index 00000000..8788747b
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkSemaphore.hpp
@@ -0,0 +1,359 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkObject.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkExtension.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkMetal.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkPast.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkFence.hpp" +#include "VkDispatch.hpp" +#include "VkPipeline.hpp" +#include "VkDraw.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkRay.hpp" +#include "VkExtent.hpp" +#include "VkPerformance.hpp" +#include "VkFilter.hpp" +#include "VkRender.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkMultisample.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkOffset.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" +#include "VkPhysical.hpp" +#include "VkPresent.hpp" +#include "VkProtected.hpp" +#include "VkPush.hpp" +#include "VkQuery.hpp" +#include "VkQueue.hpp" +#include "VkRect.hpp" +#include "VkRefresh.hpp" +#include "VkSample.hpp" +#include "VkSampler.hpp" +#include "VkSemaphore.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct SemaphoreCreateInfo + { + VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( SemaphoreCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , flags( rhs.flags ) + {} + + SemaphoreCreateInfo & operator=( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( SemaphoreCreateInfo ) - offsetof( 
SemaphoreCreateInfo, pNext ) ); + return *this; + } + + SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SemaphoreCreateInfo& operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + SemaphoreCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + operator VkSemaphoreCreateInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SemaphoreCreateInfo const& ) const = default; +#else + bool operator==( SemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ); + } + + bool operator!=( SemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {}; + }; + static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct SemaphoreGetFdInfoKHR + { + VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( SemaphoreGetFdInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , semaphore( rhs.semaphore ) + , handleType( rhs.handleType ) + {} + + SemaphoreGetFdInfoKHR & operator=( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( SemaphoreGetFdInfoKHR ) - offsetof( SemaphoreGetFdInfoKHR, pNext ) ); + return *this; + } + + SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SemaphoreGetFdInfoKHR& operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + SemaphoreGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + SemaphoreGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + operator VkSemaphoreGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SemaphoreGetFdInfoKHR const& ) const = default; +#else + bool operator==( SemaphoreGetFdInfoKHR const& rhs ) const 
VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( semaphore == rhs.semaphore ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( SemaphoreGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + }; + static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct SemaphoreGetWin32HandleInfoKHR + { + VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) VULKAN_HPP_NOEXCEPT + : semaphore( semaphore_ ) + , handleType( handleType_ ) + {} + + VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( SemaphoreGetWin32HandleInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , semaphore( rhs.semaphore ) + , handleType( rhs.handleType ) + {} + + SemaphoreGetWin32HandleInfoKHR & operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( SemaphoreGetWin32HandleInfoKHR ) - offsetof( SemaphoreGetWin32HandleInfoKHR, pNext ) ); + return *this; + } + + SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SemaphoreGetWin32HandleInfoKHR& operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + SemaphoreGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT + { + semaphore = semaphore_; + return *this; + } + + SemaphoreGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT + { + handleType = handleType_; + return *this; + } + + operator VkSemaphoreGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SemaphoreGetWin32HandleInfoKHR const& ) const = default; +#else + bool operator==( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( semaphore == rhs.semaphore ) + && ( handleType == rhs.handleType ); + } + + bool operator!=( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::Semaphore semaphore = {}; + VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = 
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd; + }; + static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkShader.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkShader.hpp new file mode 100644 index 00000000..fbb203b0 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkShader.hpp @@ -0,0 +1,452 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
+ +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkObject.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkExtension.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkMetal.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkPast.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkFence.hpp" +#include "VkDispatch.hpp" +#include "VkPipeline.hpp" +#include "VkDraw.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkRay.hpp" +#include "VkExtent.hpp" +#include "VkPerformance.hpp" +#include "VkFilter.hpp" +#include "VkRender.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkMultisample.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkOffset.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" +#include "VkPhysical.hpp" +#include "VkPresent.hpp" +#include "VkProtected.hpp" +#include "VkPush.hpp" +#include "VkQuery.hpp" +#include "VkQueue.hpp" +#include "VkRect.hpp" +#include "VkRefresh.hpp" +#include "VkSample.hpp" +#include "VkSampler.hpp" +#include "VkSemaphore.hpp" +#include "VkShader.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct ShaderModuleCreateInfo + { + VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {}, + size_t codeSize_ = {}, + const uint32_t* pCode_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , codeSize( codeSize_ ) + , pCode( pCode_ ) + {} + + VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( ShaderModuleCreateInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , flags( rhs.flags ) + , codeSize( rhs.codeSize ) + , pCode( rhs.pCode ) + {} + + ShaderModuleCreateInfo & operator=( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( ShaderModuleCreateInfo ) - offsetof( ShaderModuleCreateInfo, pNext ) ); + return *this; + } + + ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + ShaderModuleCreateInfo& operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + ShaderModuleCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT + { + codeSize = codeSize_; + return *this; + } + + ShaderModuleCreateInfo & setPCode( const uint32_t* pCode_ ) VULKAN_HPP_NOEXCEPT + { + pCode = pCode_; + return *this; + } + + 
+    operator VkShaderModuleCreateInfo const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShaderModuleCreateInfo*>( this );
+    }
+
+    operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShaderModuleCreateInfo*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ShaderModuleCreateInfo const& ) const = default;
+#else
+    bool operator==( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( codeSize == rhs.codeSize )
+          && ( pCode == rhs.pCode );
+    }
+
+    bool operator!=( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {};
+    size_t codeSize = {};
+    const uint32_t* pCode = {};
+  };
+  static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ShaderModuleCreateInfo>::value, "struct wrapper is not a standard layout!" );
+
+  struct ShaderModuleValidationCacheCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {} ) VULKAN_HPP_NOEXCEPT
+      : validationCache( validationCache_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , validationCache( rhs.validationCache )
+    {}
+
+    ShaderModuleValidationCacheCreateInfoEXT & operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ShaderModuleValidationCacheCreateInfoEXT ) - offsetof( ShaderModuleValidationCacheCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ShaderModuleValidationCacheCreateInfoEXT& operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ShaderModuleValidationCacheCreateInfoEXT & setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT
+    {
+      validationCache = validationCache_;
+      return *this;
+    }
+
+    operator VkShaderModuleValidationCacheCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShaderModuleValidationCacheCreateInfoEXT*>( this );
+    }
+
+    operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( validationCache == rhs.validationCache );
+    }
+
+    bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {};
+  };
+  static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ShaderModuleValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct ShaderResourceUsageAMD
+  {
+    VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( uint32_t numUsedVgprs_ = {},
+                                                 uint32_t numUsedSgprs_ = {},
+                                                 uint32_t ldsSizePerLocalWorkGroup_ = {},
+                                                 size_t ldsUsageSizeInBytes_ = {},
+                                                 size_t scratchMemUsageInBytes_ = {} ) VULKAN_HPP_NOEXCEPT
+      : numUsedVgprs( numUsedVgprs_ )
+      , numUsedSgprs( numUsedSgprs_ )
+      , ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ )
+      , ldsUsageSizeInBytes( ldsUsageSizeInBytes_ )
+      , scratchMemUsageInBytes( scratchMemUsageInBytes_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( ShaderResourceUsageAMD const& rhs ) VULKAN_HPP_NOEXCEPT
+      : numUsedVgprs( rhs.numUsedVgprs )
+      , numUsedSgprs( rhs.numUsedSgprs )
+      , ldsSizePerLocalWorkGroup( rhs.ldsSizePerLocalWorkGroup )
+      , ldsUsageSizeInBytes( rhs.ldsUsageSizeInBytes )
+      , scratchMemUsageInBytes( rhs.scratchMemUsageInBytes )
+    {}
+
+    ShaderResourceUsageAMD & operator=( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( ShaderResourceUsageAMD ) );
+      return *this;
+    }
+
+    ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ShaderResourceUsageAMD& operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const *>(&rhs);
+      return *this;
+    }
+
+    operator VkShaderResourceUsageAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShaderResourceUsageAMD*>( this );
+    }
+
+    operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShaderResourceUsageAMD*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ShaderResourceUsageAMD const& ) const = default;
+#else
+    bool operator==( ShaderResourceUsageAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( numUsedVgprs == rhs.numUsedVgprs )
+          && ( numUsedSgprs == rhs.numUsedSgprs )
+          && ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup )
+          && ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes )
+          && ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes );
+    }
+
+    bool operator!=( ShaderResourceUsageAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t numUsedVgprs = {};
+    uint32_t numUsedSgprs = {};
+    uint32_t ldsSizePerLocalWorkGroup = {};
+    size_t ldsUsageSizeInBytes = {};
+    size_t scratchMemUsageInBytes = {};
+  };
+  static_assert( sizeof( ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ShaderResourceUsageAMD>::value, "struct wrapper is not a standard layout!" );
+
+  struct ShaderStatisticsInfoAMD
+  {
+    VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {},
+                                                     VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {},
+                                                     uint32_t numPhysicalVgprs_ = {},
+                                                     uint32_t numPhysicalSgprs_ = {},
+                                                     uint32_t numAvailableVgprs_ = {},
+                                                     uint32_t numAvailableSgprs_ = {},
+                                                     std::array<uint32_t, 3> const& computeWorkGroupSize_ = {} ) VULKAN_HPP_NOEXCEPT
+      : shaderStageMask( shaderStageMask_ )
+      , resourceUsage( resourceUsage_ )
+      , numPhysicalVgprs( numPhysicalVgprs_ )
+      , numPhysicalSgprs( numPhysicalSgprs_ )
+      , numAvailableVgprs( numAvailableVgprs_ )
+      , numAvailableSgprs( numAvailableSgprs_ )
+      , computeWorkGroupSize{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t, 3>::copy( computeWorkGroupSize, computeWorkGroupSize_ );
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( ShaderStatisticsInfoAMD const& rhs ) VULKAN_HPP_NOEXCEPT
+      : shaderStageMask( rhs.shaderStageMask )
+      , resourceUsage( rhs.resourceUsage )
+      , numPhysicalVgprs( rhs.numPhysicalVgprs )
+      , numPhysicalSgprs( rhs.numPhysicalSgprs )
+      , numAvailableVgprs( rhs.numAvailableVgprs )
+      , numAvailableSgprs( rhs.numAvailableSgprs )
+      , computeWorkGroupSize{}
+    {
+      VULKAN_HPP_NAMESPACE::ConstExpression1DArrayCopy<uint32_t, 3>::copy( computeWorkGroupSize, rhs.computeWorkGroupSize );
+    }
+
+    ShaderStatisticsInfoAMD & operator=( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( ShaderStatisticsInfoAMD ) );
+      return *this;
+    }
+
+    ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ShaderStatisticsInfoAMD& operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD const *>(&rhs);
+      return *this;
+    }
+
+    operator VkShaderStatisticsInfoAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkShaderStatisticsInfoAMD*>( this );
+    }
+
+    operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkShaderStatisticsInfoAMD*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ShaderStatisticsInfoAMD const& ) const = default;
+#else
+    bool operator==( ShaderStatisticsInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( shaderStageMask == rhs.shaderStageMask )
+          && ( resourceUsage == rhs.resourceUsage )
+          && ( numPhysicalVgprs == rhs.numPhysicalVgprs )
+          && ( numPhysicalSgprs == rhs.numPhysicalSgprs )
+          && ( numAvailableVgprs == rhs.numAvailableVgprs )
+          && ( numAvailableSgprs == rhs.numAvailableSgprs )
+          && ( memcmp( computeWorkGroupSize, rhs.computeWorkGroupSize, 3 * sizeof( uint32_t ) ) == 0 );
+    }
+
+    bool operator!=( ShaderStatisticsInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {};
+    VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage = {};
+    uint32_t numPhysicalVgprs = {};
+    uint32_t numPhysicalSgprs = {};
+    uint32_t numAvailableVgprs = {};
+    uint32_t numAvailableSgprs = {};
+    uint32_t computeWorkGroupSize[3] = {};
+  };
+  static_assert( sizeof( ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ShaderStatisticsInfoAMD>::value, "struct wrapper is not a standard layout!" );
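Finally, a hedged sketch for ShaderModuleCreateInfo from earlier in this file; not part of the patch. It assumes a valid vk::Device `device` and SPIR-V code held in a std::vector<uint32_t> named `spirv`. The classic pitfall is that codeSize is a byte count even though pCode points at 32-bit words.

    // Hypothetical example: wrap a SPIR-V blob in a shader module.
    vk::ShaderModuleCreateInfo moduleInfo;
    moduleInfo.setCodeSize( spirv.size() * sizeof( uint32_t ) )  // bytes, not words
              .setPCode( spirv.data() );
    vk::ShaderModule shaderModule = device.createShaderModule( moduleInfo );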
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkShading.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkShading.hpp new file mode 100644 index 00000000..9f9a78d5 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkShading.hpp @@ -0,0 +1,114 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry.
+ +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkObject.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkExtension.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkMetal.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkPast.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkFence.hpp" +#include "VkDispatch.hpp" +#include "VkPipeline.hpp" +#include "VkDraw.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkRay.hpp" +#include "VkExtent.hpp" +#include "VkPerformance.hpp" +#include "VkFilter.hpp" +#include "VkRender.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkMultisample.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkOffset.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" +#include "VkPhysical.hpp" +#include "VkPresent.hpp" +#include "VkProtected.hpp" +#include "VkPush.hpp" +#include "VkQuery.hpp" +#include "VkQueue.hpp" +#include "VkRect.hpp" +#include "VkRefresh.hpp" +#include "VkSample.hpp" +#include "VkSampler.hpp" +#include "VkSemaphore.hpp" +#include "VkShader.hpp" + +namespace VULKAN_HPP_NAMESPACE +{} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkShared.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkShared.hpp new file mode 100644 index 00000000..dace894e --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkShared.hpp @@ -0,0 +1,177 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. 
+// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkObject.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkExtension.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkMetal.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkPast.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkFence.hpp" +#include "VkDispatch.hpp" +#include "VkPipeline.hpp" +#include "VkDraw.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkRay.hpp" +#include "VkExtent.hpp" +#include "VkPerformance.hpp" +#include "VkFilter.hpp" +#include "VkRender.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkMultisample.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkOffset.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" +#include "VkPhysical.hpp" +#include "VkPresent.hpp" +#include "VkProtected.hpp" +#include "VkPush.hpp" +#include "VkQuery.hpp" +#include "VkQueue.hpp" +#include "VkRect.hpp" +#include "VkRefresh.hpp" +#include "VkSample.hpp" +#include "VkSampler.hpp" +#include "VkSemaphore.hpp" +#include "VkShader.hpp" +#include "VkShading.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct SharedPresentSurfaceCapabilitiesKHR + { + VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT + : sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ ) + {} + + VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( SharedPresentSurfaceCapabilitiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , sharedPresentSupportedUsageFlags( rhs.sharedPresentSupportedUsageFlags ) + {} + + SharedPresentSurfaceCapabilitiesKHR & operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( SharedPresentSurfaceCapabilitiesKHR ) - offsetof( SharedPresentSurfaceCapabilitiesKHR, pNext ) ); + return *this; + } + + 
SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SharedPresentSurfaceCapabilitiesKHR& operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const *>(&rhs); + return *this; + } + + operator VkSharedPresentSurfaceCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkSharedPresentSurfaceCapabilitiesKHR*>( this ); + } + + operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const& ) const = default; +#else + bool operator==( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags ); + } + + bool operator!=( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {}; + }; + static_assert( sizeof( SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<SharedPresentSurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" ); +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkSparse.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkSparse.hpp new file mode 100644 index 00000000..77634812 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkSparse.hpp @@ -0,0 +1,385 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry.
+ +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkObject.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkExtension.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkMetal.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkPast.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkFence.hpp" +#include "VkDispatch.hpp" +#include "VkPipeline.hpp" +#include "VkDraw.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkRay.hpp" +#include "VkExtent.hpp" +#include "VkPerformance.hpp" +#include "VkFilter.hpp" +#include "VkRender.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkMultisample.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkOffset.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" +#include "VkPhysical.hpp" +#include "VkPresent.hpp" +#include "VkProtected.hpp" +#include "VkPush.hpp" +#include "VkQuery.hpp" +#include "VkQueue.hpp" +#include "VkRect.hpp" +#include "VkRefresh.hpp" +#include "VkSample.hpp" +#include "VkSampler.hpp" +#include "VkSemaphore.hpp" +#include "VkShader.hpp" +#include "VkShading.hpp" +#include "VkShared.hpp" +#include "VkSparse.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct SparseImageFormatProperties + { + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {}, + VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT + : aspectMask( aspectMask_ ) + , imageGranularity( imageGranularity_ ) + , flags( flags_ ) + {} + + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( SparseImageFormatProperties const& rhs ) VULKAN_HPP_NOEXCEPT + : aspectMask( rhs.aspectMask ) + , imageGranularity( rhs.imageGranularity ) + , flags( rhs.flags ) + {} + + SparseImageFormatProperties & operator=( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast<void*>(this), &rhs, sizeof( SparseImageFormatProperties ) ); + return *this; + } + + SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SparseImageFormatProperties& operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const *>(&rhs); + return *this; + } + + operator VkSparseImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkSparseImageFormatProperties*>( this ); + } + + operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkSparseImageFormatProperties*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SparseImageFormatProperties const& ) const = default;
+#else + bool operator==( SparseImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( aspectMask == rhs.aspectMask ) + && ( imageGranularity == rhs.imageGranularity ) + && ( flags == rhs.flags ); + } + + bool operator!=( SparseImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {}; + VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {}; + VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {}; + }; + static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<SparseImageFormatProperties>::value, "struct wrapper is not a standard layout!" ); + + struct SparseImageFormatProperties2 + { + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {} ) VULKAN_HPP_NOEXCEPT + : properties( properties_ ) + {} + + VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( SparseImageFormatProperties2 const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , properties( rhs.properties ) + {} + + SparseImageFormatProperties2 & operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( SparseImageFormatProperties2 ) - offsetof( SparseImageFormatProperties2, pNext ) ); + return *this; + } + + SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SparseImageFormatProperties2& operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const *>(&rhs); + return *this; + } + + operator VkSparseImageFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkSparseImageFormatProperties2*>( this ); + } + + operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkSparseImageFormatProperties2*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SparseImageFormatProperties2 const& ) const = default; +#else + bool operator==( SparseImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( properties == rhs.properties ); + } + + bool operator!=( SparseImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {}; + }; + static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<SparseImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
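SparseImageFormatProperties2 shows the extensible variant of the wrapper pattern: sType is fixed by the wrapper itself, pNext is the only chaining knob, and the payload is the plain SparseImageFormatProperties it wraps. A hedged usage sketch, assuming the default `vk` namespace, a valid vk::PhysicalDevice named physicalDevice, and a populated vk::PhysicalDeviceSparseImageFormatInfo2 named formatInfo (none of which are defined in this patch):

    std::vector<vk::SparseImageFormatProperties2> props =
        physicalDevice.getSparseImageFormatProperties2( formatInfo );
    for ( vk::SparseImageFormatProperties2 const & p : props )
      use( p.properties );  // use() is a stand-in for whatever consumes the plain struct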
+ + struct SparseImageMemoryRequirements + { + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {}, + uint32_t imageMipTailFirstLod_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {} ) VULKAN_HPP_NOEXCEPT + : formatProperties( formatProperties_ ) + , imageMipTailFirstLod( imageMipTailFirstLod_ ) + , imageMipTailSize( imageMipTailSize_ ) + , imageMipTailOffset( imageMipTailOffset_ ) + , imageMipTailStride( imageMipTailStride_ ) + {} + + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( SparseImageMemoryRequirements const& rhs ) VULKAN_HPP_NOEXCEPT + : formatProperties( rhs.formatProperties ) + , imageMipTailFirstLod( rhs.imageMipTailFirstLod ) + , imageMipTailSize( rhs.imageMipTailSize ) + , imageMipTailOffset( rhs.imageMipTailOffset ) + , imageMipTailStride( rhs.imageMipTailStride ) + {} + + SparseImageMemoryRequirements & operator=( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( static_cast<void*>(this), &rhs, sizeof( SparseImageMemoryRequirements ) ); + return *this; + } + + SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SparseImageMemoryRequirements& operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const *>(&rhs); + return *this; + } + + operator VkSparseImageMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkSparseImageMemoryRequirements*>( this ); + } + + operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkSparseImageMemoryRequirements*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SparseImageMemoryRequirements const& ) const = default; +#else + bool operator==( SparseImageMemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( formatProperties == rhs.formatProperties ) + && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod ) + && ( imageMipTailSize == rhs.imageMipTailSize ) + && ( imageMipTailOffset == rhs.imageMipTailOffset ) + && ( imageMipTailStride == rhs.imageMipTailStride ); + } + + bool operator!=( SparseImageMemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {}; + uint32_t imageMipTailFirstLod = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {}; + VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {}; + }; + static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<SparseImageMemoryRequirements>::value, "struct wrapper is not a standard layout!"
); + + struct SparseImageMemoryRequirements2 + { + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {} ) VULKAN_HPP_NOEXCEPT + : memoryRequirements( memoryRequirements_ ) + {} + + VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( SparseImageMemoryRequirements2 const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , memoryRequirements( rhs.memoryRequirements ) + {} + + SparseImageMemoryRequirements2 & operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( SparseImageMemoryRequirements2 ) - offsetof( SparseImageMemoryRequirements2, pNext ) ); + return *this; + } + + SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SparseImageMemoryRequirements2& operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const *>(&rhs); + return *this; + } + + operator VkSparseImageMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkSparseImageMemoryRequirements2*>( this ); + } + + operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkSparseImageMemoryRequirements2*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SparseImageMemoryRequirements2 const& ) const = default; +#else + bool operator==( SparseImageMemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( memoryRequirements == rhs.memoryRequirements ); + } + + bool operator!=( SparseImageMemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2; + void* pNext = {}; + VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {}; + }; + static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<SparseImageMemoryRequirements2>::value, "struct wrapper is not a standard layout!" ); +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkSpecialization.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkSpecialization.hpp new file mode 100644 index 00000000..8bb2a1a9 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkSpecialization.hpp @@ -0,0 +1,118 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License.
+// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkObject.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkExtension.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkMetal.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkPast.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkFence.hpp" +#include "VkDispatch.hpp" +#include "VkPipeline.hpp" +#include "VkDraw.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkRay.hpp" +#include "VkExtent.hpp" +#include "VkPerformance.hpp" +#include "VkFilter.hpp" +#include "VkRender.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkMultisample.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkOffset.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" +#include "VkPhysical.hpp" +#include "VkPresent.hpp" +#include "VkProtected.hpp" +#include "VkPush.hpp" +#include "VkQuery.hpp" +#include "VkQueue.hpp" +#include "VkRect.hpp" +#include "VkRefresh.hpp" +#include "VkSample.hpp" +#include "VkSampler.hpp" +#include "VkSemaphore.hpp" +#include "VkShader.hpp" +#include "VkShading.hpp" +#include "VkShared.hpp" +#include "VkSparse.hpp" +#include "VkSpecialization.hpp" + +namespace VULKAN_HPP_NAMESPACE +{} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkStencil.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkStencil.hpp new file mode 100644 index 00000000..8bb2a1a9 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkStencil.hpp @@ -0,0 +1,118 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkObject.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkExtension.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkMetal.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkPast.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkFence.hpp" +#include "VkDispatch.hpp" +#include "VkPipeline.hpp" +#include "VkDraw.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkRay.hpp" +#include "VkExtent.hpp" +#include "VkPerformance.hpp" +#include "VkFilter.hpp" +#include "VkRender.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkMultisample.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkOffset.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" +#include "VkPhysical.hpp" +#include "VkPresent.hpp" +#include "VkProtected.hpp" +#include "VkPush.hpp" +#include "VkQuery.hpp" +#include "VkQueue.hpp" +#include "VkRect.hpp" +#include "VkRefresh.hpp" +#include "VkSample.hpp" +#include "VkSampler.hpp" +#include "VkSemaphore.hpp" +#include "VkShader.hpp" +#include "VkShading.hpp" +#include "VkShared.hpp" +#include "VkSparse.hpp" +#include 
"VkSpecialization.hpp" + +namespace VULKAN_HPP_NAMESPACE +{} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkStream.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkStream.hpp new file mode 100644 index 00000000..7023da8c --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkStream.hpp @@ -0,0 +1,206 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
+ +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkObject.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkExtension.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkMetal.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkPast.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkFence.hpp" +#include "VkDispatch.hpp" +#include "VkPipeline.hpp" +#include "VkDraw.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkRay.hpp" +#include "VkExtent.hpp" +#include "VkPerformance.hpp" +#include "VkFilter.hpp" +#include "VkRender.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkMultisample.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkOffset.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" +#include "VkPhysical.hpp" +#include "VkPresent.hpp" +#include "VkProtected.hpp" +#include "VkPush.hpp" +#include "VkQuery.hpp" +#include "VkQueue.hpp" +#include "VkRect.hpp" +#include "VkRefresh.hpp" +#include "VkSample.hpp" +#include "VkSampler.hpp" +#include "VkSemaphore.hpp" +#include "VkShader.hpp" +#include "VkShading.hpp" +#include "VkShared.hpp" +#include "VkSparse.hpp" +#include "VkSpecialization.hpp" +#include "VkStencil.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ +#ifdef VK_USE_PLATFORM_GGP + struct StreamDescriptorSurfaceCreateInfoGGP + { + VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, + GgpStreamDescriptor streamDescriptor_ = {} ) VULKAN_HPP_NOEXCEPT + : flags( flags_ ) + , streamDescriptor( streamDescriptor_ ) + {} + + VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( StreamDescriptorSurfaceCreateInfoGGP const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , flags( rhs.flags ) + , streamDescriptor( rhs.streamDescriptor ) + {} + + StreamDescriptorSurfaceCreateInfoGGP & operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( StreamDescriptorSurfaceCreateInfoGGP ) - offsetof( StreamDescriptorSurfaceCreateInfoGGP, pNext ) ); + return *this; + } + + StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + StreamDescriptorSurfaceCreateInfoGGP& operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const *>(&rhs); + return *this; + } + + StreamDescriptorSurfaceCreateInfoGGP & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + StreamDescriptorSurfaceCreateInfoGGP & setFlags(
VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + StreamDescriptorSurfaceCreateInfoGGP & setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT + { + streamDescriptor = streamDescriptor_; + return *this; + } + + operator VkStreamDescriptorSurfaceCreateInfoGGP const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( this ); + } + + operator VkStreamDescriptorSurfaceCreateInfoGGP &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkStreamDescriptorSurfaceCreateInfoGGP*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( StreamDescriptorSurfaceCreateInfoGGP const& ) const = default; +#else + bool operator==( StreamDescriptorSurfaceCreateInfoGGP const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( flags == rhs.flags ) + && ( streamDescriptor == rhs.streamDescriptor ); + } + + bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags = {}; + GgpStreamDescriptor streamDescriptor = {}; + }; + static_assert( sizeof( StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<StreamDescriptorSurfaceCreateInfoGGP>::value, "struct wrapper is not a standard layout!" ); +#endif /*VK_USE_PLATFORM_GGP*/ +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkSubmit.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkSubmit.hpp new file mode 100644 index 00000000..70541377 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkSubmit.hpp @@ -0,0 +1,260 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License.
+// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkObject.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkStream.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkExtension.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkMetal.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkPast.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkFence.hpp" +#include "VkDispatch.hpp" +#include "VkPipeline.hpp" +#include "VkDraw.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkRay.hpp" +#include "VkExtent.hpp" +#include "VkPerformance.hpp" +#include "VkFilter.hpp" +#include "VkRender.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkMultisample.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkOffset.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" +#include "VkPhysical.hpp" +#include "VkPresent.hpp" +#include "VkProtected.hpp" +#include "VkPush.hpp" +#include "VkQuery.hpp" +#include "VkQueue.hpp" +#include "VkRect.hpp" +#include "VkRefresh.hpp" +#include "VkSample.hpp" +#include "VkSampler.hpp" +#include "VkSemaphore.hpp" +#include "VkShader.hpp" +#include "VkShading.hpp" +#include "VkShared.hpp" +#include "VkSparse.hpp" +#include "VkSpecialization.hpp" +#include "VkStencil.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct SubmitInfo + { + VULKAN_HPP_CONSTEXPR SubmitInfo( uint32_t waitSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ = {}, + uint32_t commandBufferCount_ = {}, + const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ = {}, + uint32_t signalSemaphoreCount_ = {}, + const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {} ) VULKAN_HPP_NOEXCEPT + : waitSemaphoreCount( waitSemaphoreCount_ ) + , pWaitSemaphores( pWaitSemaphores_ ) + , pWaitDstStageMask( pWaitDstStageMask_ ) + , commandBufferCount( commandBufferCount_ ) + , pCommandBuffers( pCommandBuffers_ ) + , 
signalSemaphoreCount( signalSemaphoreCount_ ) + , pSignalSemaphores( pSignalSemaphores_ ) + {} + + VULKAN_HPP_CONSTEXPR SubmitInfo( SubmitInfo const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , waitSemaphoreCount( rhs.waitSemaphoreCount ) + , pWaitSemaphores( rhs.pWaitSemaphores ) + , pWaitDstStageMask( rhs.pWaitDstStageMask ) + , commandBufferCount( rhs.commandBufferCount ) + , pCommandBuffers( rhs.pCommandBuffers ) + , signalSemaphoreCount( rhs.signalSemaphoreCount ) + , pSignalSemaphores( rhs.pSignalSemaphores ) + {} + + SubmitInfo & operator=( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( SubmitInfo ) - offsetof( SubmitInfo, pNext ) ); + return *this; + } + + SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SubmitInfo& operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubmitInfo const *>(&rhs); + return *this; + } + + SubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + waitSemaphoreCount = waitSemaphoreCount_; + return *this; + } + + SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pWaitSemaphores = pWaitSemaphores_; + return *this; + } + + SubmitInfo & setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT + { + pWaitDstStageMask = pWaitDstStageMask_; + return *this; + } + + SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferCount = commandBufferCount_; + return *this; + } + + SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT + { + pCommandBuffers = pCommandBuffers_; + return *this; + } + + SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT + { + signalSemaphoreCount = signalSemaphoreCount_; + return *this; + } + + SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT + { + pSignalSemaphores = pSignalSemaphores_; + return *this; + } + + operator VkSubmitInfo const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkSubmitInfo*>( this ); + } + + operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<VkSubmitInfo*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SubmitInfo const& ) const = default; +#else + bool operator==( SubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( waitSemaphoreCount == rhs.waitSemaphoreCount ) + && ( pWaitSemaphores == rhs.pWaitSemaphores ) + && ( pWaitDstStageMask == rhs.pWaitDstStageMask ) + && ( commandBufferCount == rhs.commandBufferCount ) + && ( pCommandBuffers == rhs.pCommandBuffers ) + && ( signalSemaphoreCount == rhs.signalSemaphoreCount ) + && ( pSignalSemaphores == rhs.pSignalSemaphores ); + } + + bool operator!=( SubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo; + const void* pNext = {}; + uint32_t waitSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {}; + const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask = {}; + uint32_t commandBufferCount = {}; + const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers = {}; + uint32_t signalSemaphoreCount = {}; + const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores = {}; + }; + static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<SubmitInfo>::value, "struct wrapper is not a standard layout!" ); +} // namespace VULKAN_HPP_NAMESPACE
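The SubmitInfo setters each return *this, so a queue submission can be assembled fluently. A hedged sketch, assuming the default `vk` namespace and valid handles waitSemaphore, signalSemaphore, commandBuffer, queue, and fence created elsewhere (none are defined in this patch):

    // Describe a submission that waits on one semaphore, runs one command
    // buffer, and signals one semaphore plus a fence on completion.
    vk::PipelineStageFlags waitStage = vk::PipelineStageFlagBits::eColorAttachmentOutput;
    vk::SubmitInfo submitInfo = vk::SubmitInfo()
        .setWaitSemaphoreCount( 1 )
        .setPWaitSemaphores( &waitSemaphore )
        .setPWaitDstStageMask( &waitStage )
        .setCommandBufferCount( 1 )
        .setPCommandBuffers( &commandBuffer )
        .setSignalSemaphoreCount( 1 )
        .setPSignalSemaphores( &signalSemaphore );
    queue.submit( submitInfo, fence );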
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkSubpass.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkSubpass.hpp new file mode 100644 index 00000000..d023600b --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkSubpass.hpp @@ -0,0 +1,356 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry.
+ +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkObject.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkStream.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkExtension.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkMetal.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkPast.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkFence.hpp" +#include "VkDispatch.hpp" +#include "VkPipeline.hpp" +#include "VkDraw.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkRay.hpp" +#include "VkExtent.hpp" +#include "VkPerformance.hpp" +#include "VkFilter.hpp" +#include "VkRender.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkMultisample.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkOffset.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" +#include "VkPhysical.hpp" +#include "VkPresent.hpp" +#include "VkProtected.hpp" +#include "VkPush.hpp" +#include "VkQuery.hpp" +#include "VkQueue.hpp" +#include "VkRect.hpp" +#include "VkRefresh.hpp" +#include "VkSample.hpp" +#include "VkSampler.hpp" +#include "VkSemaphore.hpp" +#include "VkShader.hpp" +#include "VkShading.hpp" +#include "VkShared.hpp" +#include "VkSparse.hpp" +#include "VkSpecialization.hpp" +#include "VkStencil.hpp" +#include "VkSubmit.hpp" +#include "VkSubpass.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ + struct SubpassBeginInfoKHR + { + VULKAN_HPP_CONSTEXPR SubpassBeginInfoKHR( VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline ) VULKAN_HPP_NOEXCEPT + : contents( contents_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassBeginInfoKHR( SubpassBeginInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , contents( rhs.contents ) + {} + + SubpassBeginInfoKHR & operator=( SubpassBeginInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( SubpassBeginInfoKHR ) - offsetof( SubpassBeginInfoKHR, pNext ) ); + return *this; + } + + SubpassBeginInfoKHR( VkSubpassBeginInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SubpassBeginInfoKHR& operator=( VkSubpassBeginInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassBeginInfoKHR const *>(&rhs); + return *this; + } + + SubpassBeginInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SubpassBeginInfoKHR & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT + { + contents = contents_; + return *this; + } + + operator VkSubpassBeginInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkSubpassBeginInfoKHR*>( this ); + } + + operator VkSubpassBeginInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return
*reinterpret_cast<VkSubpassBeginInfoKHR*>( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SubpassBeginInfoKHR const& ) const = default; +#else + bool operator==( SubpassBeginInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( contents == rhs.contents ); + } + + bool operator!=( SubpassBeginInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfoKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline; + }; + static_assert( sizeof( SubpassBeginInfoKHR ) == sizeof( VkSubpassBeginInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout<SubpassBeginInfoKHR>::value, "struct wrapper is not a standard layout!" ); + + struct SubpassDescriptionDepthStencilResolveKHR + { + VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolveKHR( VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR::eNone, + VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR::eNone, + const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pDepthStencilResolveAttachment_ = {} ) VULKAN_HPP_NOEXCEPT + : depthResolveMode( depthResolveMode_ ) + , stencilResolveMode( stencilResolveMode_ ) + , pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ ) + {} + + VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolveKHR( SubpassDescriptionDepthStencilResolveKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + , depthResolveMode( rhs.depthResolveMode ) + , stencilResolveMode( rhs.stencilResolveMode ) + , pDepthStencilResolveAttachment( rhs.pDepthStencilResolveAttachment ) + {} + + SubpassDescriptionDepthStencilResolveKHR & operator=( SubpassDescriptionDepthStencilResolveKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( SubpassDescriptionDepthStencilResolveKHR ) - offsetof( SubpassDescriptionDepthStencilResolveKHR, pNext ) ); + return *this; + } + + SubpassDescriptionDepthStencilResolveKHR( VkSubpassDescriptionDepthStencilResolveKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SubpassDescriptionDepthStencilResolveKHR& operator=( VkSubpassDescriptionDepthStencilResolveKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolveKHR const *>(&rhs); + return *this; + } + + SubpassDescriptionDepthStencilResolveKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + SubpassDescriptionDepthStencilResolveKHR & setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR depthResolveMode_ ) VULKAN_HPP_NOEXCEPT + { + depthResolveMode = depthResolveMode_; + return *this; + } + + SubpassDescriptionDepthStencilResolveKHR & setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT + { + stencilResolveMode = stencilResolveMode_; + return *this; + } + + SubpassDescriptionDepthStencilResolveKHR & setPDepthStencilResolveAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT + { + pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_; + return *this; + } + + operator VkSubpassDescriptionDepthStencilResolveKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast<const VkSubpassDescriptionDepthStencilResolveKHR*>( this ); + } + + operator
VkSubpassDescriptionDepthStencilResolveKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SubpassDescriptionDepthStencilResolveKHR const& ) const = default; +#else + bool operator==( SubpassDescriptionDepthStencilResolveKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ) + && ( depthResolveMode == rhs.depthResolveMode ) + && ( stencilResolveMode == rhs.stencilResolveMode ) + && ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment ); + } + + bool operator!=( SubpassDescriptionDepthStencilResolveKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolveKHR; + const void* pNext = {}; + VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR depthResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR::eNone; + VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR::eNone; + const VULKAN_HPP_NAMESPACE::AttachmentReference2KHR* pDepthStencilResolveAttachment = {}; + }; + static_assert( sizeof( SubpassDescriptionDepthStencilResolveKHR ) == sizeof( VkSubpassDescriptionDepthStencilResolveKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); + + struct SubpassEndInfoKHR + { + VULKAN_HPP_CONSTEXPR SubpassEndInfoKHR() VULKAN_HPP_NOEXCEPT + {} + + VULKAN_HPP_CONSTEXPR SubpassEndInfoKHR( SubpassEndInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT + : pNext( rhs.pNext ) + {} + + SubpassEndInfoKHR & operator=( SubpassEndInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + memcpy( &pNext, &rhs.pNext, sizeof( SubpassEndInfoKHR ) - offsetof( SubpassEndInfoKHR, pNext ) ); + return *this; + } + + SubpassEndInfoKHR( VkSubpassEndInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = rhs; + } + + SubpassEndInfoKHR& operator=( VkSubpassEndInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast(&rhs); + return *this; + } + + SubpassEndInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + operator VkSubpassEndInfoKHR const&() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkSubpassEndInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR) + auto operator<=>( SubpassEndInfoKHR const& ) const = default; +#else + bool operator==( SubpassEndInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) + && ( pNext == rhs.pNext ); + } + + bool operator!=( SubpassEndInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfoKHR; + const void* pNext = {}; + }; + static_assert( sizeof( SubpassEndInfoKHR ) == sizeof( VkSubpassEndInfoKHR ), "struct and wrapper have different size!" ); + static_assert( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkSubresource.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkSubresource.hpp new file mode 100644 index 00000000..b5a88fe7 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkSubresource.hpp @@ -0,0 +1,122 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkStream.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkExtension.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkMetal.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkFence.hpp"
+#include "VkDispatch.hpp"
+#include "VkPipeline.hpp"
+#include "VkDraw.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkRay.hpp"
+#include "VkExtent.hpp"
+#include "VkPerformance.hpp"
+#include "VkFilter.hpp"
+#include "VkRender.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPhysical.hpp"
+#include "VkPresent.hpp"
+#include "VkProtected.hpp"
+#include "VkPush.hpp"
+#include "VkQuery.hpp"
+#include "VkQueue.hpp"
+#include "VkRect.hpp"
+#include "VkRefresh.hpp"
+#include "VkSample.hpp"
+#include "VkSampler.hpp"
+#include "VkSemaphore.hpp"
+#include "VkShader.hpp"
+#include "VkShading.hpp"
+#include "VkShared.hpp"
+#include "VkSparse.hpp"
+#include "VkSpecialization.hpp"
+#include "VkStencil.hpp"
+#include "VkSubmit.hpp"
+#include "VkSubpass.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkSurface.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkSurface.hpp
new file mode 100644
index 00000000..08b203b0
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkSurface.hpp
@@ -0,0 +1,826 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("Combined Software") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkSubresource.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkStream.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkExtension.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkMetal.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkFence.hpp"
+#include "VkDispatch.hpp"
+#include "VkPipeline.hpp"
+#include "VkDraw.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkRay.hpp"
+#include "VkExtent.hpp"
+#include "VkPerformance.hpp"
+#include "VkFilter.hpp"
+#include "VkRender.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPhysical.hpp"
+#include "VkPresent.hpp"
+#include "VkProtected.hpp"
+#include "VkPush.hpp"
+#include "VkQuery.hpp"
+#include "VkQueue.hpp"
+#include "VkRect.hpp"
+#include "VkRefresh.hpp"
+#include "VkSample.hpp"
+#include "VkSampler.hpp"
+#include "VkSemaphore.hpp"
+#include "VkShader.hpp"
+#include "VkShading.hpp"
+#include "VkShared.hpp"
+#include "VkSparse.hpp"
+#include "VkSpecialization.hpp"
+#include "VkStencil.hpp"
+#include "VkSubmit.hpp"
+#include "VkSubpass.hpp"
+#include "VkSurface.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct SurfaceCapabilities2EXT
+  {
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( uint32_t minImageCount_ = {},
+                                                  uint32_t maxImageCount_ = {},
+                                                  VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {},
+                                                  VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {},
+                                                  VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {},
+                                                  uint32_t maxImageArrayLayers_ = {},
+                                                  VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
+                                                  VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+                                                  VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {},
+                                                  VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {},
+                                                  VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT
+      : minImageCount( minImageCount_ )
+      , maxImageCount( maxImageCount_ )
+      , currentExtent( currentExtent_ )
+      , minImageExtent( minImageExtent_ )
+      , maxImageExtent( maxImageExtent_ )
+      , maxImageArrayLayers( maxImageArrayLayers_ )
+      , supportedTransforms( supportedTransforms_ )
+      , currentTransform( currentTransform_ )
+      , supportedCompositeAlpha( supportedCompositeAlpha_ )
+      , supportedUsageFlags( supportedUsageFlags_ )
+      , supportedSurfaceCounters( supportedSurfaceCounters_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( SurfaceCapabilities2EXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , minImageCount( rhs.minImageCount )
+      , maxImageCount( rhs.maxImageCount )
+      , currentExtent( rhs.currentExtent )
+      , minImageExtent( rhs.minImageExtent )
+      , maxImageExtent( rhs.maxImageExtent )
+      , maxImageArrayLayers( rhs.maxImageArrayLayers )
+      , supportedTransforms( rhs.supportedTransforms )
+      , currentTransform( rhs.currentTransform )
+      , supportedCompositeAlpha( rhs.supportedCompositeAlpha )
+      , supportedUsageFlags( rhs.supportedUsageFlags )
+      , supportedSurfaceCounters( rhs.supportedSurfaceCounters )
+    {}
+
+    SurfaceCapabilities2EXT & operator=( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( SurfaceCapabilities2EXT ) - offsetof( SurfaceCapabilities2EXT, pNext ) );
+      return *this;
+    }
+
+    SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SurfaceCapabilities2EXT& operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const *>(&rhs);
+      return *this;
+    }
+
+    operator VkSurfaceCapabilities2EXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilities2EXT*>( this );
+    }
+
+    operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceCapabilities2EXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceCapabilities2EXT const& ) const = default;
+#else
+    bool operator==( SurfaceCapabilities2EXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( minImageCount == rhs.minImageCount )
+          && ( maxImageCount == rhs.maxImageCount )
+          && ( currentExtent == rhs.currentExtent )
+          && ( minImageExtent == rhs.minImageExtent )
+          && ( maxImageExtent == rhs.maxImageExtent )
+          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+          && ( supportedTransforms == rhs.supportedTransforms )
+          && ( currentTransform == rhs.currentTransform )
+          && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
+          && ( supportedUsageFlags == rhs.supportedUsageFlags )
+          && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
+    }
+
+    bool operator!=( SurfaceCapabilities2EXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT;
+    void* pNext = {};
+    uint32_t minImageCount = {};
+    uint32_t maxImageCount = {};
+    VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
+    VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
+    uint32_t maxImageArrayLayers = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+    VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
+    VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {};
+  };
+  static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceCapabilities2EXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct SurfaceCapabilitiesKHR
+  {
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( uint32_t minImageCount_ = {},
+                                                 uint32_t maxImageCount_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {},
+                                                 uint32_t maxImageArrayLayers_ = {},
+                                                 VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {},
+                                                 VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+                                                 VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {},
+                                                 VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT
+      : minImageCount( minImageCount_ )
+      , maxImageCount( maxImageCount_ )
+      , currentExtent( currentExtent_ )
+      , minImageExtent( minImageExtent_ )
+      , maxImageExtent( maxImageExtent_ )
+      , maxImageArrayLayers( maxImageArrayLayers_ )
+      , supportedTransforms( supportedTransforms_ )
+      , currentTransform( currentTransform_ )
+      , supportedCompositeAlpha( supportedCompositeAlpha_ )
+      , supportedUsageFlags( supportedUsageFlags_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( SurfaceCapabilitiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : minImageCount( rhs.minImageCount )
+      , maxImageCount( rhs.maxImageCount )
+      , currentExtent( rhs.currentExtent )
+      , minImageExtent( rhs.minImageExtent )
+      , maxImageExtent( rhs.maxImageExtent )
+      , maxImageArrayLayers( rhs.maxImageArrayLayers )
+      , supportedTransforms( rhs.supportedTransforms )
+      , currentTransform( rhs.currentTransform )
+      , supportedCompositeAlpha( rhs.supportedCompositeAlpha )
+      , supportedUsageFlags( rhs.supportedUsageFlags )
+    {}
+
+    SurfaceCapabilitiesKHR & operator=( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( SurfaceCapabilitiesKHR ) );
+      return *this;
+    }
+
+    SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SurfaceCapabilitiesKHR& operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkSurfaceCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilitiesKHR*>( this );
+    }
+
+    operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceCapabilitiesKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceCapabilitiesKHR const& ) const = default;
+#else
+    bool operator==( SurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( minImageCount == rhs.minImageCount )
+          && ( maxImageCount == rhs.maxImageCount )
+          && ( currentExtent == rhs.currentExtent )
+          && ( minImageExtent == rhs.minImageExtent )
+          && ( maxImageExtent == rhs.maxImageExtent )
+          && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
+          && ( supportedTransforms == rhs.supportedTransforms )
+          && ( currentTransform == rhs.currentTransform )
+          && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
+          && ( supportedUsageFlags == rhs.supportedUsageFlags );
+    }
+
+    bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    uint32_t minImageCount = {};
+    uint32_t maxImageCount = {};
+    VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
+    VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
+    VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
+    uint32_t maxImageArrayLayers = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+    VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
+  };
+  static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct SurfaceCapabilities2KHR
+  {
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {} ) VULKAN_HPP_NOEXCEPT
+      : surfaceCapabilities( surfaceCapabilities_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( SurfaceCapabilities2KHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , surfaceCapabilities( rhs.surfaceCapabilities )
+    {}
+
+    SurfaceCapabilities2KHR & operator=( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( SurfaceCapabilities2KHR ) - offsetof( SurfaceCapabilities2KHR, pNext ) );
+      return *this;
+    }
+
+    SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SurfaceCapabilities2KHR& operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkSurfaceCapabilities2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilities2KHR*>( this );
+    }
+
+    operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceCapabilities2KHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceCapabilities2KHR const& ) const = default;
+#else
+    bool operator==( SurfaceCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( surfaceCapabilities == rhs.surfaceCapabilities );
+    }
+
+    bool operator!=( SurfaceCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {};
+  };
+  static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct SurfaceCapabilitiesFullScreenExclusiveEXT
+  {
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {} ) VULKAN_HPP_NOEXCEPT
+      : fullScreenExclusiveSupported( fullScreenExclusiveSupported_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( SurfaceCapabilitiesFullScreenExclusiveEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , fullScreenExclusiveSupported( rhs.fullScreenExclusiveSupported )
+    {}
+
+    SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) - offsetof( SurfaceCapabilitiesFullScreenExclusiveEXT, pNext ) );
+      return *this;
+    }
+
+    SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SurfaceCapabilitiesFullScreenExclusiveEXT& operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const *>(&rhs);
+      return *this;
+    }
+
+    SurfaceCapabilitiesFullScreenExclusiveEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SurfaceCapabilitiesFullScreenExclusiveEXT & setFullScreenExclusiveSupported( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fullScreenExclusiveSupported = fullScreenExclusiveSupported_;
+      return *this;
+    }
+
+    operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceCapabilitiesFullScreenExclusiveEXT*>( this );
+    }
+
+    operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceCapabilitiesFullScreenExclusiveEXT const& ) const = default;
+#else
+    bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported );
+    }
+
+    bool operator!=( SurfaceCapabilitiesFullScreenExclusiveEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported = {};
+  };
+  static_assert( sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) == sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceCapabilitiesFullScreenExclusiveEXT>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct SurfaceFormatKHR
+  {
+    VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                           VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT
+      : format( format_ )
+      , colorSpace( colorSpace_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( SurfaceFormatKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : format( rhs.format )
+      , colorSpace( rhs.colorSpace )
+    {}
+
+    SurfaceFormatKHR & operator=( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( static_cast<void*>(this), &rhs, sizeof( SurfaceFormatKHR ) );
+      return *this;
+    }
+
+    SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SurfaceFormatKHR& operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkSurfaceFormatKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFormatKHR*>( this );
+    }
+
+    operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFormatKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceFormatKHR const& ) const = default;
+#else
+    bool operator==( SurfaceFormatKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( format == rhs.format )
+          && ( colorSpace == rhs.colorSpace );
+    }
+
+    bool operator!=( SurfaceFormatKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
+  };
+  static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceFormatKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct SurfaceFormat2KHR
+  {
+    VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {} ) VULKAN_HPP_NOEXCEPT
+      : surfaceFormat( surfaceFormat_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormat2KHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , surfaceFormat( rhs.surfaceFormat )
+    {}
+
+    SurfaceFormat2KHR & operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( SurfaceFormat2KHR ) - offsetof( SurfaceFormat2KHR, pNext ) );
+      return *this;
+    }
+
+    SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SurfaceFormat2KHR& operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const *>(&rhs);
+      return *this;
+    }
+
+    operator VkSurfaceFormat2KHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFormat2KHR*>( this );
+    }
+
+    operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFormat2KHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceFormat2KHR const& ) const = default;
+#else
+    bool operator==( SurfaceFormat2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( surfaceFormat == rhs.surfaceFormat );
+    }
+
+    bool operator!=( SurfaceFormat2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFormat2KHR;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {};
+  };
+  static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceFormat2KHR>::value, "struct wrapper is not a standard layout!" );
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct SurfaceFullScreenExclusiveInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault ) VULKAN_HPP_NOEXCEPT
+      : fullScreenExclusive( fullScreenExclusive_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( SurfaceFullScreenExclusiveInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , fullScreenExclusive( rhs.fullScreenExclusive )
+    {}
+
+    SurfaceFullScreenExclusiveInfoEXT & operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( SurfaceFullScreenExclusiveInfoEXT ) - offsetof( SurfaceFullScreenExclusiveInfoEXT, pNext ) );
+      return *this;
+    }
+
+    SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SurfaceFullScreenExclusiveInfoEXT& operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    SurfaceFullScreenExclusiveInfoEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SurfaceFullScreenExclusiveInfoEXT & setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT
+    {
+      fullScreenExclusive = fullScreenExclusive_;
+      return *this;
+    }
+
+    operator VkSurfaceFullScreenExclusiveInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFullScreenExclusiveInfoEXT*>( this );
+    }
+
+    operator VkSurfaceFullScreenExclusiveInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFullScreenExclusiveInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceFullScreenExclusiveInfoEXT const& ) const = default;
+#else
+    bool operator==( SurfaceFullScreenExclusiveInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( fullScreenExclusive == rhs.fullScreenExclusive );
+    }
+
+    bool operator!=( SurfaceFullScreenExclusiveInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault;
+  };
+  static_assert( sizeof( SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceFullScreenExclusiveInfoEXT>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct SurfaceFullScreenExclusiveWin32InfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = {} ) VULKAN_HPP_NOEXCEPT
+      : hmonitor( hmonitor_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( SurfaceFullScreenExclusiveWin32InfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , hmonitor( rhs.hmonitor )
+    {}
+
+    SurfaceFullScreenExclusiveWin32InfoEXT & operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) - offsetof( SurfaceFullScreenExclusiveWin32InfoEXT, pNext ) );
+      return *this;
+    }
+
+    SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SurfaceFullScreenExclusiveWin32InfoEXT& operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    SurfaceFullScreenExclusiveWin32InfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SurfaceFullScreenExclusiveWin32InfoEXT & setHmonitor( HMONITOR hmonitor_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hmonitor = hmonitor_;
+      return *this;
+    }
+
+    operator VkSurfaceFullScreenExclusiveWin32InfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceFullScreenExclusiveWin32InfoEXT*>( this );
+    }
+
+    operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceFullScreenExclusiveWin32InfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceFullScreenExclusiveWin32InfoEXT const& ) const = default;
+#else
+    bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( hmonitor == rhs.hmonitor );
+    }
+
+    bool operator!=( SurfaceFullScreenExclusiveWin32InfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;
+    const void* pNext = {};
+    HMONITOR hmonitor = {};
+  };
+  static_assert( sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceFullScreenExclusiveWin32InfoEXT>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  struct SurfaceProtectedCapabilitiesKHR
+  {
+    VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {} ) VULKAN_HPP_NOEXCEPT
+      : supportsProtected( supportsProtected_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( SurfaceProtectedCapabilitiesKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , supportsProtected( rhs.supportsProtected )
+    {}
+
+    SurfaceProtectedCapabilitiesKHR & operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( SurfaceProtectedCapabilitiesKHR ) - offsetof( SurfaceProtectedCapabilitiesKHR, pNext ) );
+      return *this;
+    }
+
+    SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SurfaceProtectedCapabilitiesKHR& operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const *>(&rhs);
+      return *this;
+    }
+
+    SurfaceProtectedCapabilitiesKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SurfaceProtectedCapabilitiesKHR & setSupportsProtected( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ ) VULKAN_HPP_NOEXCEPT
+    {
+      supportsProtected = supportsProtected_;
+      return *this;
+    }
+
+    operator VkSurfaceProtectedCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSurfaceProtectedCapabilitiesKHR*>( this );
+    }
+
+    operator VkSurfaceProtectedCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SurfaceProtectedCapabilitiesKHR const& ) const = default;
+#else
+    bool operator==( SurfaceProtectedCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supportsProtected == rhs.supportsProtected );
+    }
+
+    bool operator!=( SurfaceProtectedCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {};
+  };
+  static_assert( sizeof( SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SurfaceProtectedCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkSwapchain.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkSwapchain.hpp
new file mode 100644
index 00000000..89d1d330
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkSwapchain.hpp
@@ -0,0 +1,512 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("Combined Software") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkSubresource.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkStream.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkExtension.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkMetal.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkFence.hpp"
+#include "VkDispatch.hpp"
+#include "VkPipeline.hpp"
+#include "VkDraw.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkRay.hpp"
+#include "VkExtent.hpp"
+#include "VkPerformance.hpp"
+#include "VkFilter.hpp"
+#include "VkRender.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPhysical.hpp"
+#include "VkPresent.hpp"
+#include "VkProtected.hpp"
+#include "VkPush.hpp"
+#include "VkQuery.hpp"
+#include "VkQueue.hpp"
+#include "VkRect.hpp"
+#include "VkRefresh.hpp"
+#include "VkSample.hpp"
+#include "VkSampler.hpp"
+#include "VkSemaphore.hpp"
+#include "VkShader.hpp"
+#include "VkShading.hpp"
+#include "VkShared.hpp"
+#include "VkSparse.hpp"
+#include "VkSpecialization.hpp"
+#include "VkStencil.hpp"
+#include "VkSubmit.hpp"
+#include "VkSubpass.hpp"
+#include "VkSurface.hpp"
+#include "VkSwapchain.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct SwapchainCounterCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {} ) VULKAN_HPP_NOEXCEPT
+      : surfaceCounters( surfaceCounters_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( SwapchainCounterCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , surfaceCounters( rhs.surfaceCounters )
+    {}
+
+    SwapchainCounterCreateInfoEXT & operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( SwapchainCounterCreateInfoEXT ) - offsetof( SwapchainCounterCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SwapchainCounterCreateInfoEXT& operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    SwapchainCounterCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SwapchainCounterCreateInfoEXT & setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT
+    {
+      surfaceCounters = surfaceCounters_;
+      return *this;
+    }
+
+    operator VkSwapchainCounterCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSwapchainCounterCreateInfoEXT*>( this );
+    }
+
+    operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSwapchainCounterCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SwapchainCounterCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( SwapchainCounterCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( surfaceCounters == rhs.surfaceCounters );
+    }
+
+    bool operator!=( SwapchainCounterCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {};
+  };
+  static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SwapchainCounterCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct SwapchainCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {},
+                                                 VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {},
+                                                 uint32_t minImageCount_ = {},
+                                                 VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined,
+                                                 VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear,
+                                                 VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {},
+                                                 uint32_t imageArrayLayers_ = {},
+                                                 VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {},
+                                                 VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive,
+                                                 uint32_t queueFamilyIndexCount_ = {},
+                                                 const uint32_t* pQueueFamilyIndices_ = {},
+                                                 VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity,
+                                                 VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque,
+                                                 VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate,
+                                                 VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {},
+                                                 VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , surface( surface_ )
+      , minImageCount( minImageCount_ )
+      , imageFormat( imageFormat_ )
+      , imageColorSpace( imageColorSpace_ )
+      , imageExtent( imageExtent_ )
+      , imageArrayLayers( imageArrayLayers_ )
+      , imageUsage( imageUsage_ )
+      , imageSharingMode( imageSharingMode_ )
+      , queueFamilyIndexCount( queueFamilyIndexCount_ )
+      , pQueueFamilyIndices( pQueueFamilyIndices_ )
+      , preTransform( preTransform_ )
+      , compositeAlpha( compositeAlpha_ )
+      , presentMode( presentMode_ )
+      , clipped( clipped_ )
+      , oldSwapchain( oldSwapchain_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( SwapchainCreateInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , surface( rhs.surface )
+      , minImageCount( rhs.minImageCount )
+      , imageFormat( rhs.imageFormat )
+      , imageColorSpace( rhs.imageColorSpace )
+      , imageExtent( rhs.imageExtent )
+      , imageArrayLayers( rhs.imageArrayLayers )
+      , imageUsage( rhs.imageUsage )
+      , imageSharingMode( rhs.imageSharingMode )
+      , queueFamilyIndexCount( rhs.queueFamilyIndexCount )
+      , pQueueFamilyIndices( rhs.pQueueFamilyIndices )
+      , preTransform( rhs.preTransform )
+      , compositeAlpha( rhs.compositeAlpha )
+      , presentMode( rhs.presentMode )
+      , clipped( rhs.clipped )
+      , oldSwapchain( rhs.oldSwapchain )
+    {}
+
+    SwapchainCreateInfoKHR & operator=( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( SwapchainCreateInfoKHR ) - offsetof( SwapchainCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SwapchainCreateInfoKHR& operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
+    {
+      surface = surface_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      minImageCount = minImageCount_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageFormat = imageFormat_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageColorSpace = imageColorSpace_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageExtent = imageExtent_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageArrayLayers = imageArrayLayers_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageUsage = imageUsage_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      imageSharingMode = imageSharingMode_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      queueFamilyIndexCount = queueFamilyIndexCount_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pQueueFamilyIndices = pQueueFamilyIndices_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT
+    {
+      preTransform = preTransform_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT
+    {
+      compositeAlpha = compositeAlpha_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
+    {
+      presentMode = presentMode_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT
+    {
+      clipped = clipped_;
+      return *this;
+    }
+
+    SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT
+    {
+      oldSwapchain = oldSwapchain_;
+      return *this;
+    }
+
+    operator VkSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSwapchainCreateInfoKHR*>( this );
+    }
+
+    operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSwapchainCreateInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SwapchainCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( SwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( surface == rhs.surface )
+          && ( minImageCount == rhs.minImageCount )
+          && ( imageFormat == rhs.imageFormat )
+          && ( imageColorSpace == rhs.imageColorSpace )
+          && ( imageExtent == rhs.imageExtent )
+          && ( imageArrayLayers == rhs.imageArrayLayers )
+          && ( imageUsage == rhs.imageUsage )
+          && ( imageSharingMode == rhs.imageSharingMode )
+          && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
+          && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
+          && ( preTransform == rhs.preTransform )
+          && ( compositeAlpha == rhs.compositeAlpha )
+          && ( presentMode == rhs.presentMode )
+          && ( clipped == rhs.clipped )
+          && ( oldSwapchain == rhs.oldSwapchain );
+    }
+
+    bool operator!=( SwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {};
+    VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
+    uint32_t minImageCount = {};
+    VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
+    VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
+    VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
+    uint32_t imageArrayLayers = {};
+    VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {};
+    VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
+    uint32_t queueFamilyIndexCount = {};
+    const uint32_t* pQueueFamilyIndices = {};
+    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
+    VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque;
+    VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate;
+    VULKAN_HPP_NAMESPACE::Bool32 clipped = {};
+    VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {};
+  };
+  static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+
+  struct SwapchainDisplayNativeHdrCreateInfoAMD
+  {
+    VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {} ) VULKAN_HPP_NOEXCEPT
+      : localDimmingEnable( localDimmingEnable_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( SwapchainDisplayNativeHdrCreateInfoAMD const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , localDimmingEnable( rhs.localDimmingEnable )
+    {}
+
+    SwapchainDisplayNativeHdrCreateInfoAMD & operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) - offsetof( SwapchainDisplayNativeHdrCreateInfoAMD, pNext ) );
+      return *this;
+    }
+
+    SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    SwapchainDisplayNativeHdrCreateInfoAMD& operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const *>(&rhs);
+      return *this;
+    }
+
+    SwapchainDisplayNativeHdrCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    SwapchainDisplayNativeHdrCreateInfoAMD & setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT
+    {
+      localDimmingEnable = localDimmingEnable_;
+      return *this;
+    }
+
+    operator VkSwapchainDisplayNativeHdrCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkSwapchainDisplayNativeHdrCreateInfoAMD*>( this );
+    }
+
+    operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkSwapchainDisplayNativeHdrCreateInfoAMD*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( SwapchainDisplayNativeHdrCreateInfoAMD const& ) const = default;
+#else
+    bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( localDimmingEnable == rhs.localDimmingEnable );
+    }
+
+    bool operator!=( SwapchainDisplayNativeHdrCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {};
+  };
+  static_assert( sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<SwapchainDisplayNativeHdrCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkTexture.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkTexture.hpp
new file mode 100644
index 00000000..00a7fbdd
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkTexture.hpp
@@ -0,0 +1,187 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("Combined Software") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkSubresource.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkStream.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkExtension.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkMetal.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkFence.hpp"
+#include "VkDispatch.hpp"
+#include "VkPipeline.hpp"
+#include "VkDraw.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkRay.hpp"
+#include "VkExtent.hpp"
+#include "VkPerformance.hpp"
+#include "VkFilter.hpp"
+#include "VkRender.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPhysical.hpp"
+#include "VkPresent.hpp"
+#include "VkProtected.hpp"
+#include "VkPush.hpp"
+#include "VkQuery.hpp"
+#include "VkQueue.hpp"
+#include "VkRect.hpp"
+#include "VkRefresh.hpp"
+#include "VkSample.hpp"
+#include "VkSampler.hpp"
+#include "VkSemaphore.hpp"
+#include "VkShader.hpp"
+#include "VkShading.hpp"
+#include "VkShared.hpp"
+#include "VkSwapchain.hpp"
+#include "VkSparse.hpp"
+#include "VkSpecialization.hpp"
+#include "VkStencil.hpp"
+#include "VkSubmit.hpp"
+#include "VkSubpass.hpp"
+#include "VkSurface.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct TextureLODGatherFormatPropertiesAMD
+  {
+    VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {} ) VULKAN_HPP_NOEXCEPT
+      : supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( TextureLODGatherFormatPropertiesAMD const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , supportsTextureGatherLODBiasAMD( rhs.supportsTextureGatherLODBiasAMD )
+    {}
+
+    TextureLODGatherFormatPropertiesAMD & operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( TextureLODGatherFormatPropertiesAMD ) - offsetof( TextureLODGatherFormatPropertiesAMD, pNext ) );
+      return *this;
+    }
+
+    TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    TextureLODGatherFormatPropertiesAMD& operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const *>(&rhs);
+      return *this;
+    }
+
+    operator VkTextureLODGatherFormatPropertiesAMD const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD*>( this );
+    }
+
+    operator VkTextureLODGatherFormatPropertiesAMD &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( TextureLODGatherFormatPropertiesAMD const& ) const = default;
+#else
+    bool operator==( TextureLODGatherFormatPropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD );
+    }
+
+    bool operator!=( TextureLODGatherFormatPropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD;
+    void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD = {};
+  };
+  static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<TextureLODGatherFormatPropertiesAMD>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkValidation.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkValidation.hpp
new file mode 100644
index 00000000..f590eb51
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkValidation.hpp
@@ -0,0 +1,415 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
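Unlike the create-info structs, TextureLODGatherFormatPropertiesAMD above is a pure output structure (note its non-const pNext): the driver fills it in when it is chained onto a vk::ImageFormatProperties2 query. A sketch of that wiring, assuming a `vk` namespace alias and an existing `physicalDevice` handle (both assumptions, not part of this patch):

    vk::TextureLODGatherFormatPropertiesAMD lodGatherProps;
    vk::ImageFormatProperties2 imageProps;
    imageProps.pNext = &lodGatherProps;  // chain the AMD output struct onto the core query

    vk::PhysicalDeviceImageFormatInfo2 formatInfo;
    formatInfo.setFormat( vk::Format::eR8G8B8A8Unorm )
              .setType( vk::ImageType::e2D )
              .setTiling( vk::ImageTiling::eOptimal )
              .setUsage( vk::ImageUsageFlagBits::eSampled );

    // The pointer overload mirrors vkGetPhysicalDeviceImageFormatProperties2
    // and writes through the whole pNext chain.
    if ( physicalDevice.getImageFormatProperties2( &formatInfo, &imageProps ) == vk::Result::eSuccess )
    {
        bool gatherLodBias = ( lodGatherProps.supportsTextureGatherLODBiasAMD == VK_TRUE );
    }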
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkSubresource.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkStream.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkExtension.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkMetal.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkFence.hpp"
+#include "VkDispatch.hpp"
+#include "VkPipeline.hpp"
+#include "VkDraw.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkRay.hpp"
+#include "VkExtent.hpp"
+#include "VkPerformance.hpp"
+#include "VkFilter.hpp"
+#include "VkRender.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPhysical.hpp"
+#include "VkPresent.hpp"
+#include "VkProtected.hpp"
+#include "VkPush.hpp"
+#include "VkQuery.hpp"
+#include "VkQueue.hpp"
+#include "VkRect.hpp"
+#include "VkRefresh.hpp"
+#include "VkSample.hpp"
+#include "VkSampler.hpp"
+#include "VkSemaphore.hpp"
+#include "VkShader.hpp"
+#include "VkShading.hpp"
+#include "VkShared.hpp"
+#include "VkSwapchain.hpp"
+#include "VkSparse.hpp"
+#include "VkSpecialization.hpp"
+#include "VkStencil.hpp"
+#include "VkSubmit.hpp"
+#include "VkSubpass.hpp"
+#include "VkSurface.hpp"
+#include "VkTexture.hpp"
+#include "VkValidation.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct ValidationCacheCreateInfoEXT
+  {
+    VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {},
+      size_t initialDataSize_ = {},
+      const void* pInitialData_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , initialDataSize( initialDataSize_ )
+      , pInitialData( pInitialData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( ValidationCacheCreateInfoEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , initialDataSize( rhs.initialDataSize )
+      , pInitialData( rhs.pInitialData )
+    {}
+
+    ValidationCacheCreateInfoEXT & operator=( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ValidationCacheCreateInfoEXT ) - offsetof( ValidationCacheCreateInfoEXT, pNext ) );
+      return *this;
+    }
+
+    ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ValidationCacheCreateInfoEXT& operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const *>(&rhs);
+      return *this;
+    }
+
+    ValidationCacheCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ValidationCacheCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      initialDataSize = initialDataSize_;
+      return *this;
+    }
+
+    ValidationCacheCreateInfoEXT & setPInitialData( const void* pInitialData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pInitialData = pInitialData_;
+      return *this;
+    }
+
+    operator VkValidationCacheCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( this );
+    }
+
+    operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkValidationCacheCreateInfoEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ValidationCacheCreateInfoEXT const& ) const = default;
+#else
+    bool operator==( ValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( initialDataSize == rhs.initialDataSize )
+          && ( pInitialData == rhs.pInitialData );
+    }
+
+    bool operator!=( ValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {};
+    size_t initialDataSize = {};
+    const void* pInitialData = {};
+  };
+  static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct ValidationFeaturesEXT
+  {
+    VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( uint32_t enabledValidationFeatureCount_ = {},
+      const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ = {},
+      uint32_t disabledValidationFeatureCount_ = {},
+      const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ = {} ) VULKAN_HPP_NOEXCEPT
+      : enabledValidationFeatureCount( enabledValidationFeatureCount_ )
+      , pEnabledValidationFeatures( pEnabledValidationFeatures_ )
+      , disabledValidationFeatureCount( disabledValidationFeatureCount_ )
+      , pDisabledValidationFeatures( pDisabledValidationFeatures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( ValidationFeaturesEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , enabledValidationFeatureCount( rhs.enabledValidationFeatureCount )
+      , pEnabledValidationFeatures( rhs.pEnabledValidationFeatures )
+      , disabledValidationFeatureCount( rhs.disabledValidationFeatureCount )
+      , pDisabledValidationFeatures( rhs.pDisabledValidationFeatures )
+    {}
+
+    ValidationFeaturesEXT & operator=( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ValidationFeaturesEXT ) - offsetof( ValidationFeaturesEXT, pNext ) );
+      return *this;
+    }
+
+    ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ValidationFeaturesEXT& operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const *>(&rhs);
+      return *this;
+    }
+
+    ValidationFeaturesEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ValidationFeaturesEXT & setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      enabledValidationFeatureCount = enabledValidationFeatureCount_;
+      return *this;
+    }
+
+    ValidationFeaturesEXT & setPEnabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pEnabledValidationFeatures = pEnabledValidationFeatures_;
+      return *this;
+    }
+
+    ValidationFeaturesEXT & setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      disabledValidationFeatureCount = disabledValidationFeatureCount_;
+      return *this;
+    }
+
+    ValidationFeaturesEXT & setPDisabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDisabledValidationFeatures = pDisabledValidationFeatures_;
+      return *this;
+    }
+
+    operator VkValidationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkValidationFeaturesEXT*>( this );
+    }
+
+    operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkValidationFeaturesEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ValidationFeaturesEXT const& ) const = default;
+#else
+    bool operator==( ValidationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount )
+          && ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures )
+          && ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount )
+          && ( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures );
+    }
+
+    bool operator!=( ValidationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT;
+    const void* pNext = {};
+    uint32_t enabledValidationFeatureCount = {};
+    const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures = {};
+    uint32_t disabledValidationFeatureCount = {};
+    const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures = {};
+  };
+  static_assert( sizeof( ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ValidationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+
+  struct ValidationFlagsEXT
+  {
+    VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = {},
+      const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks_ = {} ) VULKAN_HPP_NOEXCEPT
+      : disabledValidationCheckCount( disabledValidationCheckCount_ )
+      , pDisabledValidationChecks( pDisabledValidationChecks_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( ValidationFlagsEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , disabledValidationCheckCount( rhs.disabledValidationCheckCount )
+      , pDisabledValidationChecks( rhs.pDisabledValidationChecks )
+    {}
+
+    ValidationFlagsEXT & operator=( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ValidationFlagsEXT ) - offsetof( ValidationFlagsEXT, pNext ) );
+      return *this;
+    }
+
+    ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ValidationFlagsEXT& operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const *>(&rhs);
+      return *this;
+    }
+
+    ValidationFlagsEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ValidationFlagsEXT & setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      disabledValidationCheckCount = disabledValidationCheckCount_;
+      return *this;
+    }
+
+    ValidationFlagsEXT & setPDisabledValidationChecks( const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pDisabledValidationChecks = pDisabledValidationChecks_;
+      return *this;
+    }
+
+    operator VkValidationFlagsEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkValidationFlagsEXT*>( this );
+    }
+
+    operator VkValidationFlagsEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkValidationFlagsEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ValidationFlagsEXT const& ) const = default;
+#else
+    bool operator==( ValidationFlagsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount )
+          && ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
+    }
+
+    bool operator!=( ValidationFlagsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFlagsEXT;
+    const void* pNext = {};
+    uint32_t disabledValidationCheckCount = {};
+    const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks = {};
+  };
+  static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ValidationFlagsEXT>::value, "struct wrapper is not a standard layout!"
); +} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkVertex.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkVertex.hpp new file mode 100644 index 00000000..c2e22e35 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkVertex.hpp @@ -0,0 +1,128 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
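Of the three wrappers in VkValidation.hpp above, ValidationFeaturesEXT is the one most often seen in application code: it is chained onto vk::InstanceCreateInfo to toggle individual checks in the validation layer. A minimal sketch, assuming a `vk` namespace alias and an `instanceInfo` that is otherwise configured with the validation layer enabled (assumed names, not part of this patch):

    // Opt in to GPU-assisted validation on top of the normal checks.
    const vk::ValidationFeatureEnableEXT enables[] = {
        vk::ValidationFeatureEnableEXT::eGpuAssisted
    };

    vk::ValidationFeaturesEXT validationFeatures;
    validationFeatures.setEnabledValidationFeatureCount( 1 )
                      .setPEnabledValidationFeatures( enables );

    vk::InstanceCreateInfo instanceInfo;
    instanceInfo.pNext = &validationFeatures;  // read by the validation layer at vkCreateInstance time
    vk::Instance instance = vk::createInstance( instanceInfo );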
+ +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkObject.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkSubresource.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkStream.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkExtension.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkMetal.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkPast.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkFence.hpp" +#include "VkDispatch.hpp" +#include "VkPipeline.hpp" +#include "VkDraw.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkRay.hpp" +#include "VkExtent.hpp" +#include "VkPerformance.hpp" +#include "VkFilter.hpp" +#include "VkRender.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkMultisample.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkOffset.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" +#include "VkPhysical.hpp" +#include "VkPresent.hpp" +#include "VkProtected.hpp" +#include "VkPush.hpp" +#include "VkQuery.hpp" +#include "VkQueue.hpp" +#include "VkRect.hpp" +#include "VkRefresh.hpp" +#include "VkSample.hpp" +#include "VkSampler.hpp" +#include "VkSemaphore.hpp" +#include "VkShader.hpp" +#include "VkShading.hpp" +#include "VkShared.hpp" +#include "VkSwapchain.hpp" +#include "VkSparse.hpp" +#include "VkSpecialization.hpp" +#include "VkStencil.hpp" +#include "VkSubmit.hpp" +#include "VkSubpass.hpp" +#include "VkSurface.hpp" +#include "VkTexture.hpp" +#include "VkValidation.hpp" +#include "VkVertex.hpp" + +namespace VULKAN_HPP_NAMESPACE +{} // namespace VULKAN_HPP_NAMESPACE diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkVi.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkVi.hpp new file mode 100644 index 00000000..37f81b81 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkVi.hpp @@ -0,0 +1,215 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkObject.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkSubresource.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkStream.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkExtension.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkMetal.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkPast.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkFence.hpp" +#include "VkDispatch.hpp" +#include "VkPipeline.hpp" +#include "VkDraw.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkRay.hpp" +#include "VkExtent.hpp" +#include "VkPerformance.hpp" +#include "VkFilter.hpp" +#include "VkRender.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkMultisample.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkOffset.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" +#include "VkPhysical.hpp" +#include "VkPresent.hpp" +#include "VkProtected.hpp" +#include "VkPush.hpp" +#include "VkQuery.hpp" +#include "VkQueue.hpp" +#include "VkRect.hpp" +#include "VkRefresh.hpp" +#include "VkSample.hpp" +#include "VkSampler.hpp" +#include "VkSemaphore.hpp" +#include "VkShader.hpp" +#include "VkShading.hpp" +#include "VkShared.hpp" +#include "VkSwapchain.hpp" +#include "VkSparse.hpp" +#include "VkSpecialization.hpp" +#include "VkStencil.hpp" +#include "VkSubmit.hpp" +#include "VkSubpass.hpp" +#include "VkSurface.hpp" +#include "VkTexture.hpp" +#include "VkValidation.hpp" +#include "VkVertex.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ +#ifdef VK_USE_PLATFORM_VI_NN + struct ViSurfaceCreateInfoNN + { + VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( 
VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {},
+      void* window_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , window( window_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( ViSurfaceCreateInfoNN const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , window( rhs.window )
+    {}
+
+    ViSurfaceCreateInfoNN & operator=( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( ViSurfaceCreateInfoNN ) - offsetof( ViSurfaceCreateInfoNN, pNext ) );
+      return *this;
+    }
+
+    ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    ViSurfaceCreateInfoNN& operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const *>(&rhs);
+      return *this;
+    }
+
+    ViSurfaceCreateInfoNN & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    ViSurfaceCreateInfoNN & setFlags( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    ViSurfaceCreateInfoNN & setWindow( void* window_ ) VULKAN_HPP_NOEXCEPT
+    {
+      window = window_;
+      return *this;
+    }
+
+    operator VkViSurfaceCreateInfoNN const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkViSurfaceCreateInfoNN*>( this );
+    }
+
+    operator VkViSurfaceCreateInfoNN &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkViSurfaceCreateInfoNN*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( ViSurfaceCreateInfoNN const& ) const = default;
+#else
+    bool operator==( ViSurfaceCreateInfoNN const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( window == rhs.window );
+    }
+
+    bool operator!=( ViSurfaceCreateInfoNN const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eViSurfaceCreateInfoNN;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {};
+    void* window = {};
+  };
+  static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<ViSurfaceCreateInfoNN>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_VI_NN*/
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkViewport.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkViewport.hpp
new file mode 100644
index 00000000..9ed7d552
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkViewport.hpp
@@ -0,0 +1,130 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkObject.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkSubresource.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkStream.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkExtension.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkMetal.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkPast.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkFence.hpp" +#include "VkDispatch.hpp" +#include "VkPipeline.hpp" +#include "VkDraw.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkRay.hpp" +#include "VkExtent.hpp" +#include "VkPerformance.hpp" +#include "VkFilter.hpp" +#include "VkRender.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkMultisample.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkOffset.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" +#include "VkPhysical.hpp" +#include "VkPresent.hpp" +#include "VkProtected.hpp" +#include "VkPush.hpp" +#include "VkQuery.hpp" +#include "VkQueue.hpp" +#include "VkRect.hpp" +#include "VkRefresh.hpp" +#include "VkSample.hpp" +#include "VkSampler.hpp" +#include "VkSemaphore.hpp" +#include "VkShader.hpp" +#include "VkShading.hpp" +#include "VkShared.hpp" +#include "VkSwapchain.hpp" +#include "VkSparse.hpp" +#include "VkSpecialization.hpp" +#include "VkStencil.hpp" +#include "VkSubmit.hpp" +#include "VkSubpass.hpp" +#include "VkSurface.hpp" +#include "VkTexture.hpp" +#include "VkValidation.hpp" +#include "VkVertex.hpp" +#include "VkVi.hpp" +#include "VkViewport.hpp" + +namespace VULKAN_HPP_NAMESPACE +{} // namespace VULKAN_HPP_NAMESPACE diff --git 
a/app/libraries/VulkanHpp/vulkan/internal/struct/VkWayland.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkWayland.hpp new file mode 100644 index 00000000..591c9794 --- /dev/null +++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkWayland.hpp @@ -0,0 +1,228 @@ +// Copyright (c) 2015-2019 The Khronos Group Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ---- Exceptions to the Apache 2.0 License: ---- +// +// As an exception, if you use this Software to generate code and portions of +// this Software are embedded into the generated code as a result, you may +// redistribute such product without providing attribution as would otherwise +// be required by Sections 4(a), 4(b) and 4(d) of the License. +// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. 
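The WaylandSurfaceCreateInfoKHR wrapper defined below follows the same setter pattern as the other platform-surface structs; only the windowing handles differ. A minimal creation sketch, assuming an existing `vk::Instance instance` and `wl_display*`/`wl_surface*` pointers obtained from the Wayland client API (all assumed, not part of this patch):

    #ifdef VK_USE_PLATFORM_WAYLAND_KHR
    vk::WaylandSurfaceCreateInfoKHR surfaceInfo;
    surfaceInfo.setDisplay( waylandDisplay )   // struct wl_display*
               .setSurface( waylandSurface );  // struct wl_surface*
    vk::SurfaceKHR vkSurface = instance.createWaylandSurfaceKHR( surfaceInfo );
    #endif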
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkSubresource.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkStream.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkExtension.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkMetal.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkFence.hpp"
+#include "VkDispatch.hpp"
+#include "VkPipeline.hpp"
+#include "VkViewport.hpp"
+#include "VkDraw.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkRay.hpp"
+#include "VkExtent.hpp"
+#include "VkPerformance.hpp"
+#include "VkFilter.hpp"
+#include "VkRender.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPhysical.hpp"
+#include "VkPresent.hpp"
+#include "VkProtected.hpp"
+#include "VkPush.hpp"
+#include "VkQuery.hpp"
+#include "VkQueue.hpp"
+#include "VkRect.hpp"
+#include "VkRefresh.hpp"
+#include "VkSample.hpp"
+#include "VkSampler.hpp"
+#include "VkSemaphore.hpp"
+#include "VkShader.hpp"
+#include "VkShading.hpp"
+#include "VkShared.hpp"
+#include "VkSwapchain.hpp"
+#include "VkSparse.hpp"
+#include "VkSpecialization.hpp"
+#include "VkStencil.hpp"
+#include "VkSubmit.hpp"
+#include "VkSubpass.hpp"
+#include "VkSurface.hpp"
+#include "VkTexture.hpp"
+#include "VkValidation.hpp"
+#include "VkVertex.hpp"
+#include "VkVi.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+  struct WaylandSurfaceCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {},
+      struct wl_display* display_ = {},
+      struct wl_surface* surface_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , display( display_ )
+      , surface( surface_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , display( rhs.display )
+      , surface( rhs.surface )
+    {}
+
+    WaylandSurfaceCreateInfoKHR & operator=( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( WaylandSurfaceCreateInfoKHR ) - offsetof( WaylandSurfaceCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    WaylandSurfaceCreateInfoKHR& operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR & setDisplay( struct wl_display* display_ ) VULKAN_HPP_NOEXCEPT
+    {
+      display = display_;
+      return *this;
+    }
+
+    WaylandSurfaceCreateInfoKHR & setSurface( struct wl_surface* surface_ ) VULKAN_HPP_NOEXCEPT
+    {
+      surface = surface_;
+      return *this;
+    }
+
+    operator VkWaylandSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkWaylandSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWaylandSurfaceCreateInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( WaylandSurfaceCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( WaylandSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( display == rhs.display )
+          && ( surface == rhs.surface );
+    }
+
+    bool operator!=( WaylandSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags = {};
+    struct wl_display* display = {};
+    struct wl_surface* surface = {};
+  };
+  static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<WaylandSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkWin32.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkWin32.hpp
new file mode 100644
index 00000000..251d62a7
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkWin32.hpp
@@ -0,0 +1,514 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+// +// In addition, if you combine or link code generated by this Software with +// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1 +// ("`Combined Software`") and if a court of competent jurisdiction determines +// that the patent provision (Section 3), the indemnity provision (Section 9) +// or other Section of the License conflicts with the conditions of the +// applicable GPL or LGPL license, you may retroactively and prospectively +// choose to deem waived or otherwise exclude such Section(s) of the License, +// but only in their entirety and only with respect to the Combined Software. +// + +// This header is generated from the Khronos Vulkan XML API Registry. + +#pragma once + +#include "../handles.hpp" +#include "VkAcquire.hpp" +#include "VkAcceleration.hpp" +#include "VkApplication.hpp" +#include "VkInitialize.hpp" +#include "VkAllocation.hpp" +#include "VkExternal.hpp" +#include "VkBind.hpp" +#include "VkObject.hpp" +#include "VkCooperative.hpp" +#include "VkAndroid.hpp" +#include "VkImport.hpp" +#include "VkImage.hpp" +#include "VkDescriptor.hpp" +#include "VkBase.hpp" +#include "VkAttachment.hpp" +#include "VkBuffer.hpp" +#include "VkFramebuffer.hpp" +#include "VkCalibrated.hpp" +#include "VkDevice.hpp" +#include "VkSubresource.hpp" +#include "VkCheckpoint.hpp" +#include "VkConformance.hpp" +#include "VkClear.hpp" +#include "VkStream.hpp" +#include "VkCmd.hpp" +#include "VkCoarse.hpp" +#include "VkExtension.hpp" +#include "VkCommand.hpp" +#include "VkFormat.hpp" +#include "VkMetal.hpp" +#include "VkComponent.hpp" +#include "VkCopy.hpp" +#include "VkCompute.hpp" +#include "VkPast.hpp" +#include "VkConditional.hpp" +#include "VkMapped.hpp" +#include "VkD3D.hpp" +#include "VkDebug.hpp" +#include "VkDedicated.hpp" +#include "VkFence.hpp" +#include "VkDispatch.hpp" +#include "VkPipeline.hpp" +#include "VkViewport.hpp" +#include "VkDraw.hpp" +#include "VkDisplay.hpp" +#include "VkDrm.hpp" +#include "VkEvent.hpp" +#include "VkExport.hpp" +#include "VkRay.hpp" +#include "VkExtent.hpp" +#include "VkPerformance.hpp" +#include "VkFilter.hpp" +#include "VkRender.hpp" +#include "VkGeometry.hpp" +#include "VkGraphics.hpp" +#include "VkHdr.hpp" +#include "VkHeadless.hpp" +#include "VkMultisample.hpp" +#include "VkI.hpp" +#include "VkIndirect.hpp" +#include "VkInput.hpp" +#include "VkOffset.hpp" +#include "VkMemory.hpp" +#include "VkInstance.hpp" +#include "VkLayer.hpp" +#include "VkMac.hpp" +#include "VkPhysical.hpp" +#include "VkPresent.hpp" +#include "VkProtected.hpp" +#include "VkPush.hpp" +#include "VkQuery.hpp" +#include "VkQueue.hpp" +#include "VkRect.hpp" +#include "VkRefresh.hpp" +#include "VkSample.hpp" +#include "VkSampler.hpp" +#include "VkSemaphore.hpp" +#include "VkShader.hpp" +#include "VkShading.hpp" +#include "VkShared.hpp" +#include "VkSwapchain.hpp" +#include "VkSparse.hpp" +#include "VkSpecialization.hpp" +#include "VkStencil.hpp" +#include "VkSubmit.hpp" +#include "VkSubpass.hpp" +#include "VkSurface.hpp" +#include "VkTexture.hpp" +#include "VkValidation.hpp" +#include "VkVertex.hpp" +#include "VkVi.hpp" +#include "VkWayland.hpp" +#include "VkWin32.hpp" + +namespace VULKAN_HPP_NAMESPACE +{ +#ifdef VK_USE_PLATFORM_WIN32_KHR + struct Win32KeyedMutexAcquireReleaseInfoKHR + { + VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( uint32_t acquireCount_ = {}, + const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = {}, + const uint64_t* pAcquireKeys_ = {}, + const uint32_t* pAcquireTimeouts_ = {}, + uint32_t releaseCount_ = {}, + const 
VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = {},
+      const uint64_t* pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT
+      : acquireCount( acquireCount_ )
+      , pAcquireSyncs( pAcquireSyncs_ )
+      , pAcquireKeys( pAcquireKeys_ )
+      , pAcquireTimeouts( pAcquireTimeouts_ )
+      , releaseCount( releaseCount_ )
+      , pReleaseSyncs( pReleaseSyncs_ )
+      , pReleaseKeys( pReleaseKeys_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , acquireCount( rhs.acquireCount )
+      , pAcquireSyncs( rhs.pAcquireSyncs )
+      , pAcquireKeys( rhs.pAcquireKeys )
+      , pAcquireTimeouts( rhs.pAcquireTimeouts )
+      , releaseCount( rhs.releaseCount )
+      , pReleaseSyncs( rhs.pReleaseSyncs )
+      , pReleaseKeys( rhs.pReleaseKeys )
+    {}
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) - offsetof( Win32KeyedMutexAcquireReleaseInfoKHR, pNext ) );
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR& operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = acquireCount_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireSyncs = pAcquireSyncs_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireKeys( const uint64_t* pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireKeys = pAcquireKeys_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireTimeouts( const uint32_t* pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireTimeouts = pAcquireTimeouts_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = releaseCount_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReleaseSyncs = pReleaseSyncs_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseKeys( const uint64_t* pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReleaseKeys = pReleaseKeys_;
+      return *this;
+    }
+
+    operator VkWin32KeyedMutexAcquireReleaseInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR*>( this );
+    }
+
+    operator VkWin32KeyedMutexAcquireReleaseInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Win32KeyedMutexAcquireReleaseInfoKHR const& ) const = default;
+#else
+    bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( acquireCount == rhs.acquireCount )
+          && ( pAcquireSyncs == rhs.pAcquireSyncs )
+          && ( pAcquireKeys == rhs.pAcquireKeys )
+          && ( pAcquireTimeouts == rhs.pAcquireTimeouts )
+          && ( releaseCount == rhs.releaseCount )
+          && ( pReleaseSyncs == rhs.pReleaseSyncs )
+          && ( pReleaseKeys == rhs.pReleaseKeys );
+    }
+
+    bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
+    const void* pNext = {};
+    uint32_t acquireCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs = {};
+    const uint64_t* pAcquireKeys = {};
+    const uint32_t* pAcquireTimeouts = {};
+    uint32_t releaseCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs = {};
+    const uint64_t* pReleaseKeys = {};
+  };
+  static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Win32KeyedMutexAcquireReleaseInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct Win32KeyedMutexAcquireReleaseInfoNV
+  {
+    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( uint32_t acquireCount_ = {},
+      const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = {},
+      const uint64_t* pAcquireKeys_ = {},
+      const uint32_t* pAcquireTimeoutMilliseconds_ = {},
+      uint32_t releaseCount_ = {},
+      const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = {},
+      const uint64_t* pReleaseKeys_ = {} ) VULKAN_HPP_NOEXCEPT
+      : acquireCount( acquireCount_ )
+      , pAcquireSyncs( pAcquireSyncs_ )
+      , pAcquireKeys( pAcquireKeys_ )
+      , pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ )
+      , releaseCount( releaseCount_ )
+      , pReleaseSyncs( pReleaseSyncs_ )
+      , pReleaseKeys( pReleaseKeys_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , acquireCount( rhs.acquireCount )
+      , pAcquireSyncs( rhs.pAcquireSyncs )
+      , pAcquireKeys( rhs.pAcquireKeys )
+      , pAcquireTimeoutMilliseconds( rhs.pAcquireTimeoutMilliseconds )
+      , releaseCount( rhs.releaseCount )
+      , pReleaseSyncs( rhs.pReleaseSyncs )
+      , pReleaseKeys( rhs.pReleaseKeys )
+    {}
+
+    Win32KeyedMutexAcquireReleaseInfoNV & operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) - offsetof( Win32KeyedMutexAcquireReleaseInfoNV, pNext ) );
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV& operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const *>(&rhs);
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      acquireCount = acquireCount_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireSyncs = pAcquireSyncs_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireKeys( const uint64_t* pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireKeys = pAcquireKeys_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireTimeoutMilliseconds( const uint32_t* pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      releaseCount = releaseCount_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReleaseSyncs = pReleaseSyncs_;
+      return *this;
+    }
+
+    Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseKeys( const uint64_t* pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pReleaseKeys = pReleaseKeys_;
+      return *this;
+    }
+
+    operator VkWin32KeyedMutexAcquireReleaseInfoNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV*>( this );
+    }
+
+    operator VkWin32KeyedMutexAcquireReleaseInfoNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Win32KeyedMutexAcquireReleaseInfoNV const& ) const = default;
+#else
+    bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( acquireCount == rhs.acquireCount )
+          && ( pAcquireSyncs == rhs.pAcquireSyncs )
+          && ( pAcquireKeys == rhs.pAcquireKeys )
+          && ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds )
+          && ( releaseCount == rhs.releaseCount )
+          && ( pReleaseSyncs == rhs.pReleaseSyncs )
+          && ( pReleaseKeys == rhs.pReleaseKeys );
+    }
+
+    bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
+    const void* pNext = {};
+    uint32_t acquireCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs = {};
+    const uint64_t* pAcquireKeys = {};
+    const uint32_t* pAcquireTimeoutMilliseconds = {};
+    uint32_t releaseCount = {};
+    const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs = {};
+    const uint64_t* pReleaseKeys = {};
+  };
+  static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Win32KeyedMutexAcquireReleaseInfoNV>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+  struct Win32SurfaceCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {},
+      HINSTANCE hinstance_ = {},
+      HWND hwnd_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , hinstance( hinstance_ )
+      , hwnd( hwnd_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( Win32SurfaceCreateInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , hinstance( rhs.hinstance )
+      , hwnd( rhs.hwnd )
+    {}
+
+    Win32SurfaceCreateInfoKHR & operator=( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( Win32SurfaceCreateInfoKHR ) - offsetof( Win32SurfaceCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    Win32SurfaceCreateInfoKHR& operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const *>(&rhs);
+      return *this;
+    }
+
+    Win32SurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    Win32SurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    Win32SurfaceCreateInfoKHR & setHinstance( HINSTANCE hinstance_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hinstance = hinstance_;
+      return *this;
+    }
+
+    Win32SurfaceCreateInfoKHR & setHwnd( HWND hwnd_ ) VULKAN_HPP_NOEXCEPT
+    {
+      hwnd = hwnd_;
+      return *this;
+    }
+
+    operator VkWin32SurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkWin32SurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWin32SurfaceCreateInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( Win32SurfaceCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( Win32SurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( hinstance == rhs.hinstance )
+          && ( hwnd == rhs.hwnd );
+    }
+
+    bool operator!=( Win32SurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags = {};
+    HINSTANCE hinstance = {};
+    HWND hwnd = {};
+  };
+  static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<Win32SurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkWrite.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkWrite.hpp
new file mode 100644
index 00000000..0908918a
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkWrite.hpp
@@ -0,0 +1,454 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkSubresource.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkStream.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkExtension.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkMetal.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkFence.hpp"
+#include "VkDispatch.hpp"
+#include "VkPipeline.hpp"
+#include "VkViewport.hpp"
+#include "VkDraw.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkRay.hpp"
+#include "VkExtent.hpp"
+#include "VkPerformance.hpp"
+#include "VkFilter.hpp"
+#include "VkRender.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPhysical.hpp"
+#include "VkPresent.hpp"
+#include "VkProtected.hpp"
+#include "VkPush.hpp"
+#include "VkQuery.hpp"
+#include "VkQueue.hpp"
+#include "VkRect.hpp"
+#include "VkRefresh.hpp"
+#include "VkSample.hpp"
+#include "VkSampler.hpp"
+#include "VkSemaphore.hpp"
+#include "VkShader.hpp"
+#include "VkShading.hpp"
+#include "VkShared.hpp"
+#include "VkSwapchain.hpp"
+#include "VkSparse.hpp"
+#include "VkSpecialization.hpp"
+#include "VkStencil.hpp"
+#include "VkSubmit.hpp"
+#include "VkSubpass.hpp"
+#include "VkSurface.hpp"
+#include "VkTexture.hpp"
+#include "VkValidation.hpp"
+#include "VkVertex.hpp"
+#include "VkVi.hpp"
+#include "VkWayland.hpp"
+#include "VkWin32.hpp"
+#include "VkWrite.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  struct WriteDescriptorSet
+  {
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {},
+                                             uint32_t dstBinding_ = {},
+                                             uint32_t dstArrayElement_ = {},
+                                             uint32_t descriptorCount_ = {},
+                                             VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler,
+                                             const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ = {},
+                                             const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ = {},
+                                             const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ = {} ) VULKAN_HPP_NOEXCEPT
+      : dstSet( dstSet_ )
+      , dstBinding( dstBinding_ )
+      , dstArrayElement( dstArrayElement_ )
+      , descriptorCount( descriptorCount_ )
+      , descriptorType( descriptorType_ )
+      , pImageInfo( pImageInfo_ )
+      , pBufferInfo( pBufferInfo_ )
+      , pTexelBufferView( pTexelBufferView_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , dstSet( rhs.dstSet )
+      , dstBinding( rhs.dstBinding )
+      , dstArrayElement( rhs.dstArrayElement )
+      , descriptorCount( rhs.descriptorCount )
+      , descriptorType( rhs.descriptorType )
+      , pImageInfo( rhs.pImageInfo )
+      , pBufferInfo( rhs.pBufferInfo )
+      , pTexelBufferView( rhs.pTexelBufferView )
+    {}
+
+    WriteDescriptorSet & operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( WriteDescriptorSet ) - offsetof( WriteDescriptorSet, pNext ) );
+      return *this;
+    }
+
+    WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    WriteDescriptorSet& operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<WriteDescriptorSet const *>( &rhs );
+      return *this;
+    }
+
+    WriteDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstSet = dstSet_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstBinding = dstBinding_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dstArrayElement = dstArrayElement_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorCount = descriptorCount_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
+    {
+      descriptorType = descriptorType_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pImageInfo = pImageInfo_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pBufferInfo = pBufferInfo_;
+      return *this;
+    }
+
+    WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pTexelBufferView = pTexelBufferView_;
+      return *this;
+    }
+
+    operator VkWriteDescriptorSet const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSet*>( this );
+    }
+
+    operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWriteDescriptorSet*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( WriteDescriptorSet const& ) const = default;
+#else
+    bool operator==( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dstSet == rhs.dstSet )
+          && ( dstBinding == rhs.dstBinding )
+          && ( dstArrayElement == rhs.dstArrayElement )
+          && ( descriptorCount == rhs.descriptorCount )
+          && ( descriptorType == rhs.descriptorType )
+          && ( pImageInfo == rhs.pImageInfo )
+          && ( pBufferInfo == rhs.pBufferInfo )
+          && ( pTexelBufferView == rhs.pTexelBufferView );
+    }
+
+    bool operator!=( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
+    uint32_t dstBinding = {};
+    uint32_t dstArrayElement = {};
+    uint32_t descriptorCount = {};
+    VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
+    const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo = {};
+    const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo = {};
+    const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView = {};
+  };
+  static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<WriteDescriptorSet>::value, "struct wrapper is not a standard layout!" );
+
+  struct WriteDescriptorSetAccelerationStructureNV
+  {
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( uint32_t accelerationStructureCount_ = {},
+                                                                    const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures_ = {} ) VULKAN_HPP_NOEXCEPT
+      : accelerationStructureCount( accelerationStructureCount_ )
+      , pAccelerationStructures( pAccelerationStructures_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( WriteDescriptorSetAccelerationStructureNV const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , accelerationStructureCount( rhs.accelerationStructureCount )
+      , pAccelerationStructures( rhs.pAccelerationStructures )
+    {}
+
+    WriteDescriptorSetAccelerationStructureNV & operator=( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( WriteDescriptorSetAccelerationStructureNV ) - offsetof( WriteDescriptorSetAccelerationStructureNV, pNext ) );
+      return *this;
+    }
+
+    WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    WriteDescriptorSetAccelerationStructureNV& operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<WriteDescriptorSetAccelerationStructureNV const *>( &rhs );
+      return *this;
+    }
+
+    WriteDescriptorSetAccelerationStructureNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    WriteDescriptorSetAccelerationStructureNV & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      accelerationStructureCount = accelerationStructureCount_;
+      return *this;
+    }
+
+    WriteDescriptorSetAccelerationStructureNV & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pAccelerationStructures = pAccelerationStructures_;
+      return *this;
+    }
+
+    operator VkWriteDescriptorSetAccelerationStructureNV const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureNV*>( this );
+    }
+
+    operator VkWriteDescriptorSetAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( WriteDescriptorSetAccelerationStructureNV const& ) const = default;
+#else
+    bool operator==( WriteDescriptorSetAccelerationStructureNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( accelerationStructureCount == rhs.accelerationStructureCount )
+          && ( pAccelerationStructures == rhs.pAccelerationStructures );
+    }
+
+    bool operator!=( WriteDescriptorSetAccelerationStructureNV const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
+    const void* pNext = {};
+    uint32_t accelerationStructureCount = {};
+    const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures = {};
+  };
+  static_assert( sizeof( WriteDescriptorSetAccelerationStructureNV ) == sizeof( VkWriteDescriptorSetAccelerationStructureNV ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<WriteDescriptorSetAccelerationStructureNV>::value, "struct wrapper is not a standard layout!" );
+
+  struct WriteDescriptorSetInlineUniformBlockEXT
+  {
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( uint32_t dataSize_ = {},
+                                                                  const void* pData_ = {} ) VULKAN_HPP_NOEXCEPT
+      : dataSize( dataSize_ )
+      , pData( pData_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , dataSize( rhs.dataSize )
+      , pData( rhs.pData )
+    {}
+
+    WriteDescriptorSetInlineUniformBlockEXT & operator=( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( WriteDescriptorSetInlineUniformBlockEXT ) - offsetof( WriteDescriptorSetInlineUniformBlockEXT, pNext ) );
+      return *this;
+    }
+
+    WriteDescriptorSetInlineUniformBlockEXT( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    WriteDescriptorSetInlineUniformBlockEXT& operator=( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<WriteDescriptorSetInlineUniformBlockEXT const *>( &rhs );
+      return *this;
+    }
+
+    WriteDescriptorSetInlineUniformBlockEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    WriteDescriptorSetInlineUniformBlockEXT & setDataSize( uint32_t dataSize_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dataSize = dataSize_;
+      return *this;
+    }
+
+    WriteDescriptorSetInlineUniformBlockEXT & setPData( const void* pData_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pData = pData_;
+      return *this;
+    }
+
+    operator VkWriteDescriptorSetInlineUniformBlockEXT const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlockEXT*>( this );
+    }
+
+    operator VkWriteDescriptorSetInlineUniformBlockEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkWriteDescriptorSetInlineUniformBlockEXT*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( WriteDescriptorSetInlineUniformBlockEXT const& ) const = default;
+#else
+    bool operator==( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( dataSize == rhs.dataSize )
+          && ( pData == rhs.pData );
+    }
+
+    bool operator!=( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlockEXT;
+    const void* pNext = {};
+    uint32_t dataSize = {};
+    const void* pData = {};
+  };
+  static_assert( sizeof( WriteDescriptorSetInlineUniformBlockEXT ) == sizeof( VkWriteDescriptorSetInlineUniformBlockEXT ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<WriteDescriptorSetInlineUniformBlockEXT>::value, "struct wrapper is not a standard layout!" );
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkX.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkX.hpp
new file mode 100644
index 00000000..ccc9bc48
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkX.hpp
@@ -0,0 +1,133 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkSubresource.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkStream.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkExtension.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkMetal.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkFence.hpp"
+#include "VkDispatch.hpp"
+#include "VkPipeline.hpp"
+#include "VkViewport.hpp"
+#include "VkDraw.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkRay.hpp"
+#include "VkExtent.hpp"
+#include "VkPerformance.hpp"
+#include "VkFilter.hpp"
+#include "VkRender.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPhysical.hpp"
+#include "VkPresent.hpp"
+#include "VkProtected.hpp"
+#include "VkPush.hpp"
+#include "VkQuery.hpp"
+#include "VkQueue.hpp"
+#include "VkRect.hpp"
+#include "VkRefresh.hpp"
+#include "VkSample.hpp"
+#include "VkSampler.hpp"
+#include "VkSemaphore.hpp"
+#include "VkShader.hpp"
+#include "VkShading.hpp"
+#include "VkShared.hpp"
+#include "VkSwapchain.hpp"
+#include "VkSparse.hpp"
+#include "VkSpecialization.hpp"
+#include "VkStencil.hpp"
+#include "VkSubmit.hpp"
+#include "VkSubpass.hpp"
+#include "VkSurface.hpp"
+#include "VkTexture.hpp"
+#include "VkValidation.hpp"
+#include "VkVertex.hpp"
+#include "VkVi.hpp"
+#include "VkWayland.hpp"
+#include "VkWin32.hpp"
+#include "VkWrite.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkXcb.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkXcb.hpp
new file mode 100644
index 00000000..d9a884ad
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkXcb.hpp
@@ -0,0 +1,232 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkSubresource.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkStream.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkExtension.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkMetal.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkFence.hpp"
+#include "VkDispatch.hpp"
+#include "VkPipeline.hpp"
+#include "VkViewport.hpp"
+#include "VkDraw.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkRay.hpp"
+#include "VkExtent.hpp"
+#include "VkPerformance.hpp"
+#include "VkFilter.hpp"
+#include "VkRender.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPhysical.hpp"
+#include "VkPresent.hpp"
+#include "VkProtected.hpp"
+#include "VkPush.hpp"
+#include "VkQuery.hpp"
+#include "VkQueue.hpp"
+#include "VkRect.hpp"
+#include "VkRefresh.hpp"
+#include "VkSample.hpp"
+#include "VkSampler.hpp"
+#include "VkSemaphore.hpp"
+#include "VkShader.hpp"
+#include "VkShading.hpp"
+#include "VkShared.hpp"
+#include "VkSwapchain.hpp"
+#include "VkSparse.hpp"
+#include "VkSpecialization.hpp"
+#include "VkStencil.hpp"
+#include "VkSubmit.hpp"
+#include "VkSubpass.hpp"
+#include "VkSurface.hpp"
+#include "VkTexture.hpp"
+#include "VkValidation.hpp"
+#include "VkVertex.hpp"
+#include "VkVi.hpp"
+#include "VkWayland.hpp"
+#include "VkWin32.hpp"
+#include "VkWrite.hpp"
+#include "VkX.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#ifdef VK_USE_PLATFORM_XCB_KHR
+  struct XcbSurfaceCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {},
+                                                  xcb_connection_t* connection_ = {},
+                                                  xcb_window_t window_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , connection( connection_ )
+      , window( window_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( XcbSurfaceCreateInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , connection( rhs.connection )
+      , window( rhs.window )
+    {}
+
+    XcbSurfaceCreateInfoKHR & operator=( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( XcbSurfaceCreateInfoKHR ) - offsetof( XcbSurfaceCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    XcbSurfaceCreateInfoKHR& operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<XcbSurfaceCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    XcbSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    XcbSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    XcbSurfaceCreateInfoKHR & setConnection( xcb_connection_t* connection_ ) VULKAN_HPP_NOEXCEPT
+    {
+      connection = connection_;
+      return *this;
+    }
+
+    XcbSurfaceCreateInfoKHR & setWindow( xcb_window_t window_ ) VULKAN_HPP_NOEXCEPT
+    {
+      window = window_;
+      return *this;
+    }
+
+    operator VkXcbSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkXcbSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkXcbSurfaceCreateInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( XcbSurfaceCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( XcbSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( connection == rhs.connection )
+          && ( window == rhs.window );
+    }
+
+    bool operator!=( XcbSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags = {};
+    xcb_connection_t* connection = {};
+    xcb_window_t window = {};
+  };
+  static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<XcbSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/struct/VkXlib.hpp b/app/libraries/VulkanHpp/vulkan/internal/struct/VkXlib.hpp
new file mode 100644
index 00000000..e04feb87
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/struct/VkXlib.hpp
@@ -0,0 +1,233 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "../handles.hpp"
+#include "VkAcquire.hpp"
+#include "VkAcceleration.hpp"
+#include "VkApplication.hpp"
+#include "VkInitialize.hpp"
+#include "VkAllocation.hpp"
+#include "VkExternal.hpp"
+#include "VkBind.hpp"
+#include "VkObject.hpp"
+#include "VkCooperative.hpp"
+#include "VkAndroid.hpp"
+#include "VkImport.hpp"
+#include "VkImage.hpp"
+#include "VkDescriptor.hpp"
+#include "VkBase.hpp"
+#include "VkAttachment.hpp"
+#include "VkBuffer.hpp"
+#include "VkFramebuffer.hpp"
+#include "VkCalibrated.hpp"
+#include "VkDevice.hpp"
+#include "VkSubresource.hpp"
+#include "VkCheckpoint.hpp"
+#include "VkConformance.hpp"
+#include "VkClear.hpp"
+#include "VkStream.hpp"
+#include "VkCmd.hpp"
+#include "VkCoarse.hpp"
+#include "VkExtension.hpp"
+#include "VkCommand.hpp"
+#include "VkFormat.hpp"
+#include "VkMetal.hpp"
+#include "VkComponent.hpp"
+#include "VkCopy.hpp"
+#include "VkCompute.hpp"
+#include "VkPast.hpp"
+#include "VkConditional.hpp"
+#include "VkMapped.hpp"
+#include "VkD3D.hpp"
+#include "VkDebug.hpp"
+#include "VkDedicated.hpp"
+#include "VkFence.hpp"
+#include "VkDispatch.hpp"
+#include "VkPipeline.hpp"
+#include "VkViewport.hpp"
+#include "VkDraw.hpp"
+#include "VkDisplay.hpp"
+#include "VkDrm.hpp"
+#include "VkEvent.hpp"
+#include "VkExport.hpp"
+#include "VkRay.hpp"
+#include "VkExtent.hpp"
+#include "VkPerformance.hpp"
+#include "VkFilter.hpp"
+#include "VkRender.hpp"
+#include "VkGeometry.hpp"
+#include "VkGraphics.hpp"
+#include "VkHdr.hpp"
+#include "VkHeadless.hpp"
+#include "VkMultisample.hpp"
+#include "VkI.hpp"
+#include "VkIndirect.hpp"
+#include "VkInput.hpp"
+#include "VkOffset.hpp"
+#include "VkMemory.hpp"
+#include "VkInstance.hpp"
+#include "VkLayer.hpp"
+#include "VkMac.hpp"
+#include "VkPhysical.hpp"
+#include "VkPresent.hpp"
+#include "VkProtected.hpp"
+#include "VkPush.hpp"
+#include "VkQuery.hpp"
+#include "VkQueue.hpp"
+#include "VkRect.hpp"
+#include "VkRefresh.hpp"
+#include "VkSample.hpp"
+#include "VkSampler.hpp"
+#include "VkSemaphore.hpp"
+#include "VkShader.hpp"
+#include "VkShading.hpp"
+#include "VkShared.hpp"
+#include "VkSwapchain.hpp"
+#include "VkSparse.hpp"
+#include "VkSpecialization.hpp"
+#include "VkStencil.hpp"
+#include "VkSubmit.hpp"
+#include "VkSubpass.hpp"
+#include "VkSurface.hpp"
+#include "VkTexture.hpp"
+#include "VkValidation.hpp"
+#include "VkVertex.hpp"
+#include "VkVi.hpp"
+#include "VkWayland.hpp"
+#include "VkWin32.hpp"
+#include "VkWrite.hpp"
+#include "VkX.hpp"
+#include "VkXcb.hpp"
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+  struct XlibSurfaceCreateInfoKHR
+  {
+    VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {},
+                                                   Display* dpy_ = {},
+                                                   Window window_ = {} ) VULKAN_HPP_NOEXCEPT
+      : flags( flags_ )
+      , dpy( dpy_ )
+      , window( window_ )
+    {}
+
+    VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( XlibSurfaceCreateInfoKHR const& rhs ) VULKAN_HPP_NOEXCEPT
+      : pNext( rhs.pNext )
+      , flags( rhs.flags )
+      , dpy( rhs.dpy )
+      , window( rhs.window )
+    {}
+
+    XlibSurfaceCreateInfoKHR & operator=( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      memcpy( &pNext, &rhs.pNext, sizeof( XlibSurfaceCreateInfoKHR ) - offsetof( XlibSurfaceCreateInfoKHR, pNext ) );
+      return *this;
+    }
+
+    XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = rhs;
+    }
+
+    XlibSurfaceCreateInfoKHR& operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<XlibSurfaceCreateInfoKHR const *>( &rhs );
+      return *this;
+    }
+
+    XlibSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    XlibSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
+    {
+      flags = flags_;
+      return *this;
+    }
+
+    XlibSurfaceCreateInfoKHR & setDpy( Display* dpy_ ) VULKAN_HPP_NOEXCEPT
+    {
+      dpy = dpy_;
+      return *this;
+    }
+
+    XlibSurfaceCreateInfoKHR & setWindow( Window window_ ) VULKAN_HPP_NOEXCEPT
+    {
+      window = window_;
+      return *this;
+    }
+
+    operator VkXlibSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( this );
+    }
+
+    operator VkXlibSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkXlibSurfaceCreateInfoKHR*>( this );
+    }
+
+#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
+    auto operator<=>( XlibSurfaceCreateInfoKHR const& ) const = default;
+#else
+    bool operator==( XlibSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ( sType == rhs.sType )
+          && ( pNext == rhs.pNext )
+          && ( flags == rhs.flags )
+          && ( dpy == rhs.dpy )
+          && ( window == rhs.window );
+    }
+
+    bool operator!=( XlibSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    const VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR;
+    const void* pNext = {};
+    VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags = {};
+    Display* dpy = {};
+    Window window = {};
+  };
+  static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
+  static_assert( std::is_standard_layout<XlibSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+} // namespace VULKAN_HPP_NAMESPACE
diff --git a/app/libraries/VulkanHpp/vulkan/internal/structs.hpp b/app/libraries/VulkanHpp/vulkan/internal/structs.hpp
new file mode 100644
index 00000000..92a38dc7
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/internal/structs.hpp
@@ -0,0 +1,132 @@
+// Copyright (c) 2015-2019 The Khronos Group Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ---- Exceptions to the Apache 2.0 License: ----
+//
+// As an exception, if you use this Software to generate code and portions of
+// this Software are embedded into the generated code as a result, you may
+// redistribute such product without providing attribution as would otherwise
+// be required by Sections 4(a), 4(b) and 4(d) of the License.
+//
+// In addition, if you combine or link code generated by this Software with
+// software that is licensed under the GPLv2 or the LGPL v2.0 or 2.1
+// ("`Combined Software`") and if a court of competent jurisdiction determines
+// that the patent provision (Section 3), the indemnity provision (Section 9)
+// or other Section of the License conflicts with the conditions of the
+// applicable GPL or LGPL license, you may retroactively and prospectively
+// choose to deem waived or otherwise exclude such Section(s) of the License,
+// but only in their entirety and only with respect to the Combined Software.
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#pragma once
+
+#include "struct/VkAcquire.hpp"
+#include "struct/VkAcceleration.hpp"
+#include "struct/VkApplication.hpp"
+#include "struct/VkInitialize.hpp"
+#include "struct/VkAllocation.hpp"
+#include "struct/VkExternal.hpp"
+#include "struct/VkBind.hpp"
+#include "struct/VkObject.hpp"
+#include "struct/VkCooperative.hpp"
+#include "struct/VkAndroid.hpp"
+#include "struct/VkImport.hpp"
+#include "struct/VkImage.hpp"
+#include "struct/VkDescriptor.hpp"
+#include "struct/VkBase.hpp"
+#include "struct/VkAttachment.hpp"
+#include "struct/VkBuffer.hpp"
+#include "struct/VkFramebuffer.hpp"
+#include "struct/VkCalibrated.hpp"
+#include "struct/VkDevice.hpp"
+#include "struct/VkSubresource.hpp"
+#include "struct/VkCheckpoint.hpp"
+#include "struct/VkConformance.hpp"
+#include "struct/VkClear.hpp"
+#include "struct/VkStream.hpp"
+#include "struct/VkCmd.hpp"
+#include "struct/VkCoarse.hpp"
+#include "struct/VkExtension.hpp"
+#include "struct/VkCommand.hpp"
+#include "struct/VkFormat.hpp"
+#include "struct/VkMetal.hpp"
+#include "struct/VkComponent.hpp"
+#include "struct/VkCopy.hpp"
+#include "struct/VkCompute.hpp"
+#include "struct/VkPast.hpp"
+#include "struct/VkConditional.hpp"
+#include "struct/VkMapped.hpp"
+#include "struct/VkD3D.hpp"
+#include "struct/VkDebug.hpp"
+#include "struct/VkDedicated.hpp"
+#include "struct/VkFence.hpp"
+#include "struct/VkDispatch.hpp"
+#include "struct/VkPipeline.hpp"
+#include "struct/VkViewport.hpp"
+#include "struct/VkDraw.hpp"
+#include "struct/VkDisplay.hpp"
+#include "struct/VkDrm.hpp"
+#include "struct/VkEvent.hpp"
+#include "struct/VkExport.hpp"
+#include "struct/VkRay.hpp"
+#include "struct/VkExtent.hpp"
+#include "struct/VkPerformance.hpp"
+#include "struct/VkFilter.hpp"
+#include "struct/VkRender.hpp"
+#include "struct/VkGeometry.hpp"
+#include "struct/VkGraphics.hpp"
+#include "struct/VkHdr.hpp"
+#include "struct/VkHeadless.hpp"
+#include "struct/VkMultisample.hpp"
+#include "struct/VkI.hpp"
+#include "struct/VkIndirect.hpp"
+#include "struct/VkInput.hpp"
+#include "struct/VkOffset.hpp"
+#include "struct/VkMemory.hpp"
+#include "struct/VkInstance.hpp"
+#include "struct/VkLayer.hpp"
+#include "struct/VkMac.hpp"
+#include "struct/VkPhysical.hpp"
+#include "struct/VkPresent.hpp"
+#include "struct/VkProtected.hpp"
+#include "struct/VkPush.hpp"
+#include "struct/VkQuery.hpp"
+#include "struct/VkQueue.hpp"
+#include "struct/VkRect.hpp"
+#include "struct/VkRefresh.hpp"
+#include "struct/VkSample.hpp"
+#include "struct/VkSampler.hpp"
+#include "struct/VkSemaphore.hpp"
+#include "struct/VkShader.hpp"
+#include "struct/VkShading.hpp"
+#include "struct/VkShared.hpp"
+#include "struct/VkSwapchain.hpp"
+#include "struct/VkSparse.hpp"
+#include "struct/VkSpecialization.hpp"
+#include "struct/VkStencil.hpp"
+#include "struct/VkSubmit.hpp"
+#include "struct/VkSubpass.hpp"
+#include "struct/VkSurface.hpp"
+#include "struct/VkTexture.hpp"
+#include "struct/VkValidation.hpp"
+#include "struct/VkVertex.hpp"
+#include "struct/VkVi.hpp"
+#include "struct/VkWayland.hpp"
+#include "struct/VkWin32.hpp"
+#include "struct/VkWrite.hpp"
+#include "struct/VkX.hpp"
+#include "struct/VkXcb.hpp"
+#include "struct/VkXlib.hpp"
diff --git a/app/libraries/VulkanHpp/vulkan/vulkan.hpp b/app/libraries/VulkanHpp/vulkan/vulkan.hpp
new file mode 100644
index 00000000..7baa8f1e
--- /dev/null
+++ b/app/libraries/VulkanHpp/vulkan/vulkan.hpp
@@ -0,0 +1,3 @@
+#pragma once
+
+#include "internal/dynamicDispatch.hpp"